diff --git a/README.textile b/README.textile index ce7b3b7d344..aa61c035742 100644 --- a/README.textile +++ b/README.textile @@ -209,10 +209,6 @@ The distribution for each project will be created under the @build/distributions See the "TESTING":TESTING.asciidoc file for more information about running the Elasticsearch test suite. -h3. Upgrading from Elasticsearch 1.x? +h3. Upgrading from older Elasticsearch versions -In order to ensure a smooth upgrade process from earlier versions of -Elasticsearch (1.x), it is required to perform a full cluster restart. Please -see the "setup reference": -https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html -for more details on the upgrade process. +In order to ensure a smooth upgrade process from earlier versions of Elasticsearch, please see our "upgrade documentation":https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html for more details on the upgrade process. diff --git a/benchmarks/src/main/resources/log4j2.properties b/benchmarks/src/main/resources/log4j2.properties index c3ae1fe56d3..808d611670f 100644 --- a/benchmarks/src/main/resources/log4j2.properties +++ b/benchmarks/src/main/resources/log4j2.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n # Do not log at all if it is not really critical - we're in a benchmark rootLogger.level = error diff --git a/client/benchmark/src/main/resources/log4j2.properties b/client/benchmark/src/main/resources/log4j2.properties index 8652131bf49..a68c7f5653f 100644 --- a/client/benchmark/src/main/resources/log4j2.properties +++ b/client/benchmark/src/main/resources/log4j2.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console 
appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index d85f316efa8..728d304077f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -110,6 +110,7 @@ import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.protocol.xpack.indexlifecycle.SetIndexLifecyclePolicyRequest; import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; @@ -137,7 +138,7 @@ final class RequestConverters { static Request cancelTasks(CancelTasksRequest cancelTasksRequest) { Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); Params params = new Params(request); - params.withTimeout(cancelTasksRequest.getTimeout()) + params.withTimeout(cancelTasksRequest.getTimeout()) .withTaskId(cancelTasksRequest.getTaskId()) .withNodes(cancelTasksRequest.getNodes()) .withParentTaskId(cancelTasksRequest.getParentTaskId()) @@ -1134,6 +1135,18 @@ final class RequestConverters { return request; } + static Request xPackWatcherDeleteWatch(DeleteWatchRequest deleteWatchRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") 
+ .addPathPart(deleteWatchRequest.getId()) + .build(); + + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + return request; + } + static Request xpackUsage(XPackUsageRequest usageRequest) { Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); Params parameters = new Params(request); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java index 73c92ba5c45..48487926f02 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java @@ -19,12 +19,15 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import java.io.IOException; import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; public final class WatcherClient { @@ -61,4 +64,31 @@ public final class WatcherClient { restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options, PutWatchResponse::fromXContent, listener, emptySet()); } + + /** + * Deletes a watch from the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeleteWatchResponse deleteWatch(DeleteWatchRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options, + DeleteWatchResponse::fromXContent, singleton(404)); + } + + /** + * Asynchronously deletes a watch from the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteWatchAsync(DeleteWatchRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options, + DeleteWatchResponse::fromXContent, listener, singleton(404)); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 1b38ae195e9..0be12e8cebe 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -127,6 +127,7 @@ import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.indexlifecycle.SetIndexLifecyclePolicyRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import 
org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -2600,6 +2601,17 @@ public class RequestConvertersTests extends ESTestCase { assertThat(request.getParameters(), equalTo(expectedParams)); } + public void testXPackDeleteWatch() { + DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); + String watchId = randomAlphaOfLength(10); + deleteWatchRequest.setId(watchId); + + Request request = RequestConverters.xPackWatcherDeleteWatch(deleteWatchRequest); + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); + assertThat(request.getEntity(), nullValue()); + } + /** * Randomize the {@link FetchSourceContext} request parameters. */ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java index dec438a47ab..67d1def323a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -21,6 +21,8 @@ package org.elasticsearch.client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; @@ -30,6 +32,13 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { public void testPutWatch() throws Exception { String watchId = randomAlphaOfLength(10); + PutWatchResponse putWatchResponse = createWatch(watchId); + assertThat(putWatchResponse.isCreated(), is(true)); + assertThat(putWatchResponse.getId(), is(watchId)); + 
assertThat(putWatchResponse.getVersion(), is(1L)); + } + + private PutWatchResponse createWatch(String watchId) throws Exception { String json = "{ \n" + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + " \"input\": { \"none\": {} },\n" + @@ -37,10 +46,30 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { "}"; BytesReference bytesReference = new BytesArray(json); PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, bytesReference, XContentType.JSON); - PutWatchResponse putWatchResponse = highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); - assertThat(putWatchResponse.isCreated(), is(true)); - assertThat(putWatchResponse.getId(), is(watchId)); - assertThat(putWatchResponse.getVersion(), is(1L)); + return highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); + } + + public void testDeleteWatch() throws Exception { + // delete watch that exists + { + String watchId = randomAlphaOfLength(10); + createWatch(watchId); + DeleteWatchResponse deleteWatchResponse = highLevelClient().xpack().watcher().deleteWatch(new DeleteWatchRequest(watchId), + RequestOptions.DEFAULT); + assertThat(deleteWatchResponse.getId(), is(watchId)); + assertThat(deleteWatchResponse.getVersion(), is(2L)); + assertThat(deleteWatchResponse.isFound(), is(true)); + } + + // delete watch that does not exist + { + String watchId = randomAlphaOfLength(10); + DeleteWatchResponse deleteWatchResponse = highLevelClient().xpack().watcher().deleteWatch(new DeleteWatchRequest(watchId), + RequestOptions.DEFAULT); + assertThat(deleteWatchResponse.getId(), is(watchId)); + assertThat(deleteWatchResponse.getVersion(), is(1L)); + assertThat(deleteWatchResponse.isFound(), is(false)); + } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java 
index df51d896cda..47f8510b746 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java @@ -26,6 +26,8 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; @@ -34,7 +36,7 @@ import java.util.concurrent.TimeUnit; public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { - public void testPutWatch() throws Exception { + public void testWatcher() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -88,5 +90,46 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + { + //tag::x-pack-delete-watch-execute + DeleteWatchRequest request = new DeleteWatchRequest("my_watch_id"); + DeleteWatchResponse response = client.xpack().watcher().deleteWatch(request, RequestOptions.DEFAULT); + //end::x-pack-delete-watch-execute + + //tag::x-pack-delete-watch-response + String watchId = response.getId(); // <1> + boolean found = response.isFound(); // <2> + long version = response.getVersion(); // <3> + //end::x-pack-delete-watch-response + } + + { + DeleteWatchRequest request = new DeleteWatchRequest("my_other_watch_id"); + // tag::x-pack-delete-watch-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(DeleteWatchResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // 
end::x-pack-delete-watch-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-delete-watch-execute-async + client.xpack().watcher().deleteWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-delete-watch-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } } + } diff --git a/distribution/src/config/log4j2.properties b/distribution/src/config/log4j2.properties index 589f529e83e..6de21cd48f6 100644 --- a/distribution/src/config/log4j2.properties +++ b/distribution/src/config/log4j2.properties @@ -7,13 +7,13 @@ logger.action.level = debug appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n appender.rolling.type = RollingFile appender.rolling.name = rolling appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.rolling.layout.type = PatternLayout -appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy @@ -38,7 +38,7 @@ appender.deprecation_rolling.type = RollingFile appender.deprecation_rolling.name = deprecation_rolling appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_rolling.layout.type = PatternLayout 
-appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.log.gz appender.deprecation_rolling.policies.type = Policies appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -55,7 +55,7 @@ appender.index_search_slowlog_rolling.type = RollingFile appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog.log appender.index_search_slowlog_rolling.layout.type = PatternLayout -appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog-%i.log.gz appender.index_search_slowlog_rolling.policies.type = Policies appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -72,7 +72,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog.log appender.index_indexing_slowlog_rolling.layout.type = PatternLayout -appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n 
appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog-%i.log.gz appender.index_indexing_slowlog_rolling.policies.type = Policies appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 25fbcaaaeaa..63aef865955 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -54,11 +54,14 @@ The Java High Level REST Client supports the following Miscellaneous APIs: * <> * <> * <> +* <> +* <> include::miscellaneous/main.asciidoc[] include::miscellaneous/ping.asciidoc[] include::x-pack/x-pack-info.asciidoc[] include::x-pack/watcher/put-watch.asciidoc[] +include::x-pack/watcher/delete-watch.asciidoc[] == Indices APIs diff --git a/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc b/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc new file mode 100644 index 00000000000..d5f35817558 --- /dev/null +++ b/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc @@ -0,0 +1,53 @@ +[[java-rest-high-x-pack-watcher-delete-watch]] +=== X-Pack Delete Watch API + +[[java-rest-high-x-pack-watcher-delete-watch-execution]] +==== Execution + +A watch can be deleted as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-watcher-delete-watch-response]] +==== Response + +The returned `DeleteWatchResponse` contains `found`, `id`, +and `version` information. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-response] +-------------------------------------------------- +<1> `_id` contains the id of the watch +<2> `found` is a boolean indicating whether the watch was found +<3> `_version` returns the version of the deleted watch + +[[java-rest-high-x-pack-watcher-delete-watch-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-execute-async] +-------------------------------------------------- +<1> The `DeleteWatchRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteWatchResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-delete-watch-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. 
The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc index c803c54eb5e..a76ba407a1a 100644 --- a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc +++ b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc @@ -1,5 +1,5 @@ [[java-rest-high-x-pack-watcher-put-watch]] -=== X-Pack Info API +=== X-Pack Put Watch API [[java-rest-high-x-pack-watcher-put-watch-execution]] ==== Execution @@ -16,7 +16,7 @@ include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute [[java-rest-high-x-pack-watcher-put-watch-response]] ==== Response -The returned `XPackPutWatchResponse` contain `created`, `id`, +The returned `PutWatchResponse` contains `created`, `id`, and `version` information. ["source","java",subs="attributes,callouts,macros"] @@ -36,7 +36,7 @@ This request can be executed asynchronously: -------------------------------------------------- include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute-async] -------------------------------------------------- -<1> The `XPackPutWatchRequest` to execute and the `ActionListener` to use when +<1> The `PutWatchRequest` to execute and the `ActionListener` to use when the execution completes The asynchronous method does not block and returns immediately. Once it is @@ -44,7 +44,7 @@ completed the `ActionListener` is called back using the `onResponse` method if the execution successfully completed or using the `onFailure` method if it failed. 
-A typical listener for `XPackPutWatchResponse` looks like: +A typical listener for `PutWatchResponse` looks like: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 551c082a662..f42d176aea0 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -653,7 +653,7 @@ GET /_search // CONSOLE In the above example, buckets will be created for all the tags that has the word `sport` in them, except those starting -with `water_` (so the tag `water_sports` will no be aggregated). The `include` regular expression will determine what +with `water_` (so the tag `water_sports` will not be aggregated). The `include` regular expression will determine what values are "allowed" to be aggregated, while the `exclude` determines the values that should not be aggregated. When both are defined, the `exclude` has precedence, meaning, the `include` is evaluated first and only then the `exclude`. 
diff --git a/docs/reference/index-modules/slowlog.asciidoc b/docs/reference/index-modules/slowlog.asciidoc index 1dc84f380f5..235256bdce7 100644 --- a/docs/reference/index-modules/slowlog.asciidoc +++ b/docs/reference/index-modules/slowlog.asciidoc @@ -50,7 +50,7 @@ appender.index_search_slowlog_rolling.type = RollingFile appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog.log appender.index_search_slowlog_rolling.layout.type = PatternLayout -appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog-%i.log.gz appender.index_search_slowlog_rolling.policies.type = Policies appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy @@ -103,7 +103,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog.log appender.index_indexing_slowlog_rolling.layout.type = PatternLayout -appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n +appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog-%i.log.gz appender.index_indexing_slowlog_rolling.policies.type = Policies 
appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 8e0e84c6657..81a96510e46 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -257,6 +257,9 @@ and there are multiple indices referenced by an alias, then writes will not be a It is possible to specify an index associated with an alias as a write index using both the aliases API and index creation API. +Setting an index to be the write index with an alias also affects how the alias is manipulated during +Rollover (see <<indices-rollover-is-write-index>>). + [source,js] -------------------------------------------------- POST /_aliases diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 1e3a361f1b1..1730c95e0dd 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -4,10 +4,19 @@ The rollover index API rolls an alias over to a new index when the existing index is considered to be too large or too old. -The API accepts a single alias name and a list of `conditions`. The alias -must point to a single index only. If the index satisfies the specified -conditions then a new index is created and the alias is switched to point to -the new index. +The API accepts a single alias name and a list of `conditions`. The alias must point to a write index for +a Rollover request to be valid. There are two ways this can be achieved, and depending on the configuration, the +alias metadata will be updated differently. The two scenarios are as follows: + + - The alias only points to a single index with `is_write_index` not configured (defaults to `null`). + +In this scenario, the rollover alias will be added to the newly created index, and removed
+ + - The alias points to one or more indices with `is_write_index` set to `true` on the index to be rolled over (the write index). + +In this scenario, the write index will have its rollover alias' `is_write_index` set to `false`, while the newly created index +will now have the rollover alias pointing to it as the write index with `is_write_index` as `true`. [source,js] @@ -231,3 +240,98 @@ POST /logs_write/_rollover?dry_run Because the rollover operation creates a new index to rollover to, the <> setting on index creation applies to the rollover action as well. + +[[indices-rollover-is-write-index]] +[float] +=== Write Index Alias Behavior + +The rollover alias when rolling over a write index that has `is_write_index` explicitly set to `true` is not +swapped during rollover actions. Since having an alias point to multiple indices is ambiguous in distinguishing +which is the correct write index to roll over, it is not valid to rollover an alias that points to multiple indices. +For this reason, the default behavior is to swap which index is being pointed to by the write-oriented alias. This +was `logs_write` in some of the above examples. Since setting `is_write_index` enables an alias to point to multiple indices +while also being explicit as to which is the write index that rollover should target, removing the alias from the rolled over +index is not necessary. This simplifies things by allowing for one alias to behave both as the write and read aliases for +indices that are being managed with Rollover. + +Look at the behavior of the aliases in the following example where `is_write_index` is set on the rolled over index. 
+ +[source,js] +-------------------------------------------------- +PUT my_logs_index-000001 +{ + "aliases": { + "logs": { "is_write_index": true } <1> + } +} + +PUT logs/_doc/1 +{ + "message": "a dummy log" +} + +POST logs/_refresh + +POST /logs/_rollover +{ + "conditions": { + "max_docs": "1" + } +} + +PUT logs/_doc/2 <2> +{ + "message": "a newer log" +} +-------------------------------------------------- +// CONSOLE +<1> configures `my_logs_index` as the write index for the `logs` alias +<2> newly indexed documents against the `logs` alias will write to the new index + +[source,js] +-------------------------------------------------- +{ + "_index" : "my_logs_index-000002", + "_type" : "_doc", + "_id" : "2", + "_version" : 1, + "result" : "created", + "_shards" : { + "total" : 2, + "successful" : 1, + "failed" : 0 + }, + "_seq_no" : 0, + "_primary_term" : 1 +} +-------------------------------------------------- +// TESTRESPONSE + +////////////////////////// +[source,js] +-------------------------------------------------- +GET _alias +-------------------------------------------------- +// CONSOLE +// TEST[continued] +////////////////////////// + +After the rollover, the alias metadata for the two indices will have the `is_write_index` setting +reflect each index's role, with the newly created index as the write index. + +[source,js] +-------------------------------------------------- +{ + "my_logs_index-000002": { + "aliases": { + "logs": { "is_write_index": true } + } + }, + "my_logs_index-000001": { + "aliases": { + "logs": { "is_write_index" : false } + } + } +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index fa11cb257d4..4be54691624 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -5,10 +5,10 @@ The fuzzy query uses similarity based on Levenshtein edit distance. 
==== String fields -The `fuzzy` query generates all possible matching terms that are within the +The `fuzzy` query generates matching terms that are within the maximum edit distance specified in `fuzziness` and then checks the term dictionary to find out which of those generated terms actually exist in the -index. +index. The final query uses up to `max_expansions` matching terms. Here is a simple example: diff --git a/docs/reference/setup/logging-config.asciidoc b/docs/reference/setup/logging-config.asciidoc index d35b3db2df7..ac949bc941a 100644 --- a/docs/reference/setup/logging-config.asciidoc +++ b/docs/reference/setup/logging-config.asciidoc @@ -25,7 +25,7 @@ appender.rolling.type = RollingFile <1> appender.rolling.name = rolling appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log <2> appender.rolling.layout.type = PatternLayout -appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n +appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz <3> appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy <4> diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java index 7bb3ebfba6e..88cba512b86 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java @@ -68,8 +68,7 @@ public final class GrokProcessor extends AbstractProcessor { throw new IllegalArgumentException("Provided Grok expressions do not match field value: [" + fieldValue + "]"); } - matches.entrySet().stream() - .forEach((e) -> 
ingestDocument.setFieldValue(e.getKey(), e.getValue())); + matches.forEach(ingestDocument::setFieldValue); if (traceMatch) { if (matchPatterns.size() > 1) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java index 500f1aa1719..34fd4ec27a3 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java @@ -67,10 +67,9 @@ public class ConvertProcessorTests extends ESTestCase { assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(10)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32370") public void testConvertIntHexError() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - String value = "0x" + randomAlphaOfLengthBetween(1, 10); + String value = "0xnotanumber"; String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); @@ -138,10 +137,9 @@ public class ConvertProcessorTests extends ESTestCase { assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(10L)); } - @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/32370") public void testConvertLongHexError() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - String value = "0x" + randomAlphaOfLengthBetween(1, 10); + String value = "0xnotanumber"; String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value); Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, 
fieldName, Type.LONG, false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 03345fcfff3..807b4409d7a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -69,17 +69,14 @@ final class Compiler { /** * A secure class loader used to define Painless scripts. */ - static final class Loader extends SecureClassLoader { + final class Loader extends SecureClassLoader { private final AtomicInteger lambdaCounter = new AtomicInteger(0); - private final PainlessLookup painlessLookup; /** * @param parent The parent ClassLoader. */ - Loader(ClassLoader parent, PainlessLookup painlessLookup) { + Loader(ClassLoader parent) { super(parent); - - this.painlessLookup = painlessLookup; } /** @@ -90,6 +87,15 @@ final class Compiler { */ @Override public Class findClass(String name) throws ClassNotFoundException { + if (scriptClass.getName().equals(name)) { + return scriptClass; + } + if (factoryClass != null && factoryClass.getName().equals(name)) { + return factoryClass; + } + if (statefulFactoryClass != null && statefulFactoryClass.getName().equals(name)) { + return statefulFactoryClass; + } Class found = painlessLookup.getClassFromBinaryName(name); return found != null ? found : super.findClass(name); @@ -139,13 +145,23 @@ final class Compiler { * {@link Compiler}'s specified {@link PainlessLookup}. */ public Loader createLoader(ClassLoader parent) { - return new Loader(parent, painlessLookup); + return new Loader(parent); } /** - * The class/interface the script is guaranteed to derive/implement. + * The class/interface the script will implement. 
*/ - private final Class base; + private final Class scriptClass; + + /** + * The class/interface to create the {@code scriptClass} instance. + */ + private final Class factoryClass; + + /** + * An optional class/interface to create the {@code factoryClass} instance. + */ + private final Class statefulFactoryClass; /** * The whitelist the script will use. @@ -154,11 +170,15 @@ final class Compiler { /** * Standard constructor. - * @param base The class/interface the script is guaranteed to derive/implement. + * @param scriptClass The class/interface the script will implement. + * @param factoryClass An optional class/interface to create the {@code scriptClass} instance. + * @param statefulFactoryClass An optional class/interface to create the {@code factoryClass} instance. * @param painlessLookup The whitelist the script will use. */ - Compiler(Class base, PainlessLookup painlessLookup) { - this.base = base; + Compiler(Class scriptClass, Class factoryClass, Class statefulFactoryClass, PainlessLookup painlessLookup) { + this.scriptClass = scriptClass; + this.factoryClass = factoryClass; + this.statefulFactoryClass = statefulFactoryClass; this.painlessLookup = painlessLookup; } @@ -177,7 +197,7 @@ final class Compiler { " plugin if a script longer than this length is a requirement."); } - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, base); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, null); root.analyze(painlessLookup); @@ -209,7 +229,7 @@ final class Compiler { " plugin if a script longer than this length is a requirement."); } - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, base); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); SSource root = Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), name, source, settings, 
painlessLookup, debugStream); root.analyze(painlessLookup); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 8a90f53b4fd..9be2559e67a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -368,7 +368,7 @@ public final class Def { ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length); } else { // whitelist lookup - ref = new FunctionRef(painlessLookup, clazz, type, call, captures.length); + ref = FunctionRef.resolveFromLookup(painlessLookup, clazz, type, call, captures.length); } final CallSite callSite = LambdaBootstrap.lambdaBootstrap( methodHandlesLookup, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 7b7e8d25f39..0da91438326 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -20,13 +20,16 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; import java.lang.invoke.MethodType; +import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; +import java.util.List; import static org.elasticsearch.painless.WriterConstants.CLASS_NAME; import static org.objectweb.asm.Opcodes.H_INVOKEINTERFACE; @@ -59,8 +62,10 @@ public class FunctionRef { /** interface method */ public final PainlessMethod interfaceMethod; - /** delegate method */ - 
public final PainlessMethod delegateMethod; + /** delegate method type parameters */ + public final List> delegateTypeParameters; + /** delegate method return type */ + public final Class delegateReturnType; /** factory method type descriptor */ public final String factoryDescriptor; @@ -80,9 +85,47 @@ public class FunctionRef { * @param call the right hand side of a method reference expression * @param numCaptures number of captured arguments */ - public FunctionRef(PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { - this(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, - lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); + public static FunctionRef resolveFromLookup( + PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { + + if ("new".equals(call)) { + return new FunctionRef(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, + lookup(painlessLookup, expected, type), numCaptures); + } else { + return new FunctionRef(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, + lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); + } + } + + /** + * Creates a new FunctionRef (already resolved) + * @param expected functional interface type to implement + * @param interfaceMethod functional interface method + * @param delegateConstructor implementation constructor + * @param numCaptures number of captured arguments + */ + public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessConstructor delegateConstructor, int numCaptures) { + Constructor javaConstructor = delegateConstructor.javaConstructor; + MethodType delegateMethodType = delegateConstructor.methodType; + + interfaceMethodName = interfaceMethod.name; + factoryMethodType = MethodType.methodType(expected, + delegateMethodType.dropParameterTypes(numCaptures, 
delegateMethodType.parameterCount())); + interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); + + delegateClassName = javaConstructor.getDeclaringClass().getName(); + isDelegateInterface = false; + delegateInvokeType = H_NEWINVOKESPECIAL; + delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; + this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); + + this.interfaceMethod = interfaceMethod; + delegateTypeParameters = delegateConstructor.typeParameters; + delegateReturnType = void.class; + + factoryDescriptor = factoryMethodType.toMethodDescriptorString(); + interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); + delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); } /** @@ -112,9 +155,7 @@ public class FunctionRef { isDelegateInterface = delegateMethod.target.isInterface(); } - if ("".equals(delegateMethod.name)) { - delegateInvokeType = H_NEWINVOKESPECIAL; - } else if (Modifier.isStatic(delegateMethod.modifiers)) { + if (Modifier.isStatic(delegateMethod.modifiers)) { delegateInvokeType = H_INVOKESTATIC; } else if (delegateMethod.target.isInterface()) { delegateInvokeType = H_INVOKEINTERFACE; @@ -126,7 +167,8 @@ public class FunctionRef { this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); this.interfaceMethod = interfaceMethod; - this.delegateMethod = delegateMethod; + delegateTypeParameters = delegateMethod.arguments; + delegateReturnType = delegateMethod.rtn; factoryDescriptor = factoryMethodType.toMethodDescriptorString(); interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); @@ -151,13 +193,37 @@ public class FunctionRef { isDelegateInterface = false; this.interfaceMethod = null; - delegateMethod = null; + delegateTypeParameters = null; + delegateReturnType = null; factoryDescriptor = null; interfaceType = null; delegateType = null; } + /** + * Looks up {@code type} from the 
whitelist, and returns a matching constructor. + */ + private static PainlessConstructor lookup(PainlessLookup painlessLookup, Class expected, String type) { + // check its really a functional interface + // for e.g. Comparable + PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; + if (method == null) { + throw new IllegalArgumentException("Cannot convert function reference [" + type + "::new] " + + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); + } + + // lookup requested constructor + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); + PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.arguments.size())); + + if (impl == null) { + throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]"); + } + + return impl; + } + /** * Looks up {@code type::call} from the whitelist, and returns a matching method. 
*/ @@ -174,27 +240,22 @@ public class FunctionRef { // lookup requested method PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); final PainlessMethod impl; - // ctor ref - if ("new".equals(call)) { - impl = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", method.arguments.size())); - } else { - // look for a static impl first - PainlessMethod staticImpl = - struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); - if (staticImpl == null) { - // otherwise a virtual impl - final int arity; - if (receiverCaptured) { - // receiver captured - arity = method.arguments.size(); - } else { - // receiver passed - arity = method.arguments.size() - 1; - } - impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); + // look for a static impl first + PainlessMethod staticImpl = + struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); + if (staticImpl == null) { + // otherwise a virtual impl + final int arity; + if (receiverCaptured) { + // receiver captured + arity = method.arguments.size(); } else { - impl = staticImpl; + // receiver passed + arity = method.arguments.size() - 1; } + impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); + } else { + impl = staticImpl; } if (impl == null) { throw new IllegalArgumentException("Unknown reference [" + type + "::" + call + "] matching " + diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java index 3fc8554b271..d95dc426688 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java @@ -36,7 +36,6 @@ import java.security.AccessController; 
import java.security.PrivilegedAction; import static java.lang.invoke.MethodHandles.Lookup; -import static org.elasticsearch.painless.Compiler.Loader; import static org.elasticsearch.painless.WriterConstants.CLASS_VERSION; import static org.elasticsearch.painless.WriterConstants.CTOR_METHOD_NAME; import static org.elasticsearch.painless.WriterConstants.DELEGATE_BOOTSTRAP_HANDLE; @@ -207,7 +206,7 @@ public final class LambdaBootstrap { MethodType delegateMethodType, int isDelegateInterface) throws LambdaConversionException { - Loader loader = (Loader)lookup.lookupClass().getClassLoader(); + Compiler.Loader loader = (Compiler.Loader)lookup.lookupClass().getClassLoader(); String lambdaClassName = Type.getInternalName(lookup.lookupClass()) + "$$Lambda" + loader.newLambdaIdentifier(); Type lambdaClassType = Type.getObjectType(lambdaClassName); Type delegateClassType = Type.getObjectType(delegateClassName.replace('.', '/')); @@ -457,11 +456,11 @@ public final class LambdaBootstrap { } /** - * Defines the {@link Class} for the lambda class using the same {@link Loader} + * Defines the {@link Class} for the lambda class using the same {@link Compiler.Loader} * that originally defined the class for the Painless script. 
*/ private static Class createLambdaClass( - Loader loader, + Compiler.Loader loader, ClassWriter cw, Type lambdaClassType) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index a5a9823d130..3a2a6d1452d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -102,10 +102,10 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr for (Map.Entry, List> entry : contexts.entrySet()) { ScriptContext context = entry.getKey(); if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) { - contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, + contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, null, null, PainlessLookupBuilder.buildFromWhitelists(entry.getValue()))); } else { - contextsToCompilers.put(context, new Compiler(context.instanceClazz, + contextsToCompilers.put(context, new Compiler(context.instanceClazz, context.factoryClazz, context.statefulFactoryClazz, PainlessLookupBuilder.buildFromWhitelists(entry.getValue()))); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 24dcf0ebdba..835bfb5c505 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -24,7 +24,8 @@ import java.util.Collections; import java.util.Map; public final class PainlessClass { - public final Map constructors; + public final Map constructors; + public final Map staticMethods; public 
final Map methods; @@ -36,13 +37,14 @@ public final class PainlessClass { public final PainlessMethod functionalMethod; - PainlessClass(Map constructors, + PainlessClass(Map constructors, Map staticMethods, Map methods, Map staticFields, Map fields, Map getterMethodHandles, Map setterMethodHandles, PainlessMethod functionalMethod) { this.constructors = Collections.unmodifiableMap(constructors); + this.staticMethods = Collections.unmodifiableMap(staticMethods); this.methods = Collections.unmodifiableMap(methods); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 2f41ed5dca8..866f711ba0f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -24,7 +24,8 @@ import java.util.HashMap; import java.util.Map; final class PainlessClassBuilder { - final Map constructors; + final Map constructors; + final Map staticMethods; final Map methods; @@ -38,6 +39,7 @@ final class PainlessClassBuilder { PainlessClassBuilder() { constructors = new HashMap<>(); + staticMethods = new HashMap<>(); methods = new HashMap<>(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java new file mode 100644 index 00000000000..76597c1a29d --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodType; +import java.lang.reflect.Constructor; +import java.util.List; + +public class PainlessConstructor { + public final Constructor javaConstructor; + public final List> typeParameters; + public final MethodHandle methodHandle; + public final MethodType methodType; + + PainlessConstructor(Constructor javaConstructor, List> typeParameters, MethodHandle methodHandle, MethodType methodType) { + this.javaConstructor = javaConstructor; + this.typeParameters = typeParameters; + this.methodHandle = methodHandle; + this.methodType = methodType; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index ba48fbea734..b0a11dea58a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -40,15 +40,47 @@ import java.util.Map; import java.util.Objects; import java.util.regex.Pattern; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.CONSTRUCTOR_NAME; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME; 
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessConstructorKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCanonicalTypeNames; -public class PainlessLookupBuilder { +public final class PainlessLookupBuilder { + + private static class PainlessConstructorCacheKey { + + private final Class targetType; + private final List> typeParameters; + + private PainlessConstructorCacheKey(Class targetType, List> typeParameters) { + this.targetType = targetType; + this.typeParameters = Collections.unmodifiableList(typeParameters); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessConstructorCacheKey that = (PainlessConstructorCacheKey)object; + + return Objects.equals(targetType, that.targetType) && + Objects.equals(typeParameters, that.typeParameters); + } + + @Override + public int hashCode() { + return Objects.hash(targetType, typeParameters); + } + } private static class PainlessMethodCacheKey { @@ -120,8 +152,9 @@ public class PainlessLookupBuilder { } } - private static final Map painlessMethodCache = new HashMap<>(); - private static final Map painlessFieldCache = new HashMap<>(); + private static final Map painlessConstuctorCache = new HashMap<>(); + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); private static final Pattern 
METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); @@ -343,11 +376,10 @@ public class PainlessLookupBuilder { "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); } - String painlessMethodKey = buildPainlessMethodKey(CONSTRUCTOR_NAME, typeParametersSize); - PainlessMethod painlessConstructor = painlessClassBuilder.constructors.get(painlessMethodKey); + String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize); + PainlessConstructor painlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); if (painlessConstructor == null) { - org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor); MethodHandle methodHandle; try { @@ -359,17 +391,16 @@ public class PainlessLookupBuilder { MethodType methodType = methodHandle.type(); - painlessConstructor = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, CONSTRUCTOR_NAME, typeParameters), - key -> new PainlessMethod(CONSTRUCTOR_NAME, targetClass, null, void.class, typeParameters, - asmConstructor, javaConstructor.getModifiers(), methodHandle, methodType) + painlessConstructor = painlessConstuctorCache.computeIfAbsent( + new PainlessConstructorCacheKey(targetClass, typeParameters), + key -> new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType) ); - painlessClassBuilder.constructors.put(painlessMethodKey, painlessConstructor); - } else if (painlessConstructor.arguments.equals(typeParameters) == false){ + painlessClassBuilder.constructors.put(painlessConstructorKey, painlessConstructor); + } else if (painlessConstructor.typeParameters.equals(typeParameters) == false){ throw new IllegalArgumentException("cannot have constructors " + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + 
typesToCanonicalTypeNames(painlessConstructor.arguments) + "] " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.typeParameters) + "] " + "with the same arity and different type parameters"); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 86d3f876638..0a181c5f1b0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -336,6 +336,13 @@ public final class PainlessLookupUtility { type == String.class; } + /** + * Constructs a painless constructor key used to lookup painless constructors from a painless class. + */ + public static String buildPainlessConstructorKey(int constructorArity) { + return CONSTRUCTOR_NAME + "/" + constructorArity; + } + /** * Constructs a painless method key used to lookup painless methods from a painless class. 
*/ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index 7b35bc1b48e..a4f7b130045 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -77,18 +77,18 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef(locals.getPainlessLookup(), expected, + ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { Class from = ref.interfaceMethod.arguments.get(i); - Class to = ref.delegateMethod.arguments.get(i); + Class to = ref.delegateTypeParameters.get(i); AnalyzerCaster.getLegalCast(location, from, to, false, true); } if (ref.interfaceMethod.rtn != void.class) { - AnalyzerCaster.getLegalCast(location, ref.delegateMethod.rtn, ref.interfaceMethod.rtn, false, true); + AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.rtn, false, true); } } catch (IllegalArgumentException e) { throw createError(e); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index d787db5d41c..23fb8a4180e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -90,7 +90,7 @@ public final class EFunctionRef extends AExpression implements ILambda { } } else { // 
whitelist lookup - ref = new FunctionRef(locals.getPainlessLookup(), expected, type, call, 0); + ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, type, call, 0); } } catch (IllegalArgumentException e) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index e0af653d209..bd4bea95ea0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -23,10 +23,12 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import java.util.ArrayList; import java.util.List; @@ -38,7 +40,7 @@ import java.util.Set; public final class EListInit extends AExpression { private final List values; - private PainlessMethod constructor = null; + private PainlessConstructor constructor = null; private PainlessMethod method = null; public EListInit(Location location, List values) { @@ -62,8 +64,8 @@ public final class EListInit extends AExpression { actual = ArrayList.class; - constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors - .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get( + PainlessLookupUtility.buildPainlessConstructorKey(0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); @@ -92,7 
+94,8 @@ public final class EListInit extends AExpression { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(Type.getType(constructor.target), constructor.method); + writer.invokeConstructor( + Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); for (AExpression value : values) { writer.dup(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index d81f08dc3cc..5bef62a4b18 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -23,10 +23,12 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import java.util.HashMap; import java.util.List; @@ -39,7 +41,7 @@ public final class EMapInit extends AExpression { private final List keys; private final List values; - private PainlessMethod constructor = null; + private PainlessConstructor constructor = null; private PainlessMethod method = null; public EMapInit(Location location, List keys, List values) { @@ -68,8 +70,8 @@ public final class EMapInit extends AExpression { actual = HashMap.class; - constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors - .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get( 
+ PainlessLookupUtility.buildPainlessConstructorKey(0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); @@ -111,7 +113,8 @@ public final class EMapInit extends AExpression { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(Type.getType(constructor.target), constructor.method); + writer.invokeConstructor( + Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); for (int index = 0; index < keys.size(); ++index) { AExpression key = keys.get(index); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index f092a17c9fc..4e08f257386 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -24,9 +24,10 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessLookupUtility; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import java.util.List; import java.util.Objects; @@ -40,7 +41,7 @@ public final class ENewObj extends AExpression { private final String type; private final List arguments; - private PainlessMethod constructor; + private PainlessConstructor constructor; public ENewObj(Location location, String type, List arguments) { super(location); @@ -65,16 +66,16 @@ public final class ENewObj extends AExpression { } PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); - constructor = 
struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", arguments.size())); + constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(arguments.size())); if (constructor != null) { - Class[] types = new Class[constructor.arguments.size()]; - constructor.arguments.toArray(types); + Class[] types = new Class[constructor.typeParameters.size()]; + constructor.typeParameters.toArray(types); - if (constructor.arguments.size() != arguments.size()) { + if (constructor.typeParameters.size() != arguments.size()) { throw createError(new IllegalArgumentException( "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] " + - "expected [" + constructor.arguments.size() + "] arguments, but found [" + arguments.size() + "].")); + "expected [" + constructor.typeParameters.size() + "] arguments, but found [" + arguments.size() + "].")); } for (int argument = 0; argument < arguments.size(); ++argument) { @@ -107,7 +108,8 @@ public final class ENewObj extends AExpression { argument.write(writer, globals); } - writer.invokeConstructor(Type.getType(constructor.target), constructor.method); + writer.invokeConstructor( + Type.getType(constructor.javaConstructor.getDeclaringClass()), Method.getMethod(constructor.javaConstructor)); } @Override diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java index c852d5a41de..c68302bde56 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java @@ -69,7 +69,7 @@ public class BaseClassTests extends ScriptTestCase { } public void testGets() { - Compiler compiler = new Compiler(Gets.class, painlessLookup); + Compiler compiler = new Compiler(Gets.class, null, null, painlessLookup); Map map = new HashMap<>(); 
map.put("s", 1); @@ -87,7 +87,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(); } public void testNoArgs() { - Compiler compiler = new Compiler(NoArgs.class, painlessLookup); + Compiler compiler = new Compiler(NoArgs.class, null, null, painlessLookup); assertEquals(1, ((NoArgs)scriptEngine.compile(compiler, null, "1", emptyMap())).execute()); assertEquals("foo", ((NoArgs)scriptEngine.compile(compiler, null, "'foo'", emptyMap())).execute()); @@ -111,13 +111,13 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(Object arg); } public void testOneArg() { - Compiler compiler = new Compiler(OneArg.class, painlessLookup); + Compiler compiler = new Compiler(OneArg.class, null, null, painlessLookup); Object rando = randomInt(); assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando)); rando = randomAlphaOfLength(5); assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando)); - Compiler noargs = new Compiler(NoArgs.class, painlessLookup); + Compiler noargs = new Compiler(NoArgs.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, () -> scriptEngine.compile(noargs, null, "doc", emptyMap())); assertEquals("Variable [doc] is not defined.", e.getMessage()); @@ -132,7 +132,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(String[] arg); } public void testArrayArg() { - Compiler compiler = new Compiler(ArrayArg.class, painlessLookup); + Compiler compiler = new Compiler(ArrayArg.class, null, null, painlessLookup); String rando = randomAlphaOfLength(5); assertEquals(rando, ((ArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new String[] {rando, "foo"})); } @@ -142,7 +142,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(int[] arg); } public void testPrimitiveArrayArg() 
{ - Compiler compiler = new Compiler(PrimitiveArrayArg.class, painlessLookup); + Compiler compiler = new Compiler(PrimitiveArrayArg.class, null, null, painlessLookup); int rando = randomInt(); assertEquals(rando, ((PrimitiveArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new int[] {rando, 10})); } @@ -152,7 +152,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(Object[] arg); } public void testDefArrayArg() { - Compiler compiler = new Compiler(DefArrayArg.class, painlessLookup); + Compiler compiler = new Compiler(DefArrayArg.class, null, null, painlessLookup); Object rando = randomInt(); assertEquals(rando, ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new Object[] {rando, 10})); rando = randomAlphaOfLength(5); @@ -170,7 +170,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract boolean needsD(); } public void testManyArgs() { - Compiler compiler = new Compiler(ManyArgs.class, painlessLookup); + Compiler compiler = new Compiler(ManyArgs.class, null, null, painlessLookup); int rando = randomInt(); assertEquals(rando, ((ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0)); assertEquals(10, ((ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).execute(1, 2, 3, 4)); @@ -198,7 +198,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(String... 
arg); } public void testVararg() { - Compiler compiler = new Compiler(VarargTest.class, painlessLookup); + Compiler compiler = new Compiler(VarargTest.class, null, null, painlessLookup); assertEquals("foo bar baz", ((VarargTest)scriptEngine.compile(compiler, null, "String.join(' ', Arrays.asList(arg))", emptyMap())) .execute("foo", "bar", "baz")); } @@ -214,7 +214,7 @@ public class BaseClassTests extends ScriptTestCase { } } public void testDefaultMethods() { - Compiler compiler = new Compiler(DefaultMethods.class, painlessLookup); + Compiler compiler = new Compiler(DefaultMethods.class, null, null, painlessLookup); int rando = randomInt(); assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0)); assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).executeWithASingleOne(rando, 0, 0)); @@ -228,7 +228,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract void execute(Map map); } public void testReturnsVoid() { - Compiler compiler = new Compiler(ReturnsVoid.class, painlessLookup); + Compiler compiler = new Compiler(ReturnsVoid.class, null, null, painlessLookup); Map map = new HashMap<>(); ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.a = 'foo'", emptyMap())).execute(map); assertEquals(singletonMap("a", "foo"), map); @@ -247,7 +247,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract boolean execute(); } public void testReturnsPrimitiveBoolean() { - Compiler compiler = new Compiler(ReturnsPrimitiveBoolean.class, painlessLookup); + Compiler compiler = new Compiler(ReturnsPrimitiveBoolean.class, null, null, painlessLookup); assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true", emptyMap())).execute()); assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "false", emptyMap())).execute()); @@ -289,7 +289,7 @@ public class BaseClassTests extends ScriptTestCase { 
public abstract int execute(); } public void testReturnsPrimitiveInt() { - Compiler compiler = new Compiler(ReturnsPrimitiveInt.class, painlessLookup); + Compiler compiler = new Compiler(ReturnsPrimitiveInt.class, null, null, painlessLookup); assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1", emptyMap())).execute()); assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1L", emptyMap())).execute()); @@ -331,7 +331,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract float execute(); } public void testReturnsPrimitiveFloat() { - Compiler compiler = new Compiler(ReturnsPrimitiveFloat.class, painlessLookup); + Compiler compiler = new Compiler(ReturnsPrimitiveFloat.class, null, null, painlessLookup); assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute(), 0); assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "(float) 1.1d", emptyMap())).execute(), 0); @@ -362,7 +362,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract double execute(); } public void testReturnsPrimitiveDouble() { - Compiler compiler = new Compiler(ReturnsPrimitiveDouble.class, painlessLookup); + Compiler compiler = new Compiler(ReturnsPrimitiveDouble.class, null, null, painlessLookup); assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1", emptyMap())).execute(), 0); assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute(), 0); @@ -396,7 +396,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(String foo); } public void testNoArgumentsConstant() { - Compiler compiler = new Compiler(NoArgumentsConstant.class, painlessLookup); + Compiler compiler = new Compiler(NoArgumentsConstant.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> 
scriptEngine.compile(compiler, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith( @@ -409,7 +409,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(String foo); } public void testWrongArgumentsConstant() { - Compiler compiler = new Compiler(WrongArgumentsConstant.class, painlessLookup); + Compiler compiler = new Compiler(WrongArgumentsConstant.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith( @@ -422,7 +422,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(String foo); } public void testWrongLengthOfArgumentConstant() { - Compiler compiler = new Compiler(WrongLengthOfArgumentConstant.class, painlessLookup); + Compiler compiler = new Compiler(WrongLengthOfArgumentConstant.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith("[" + WrongLengthOfArgumentConstant.class.getName() + "#ARGUMENTS] has length [2] but [" @@ -434,7 +434,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(UnknownArgType foo); } public void testUnknownArgType() { - Compiler compiler = new Compiler(UnknownArgType.class, painlessLookup); + Compiler compiler = new Compiler(UnknownArgType.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertEquals("[foo] is of unknown type [" + UnknownArgType.class.getName() + ". 
Painless interfaces can only accept arguments " @@ -446,7 +446,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract UnknownReturnType execute(String foo); } public void testUnknownReturnType() { - Compiler compiler = new Compiler(UnknownReturnType.class, painlessLookup); + Compiler compiler = new Compiler(UnknownReturnType.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertEquals("Painless can only implement execute methods returning a whitelisted type but [" + UnknownReturnType.class.getName() @@ -458,7 +458,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(UnknownArgTypeInArray[] foo); } public void testUnknownArgTypeInArray() { - Compiler compiler = new Compiler(UnknownArgTypeInArray.class, painlessLookup); + Compiler compiler = new Compiler(UnknownArgTypeInArray.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertEquals("[foo] is of unknown type [" + UnknownArgTypeInArray.class.getName() + ". 
Painless interfaces can only accept " @@ -470,7 +470,7 @@ public class BaseClassTests extends ScriptTestCase { public abstract Object execute(boolean foo); } public void testTwoExecuteMethods() { - Compiler compiler = new Compiler(TwoExecuteMethods.class, painlessLookup); + Compiler compiler = new Compiler(TwoExecuteMethods.class, null, null, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "null", emptyMap())); assertEquals("Painless can only implement interfaces that have a single method named [execute] but [" diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index 48af3898e09..ae33ebfb6e9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -40,7 +40,7 @@ final class Debugger { PrintWriter outputWriter = new PrintWriter(output); Textifier textifier = new Textifier(); try { - new Compiler(iface, PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS)) + new Compiler(iface, null, null, PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS)) .compile("", source, settings, textifier); } catch (RuntimeException e) { textifier.print(outputWriter); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index ff0d4231175..65429154301 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLoggerFactory; import 
org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; @@ -57,7 +58,8 @@ public class PainlessDocGenerator { private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class); private static final Comparator FIELD_NAME = comparing(f -> f.name); private static final Comparator METHOD_NAME = comparing(m -> m.name); - private static final Comparator NUMBER_OF_ARGS = comparing(m -> m.arguments.size()); + private static final Comparator METHOD_NUMBER_OF_PARAMS = comparing(m -> m.arguments.size()); + private static final Comparator CONSTRUCTOR_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size()); public static void main(String[] args) throws IOException { Path apiRootPath = PathUtils.get(args[0]); @@ -103,12 +105,15 @@ public class PainlessDocGenerator { Consumer documentField = field -> PainlessDocGenerator.documentField(typeStream, field); Consumer documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method); + Consumer documentConstructor = + constructor -> PainlessDocGenerator.documentConstructor(typeStream, constructor); struct.staticFields.values().stream().sorted(FIELD_NAME).forEach(documentField); struct.fields.values().stream().sorted(FIELD_NAME).forEach(documentField); - struct.staticMethods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(documentMethod); - struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); + struct.staticMethods.values().stream().sorted( + METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(documentMethod); + struct.constructors.values().stream().sorted(CONSTRUCTOR_NUMBER_OF_PARAMS).forEach(documentConstructor); Map> inherited = new 
TreeMap<>(); - struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { + struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(method -> { if (method.target == clazz) { documentMethod(typeStream, method); } else { @@ -164,6 +169,41 @@ public class PainlessDocGenerator { stream.println(); } + /** + * Document a constructor. + */ + private static void documentConstructor(PrintStream stream, PainlessConstructor constructor) { + stream.print("* ++[["); + emitAnchor(stream, constructor); + stream.print("]]"); + + String javadocRoot = javadocRoot(constructor.javaConstructor.getDeclaringClass()); + emitJavadocLink(stream, javadocRoot, constructor); + stream.print('['); + + stream.print(constructorName(constructor)); + + stream.print("]("); + boolean first = true; + for (Class arg : constructor.typeParameters) { + if (first) { + first = false; + } else { + stream.print(", "); + } + emitType(stream, arg); + } + stream.print(")++"); + + if (javadocRoot.equals("java8")) { + stream.print(" ("); + emitJavadocLink(stream, "java9", constructor); + stream.print("[java 9])"); + } + + stream.println(); + } + /** * Document a method. */ @@ -176,10 +216,8 @@ public class PainlessDocGenerator { stream.print("static "); } - if (false == method.name.equals("")) { - emitType(stream, method.rtn); - stream.print(' '); - } + emitType(stream, method.rtn); + stream.print(' '); String javadocRoot = javadocRoot(method); emitJavadocLink(stream, javadocRoot, method); @@ -216,6 +254,17 @@ public class PainlessDocGenerator { stream.print(PainlessLookupUtility.typeToCanonicalTypeName(clazz).replace('.', '-')); } + /** + * Anchor text for a {@link PainlessConstructor}. 
+ */ + private static void emitAnchor(PrintStream stream, PainlessConstructor constructor) { + emitAnchor(stream, constructor.javaConstructor.getDeclaringClass()); + stream.print('-'); + stream.print(constructorName(constructor)); + stream.print('-'); + stream.print(constructor.typeParameters.size()); + } + /** * Anchor text for a {@link PainlessMethod}. */ @@ -236,8 +285,12 @@ public class PainlessDocGenerator { stream.print(field.name); } + private static String constructorName(PainlessConstructor constructor) { + return PainlessLookupUtility.typeToCanonicalTypeName(constructor.javaConstructor.getDeclaringClass()); + } + private static String methodName(PainlessMethod method) { - return method.name.equals("") ? PainlessLookupUtility.typeToCanonicalTypeName(method.target) : method.name; + return PainlessLookupUtility.typeToCanonicalTypeName(method.target); } /** @@ -269,6 +322,34 @@ public class PainlessDocGenerator { } } + /** + * Emit an external link to Javadoc for a {@link PainlessMethod}. + * + * @param root name of the root uri variable + */ + private static void emitJavadocLink(PrintStream stream, String root, PainlessConstructor constructor) { + stream.print("link:{"); + stream.print(root); + stream.print("-javadoc}/"); + stream.print(classUrlPath(constructor.javaConstructor.getDeclaringClass())); + stream.print(".html#"); + stream.print(constructorName(constructor)); + stream.print("%2D"); + boolean first = true; + for (Class clazz: constructor.typeParameters) { + if (first) { + first = false; + } else { + stream.print("%2D"); + } + stream.print(clazz.getName()); + if (clazz.isArray()) { + stream.print(":A"); + } + } + stream.print("%2D"); + } + /** * Emit an external link to Javadoc for a {@link PainlessMethod}. 
* diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index c1defe56adc..706f2c0b8f8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -27,13 +27,13 @@ import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import java.util.function.Supplier; public class TransportDeleteByQueryAction extends HandledTransportAction { @@ -46,7 +46,7 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); + (Writeable.Reader) DeleteByQueryRequest::new); this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index e54b5f50ae6..1d80c28b585 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.client.RestClientBuilder; import 
org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.action.index.IndexRequest; @@ -104,7 +105,7 @@ public class TransportReindexAction extends HandledTransportAction)ReindexRequest::new); this.threadPool = threadPool; this.clusterService = clusterService; this.scriptService = scriptService; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 34ae3fdd0c6..00d14822ba0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.IdFieldMapper; @@ -43,7 +44,6 @@ import org.elasticsearch.transport.TransportService; import java.util.Map; import java.util.function.BiFunction; -import java.util.function.Supplier; public class TransportUpdateByQueryAction extends HandledTransportAction { @@ -56,7 +56,7 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); + (Writeable.Reader) UpdateByQueryRequest::new); this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 2dc4b59e8d9..97809c9bc8d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -67,19 +67,17 @@ public class RoundTripTests extends ESTestCase { new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, null, query, username, password, headers, socketTimeout, connectTimeout)); } - ReindexRequest tripped = new ReindexRequest(); - roundTrip(reindex, tripped); + ReindexRequest tripped = new ReindexRequest(toInputByteStream(reindex)); assertRequestEquals(reindex, tripped); // Try slices=auto with a version that doesn't support it, which should fail reindex.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES); - Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_6_0_0_alpha1, reindex, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, reindex)); assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. 
Found version [6.0.0-alpha1]", e.getMessage()); // Try regular slices with a version that doesn't support slices=auto, which should succeed - tripped = new ReindexRequest(); reindex.setSlices(between(1, Integer.MAX_VALUE)); - roundTrip(Version.V_6_0_0_alpha1, reindex, tripped); + tripped = new ReindexRequest(toInputByteStream(reindex)); assertRequestEquals(Version.V_6_0_0_alpha1, reindex, tripped); } @@ -89,20 +87,18 @@ public class RoundTripTests extends ESTestCase { if (randomBoolean()) { update.setPipeline(randomAlphaOfLength(5)); } - UpdateByQueryRequest tripped = new UpdateByQueryRequest(); - roundTrip(update, tripped); + UpdateByQueryRequest tripped = new UpdateByQueryRequest(toInputByteStream(update)); assertRequestEquals(update, tripped); assertEquals(update.getPipeline(), tripped.getPipeline()); // Try slices=auto with a version that doesn't support it, which should fail update.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES); - Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_6_0_0_alpha1, update, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, update)); assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. 
Found version [6.0.0-alpha1]", e.getMessage()); // Try regular slices with a version that doesn't support slices=auto, which should succeed - tripped = new UpdateByQueryRequest(); update.setSlices(between(1, Integer.MAX_VALUE)); - roundTrip(Version.V_6_0_0_alpha1, update, tripped); + tripped = new UpdateByQueryRequest(toInputByteStream(update)); assertRequestEquals(update, tripped); assertEquals(update.getPipeline(), tripped.getPipeline()); } @@ -110,19 +106,17 @@ public class RoundTripTests extends ESTestCase { public void testDeleteByQueryRequest() throws IOException { DeleteByQueryRequest delete = new DeleteByQueryRequest(new SearchRequest()); randomRequest(delete); - DeleteByQueryRequest tripped = new DeleteByQueryRequest(); - roundTrip(delete, tripped); + DeleteByQueryRequest tripped = new DeleteByQueryRequest(toInputByteStream(delete)); assertRequestEquals(delete, tripped); // Try slices=auto with a version that doesn't support it, which should fail delete.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES); - Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_6_0_0_alpha1, delete, null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, delete)); assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. 
Found version [6.0.0-alpha1]", e.getMessage()); // Try regular slices with a version that doesn't support slices=auto, which should succeed - tripped = new DeleteByQueryRequest(); delete.setSlices(between(1, Integer.MAX_VALUE)); - roundTrip(Version.V_6_0_0_alpha1, delete, tripped); + tripped = new DeleteByQueryRequest(toInputByteStream(delete)); assertRequestEquals(delete, tripped); } @@ -198,23 +192,24 @@ public class RoundTripTests extends ESTestCase { request.setTaskId(new TaskId(randomAlphaOfLength(5), randomLong())); } RethrottleRequest tripped = new RethrottleRequest(); - roundTrip(request, tripped); + // We use readFrom here because Rethrottle does not support the Writeable.Reader interface + tripped.readFrom(toInputByteStream(request)); assertEquals(request.getRequestsPerSecond(), tripped.getRequestsPerSecond(), 0.00001); assertArrayEquals(request.getActions(), tripped.getActions()); assertEquals(request.getTaskId(), tripped.getTaskId()); } - private void roundTrip(Streamable example, Streamable empty) throws IOException { - roundTrip(Version.CURRENT, example, empty); + private StreamInput toInputByteStream(Streamable example) throws IOException { + return toInputByteStream(Version.CURRENT, example); } - private void roundTrip(Version version, Streamable example, Streamable empty) throws IOException { + private StreamInput toInputByteStream(Version version, Streamable example) throws IOException { BytesStreamOutput out = new BytesStreamOutput(); out.setVersion(version); example.writeTo(out); StreamInput in = out.bytes().streamInput(); in.setVersion(version); - empty.readFrom(in); + return in; } private Script randomScript() { diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 6d7412f8250..13ec8031c0f 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -23,8 +23,9 @@ esplugin { } dependencies { - compile 'com.microsoft.azure:azure-storage:5.0.0' - compile 
'com.microsoft.azure:azure-keyvault-core:0.8.0' + compile 'com.microsoft.azure:azure-storage:8.0.0' + compile 'com.microsoft.azure:azure-keyvault-core:1.0.0' + compile 'com.google.guava:guava:20.0' compile 'org.apache.commons:commons-lang3:3.4' } @@ -40,6 +41,18 @@ thirdPartyAudit.excludes = [ // Optional and not enabled by Elasticsearch 'org.slf4j.Logger', 'org.slf4j.LoggerFactory', + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', ] check { diff --git a/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 b/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 deleted file mode 100644 index b86c58db842..00000000000 --- a/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35f7ac687462f491d0f8b0d96733dfe347493d70 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 new file mode 100644 index 00000000000..8b44e99aee1 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-keyvault-core-1.0.0.jar.sha1 @@ -0,0 +1 @@ +e89dd5e621e21b753096ec6a03f203c01482c612 \ No newline at end of file diff --git 
a/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 deleted file mode 100644 index 9882cb80204..00000000000 --- a/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ba8f04bfeac08016c0f88423a202d0f3aac03aed \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 new file mode 100644 index 00000000000..4e333ad8241 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-8.0.0.jar.sha1 @@ -0,0 +1 @@ +f6759c16ade4e2a05bc1dfbaf55161b9ed0e78b9 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-20.0.jar.sha1 b/plugins/repository-azure/licenses/guava-20.0.jar.sha1 new file mode 100644 index 00000000000..7b6ae09060b --- /dev/null +++ b/plugins/repository-azure/licenses/guava-20.0.jar.sha1 @@ -0,0 +1 @@ +89507701249388e1ed5ddcf8c41f4ce1be7831ef \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-LICENSE.txt b/plugins/repository-azure/licenses/guava-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-azure/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-azure/licenses/guava-NOTICE.txt b/plugins/repository-azure/licenses/guava-NOTICE.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index 9482182b02d..75bcc3e9c1e 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -61,7 +61,10 @@ import static java.util.Collections.emptyMap; public class AzureStorageService extends AbstractComponent { public static final ByteSizeValue MIN_CHUNK_SIZE = new ByteSizeValue(1, ByteSizeUnit.BYTES); - public static final ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); + /** + * {@link com.microsoft.azure.storage.blob.BlobConstants#MAX_SINGLE_UPLOAD_BLOB_SIZE_IN_BYTES} + */ + public static final ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(256, ByteSizeUnit.MB); // 'package' for testing volatile Map storageSettings = emptyMap(); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index b4b71577cbc..43891a8e9d5 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -104,7 +104,7 @@ public class AzureRepositorySettingsTests extends ESTestCase { assertEquals(AzureStorageService.MAX_CHUNK_SIZE, azureRepository.chunkSize()); // chunk size in settings - int size = randomIntBetween(1, 64); + int size = randomIntBetween(1, 256); azureRepository = azureRepository(Settings.builder().put("chunk_size", size + "mb").build()); assertEquals(new ByteSizeValue(size, ByteSizeUnit.MB), azureRepository.chunkSize()); @@ -120,8 +120,8 @@ public class AzureRepositorySettingsTests extends ESTestCase { // greater than max chunk size not allowed e = expectThrows(IllegalArgumentException.class, () -> - azureRepository(Settings.builder().put("chunk_size", "65mb").build())); - assertEquals("failed to parse value [65mb] for setting [chunk_size], must be <= [64mb]", e.getMessage()); + azureRepository(Settings.builder().put("chunk_size", "257mb").build())); + assertEquals("failed to parse value [257mb] for setting [chunk_size], must be <= [256mb]", e.getMessage()); } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java index 0dc9ea0a170..b034cd08d77 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java @@ -87,7 +87,7 @@ public class EvilBootstrapChecksTests extends ESTestCase { final boolean enforceLimits = randomBoolean(); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> 
BootstrapChecks.check(new BootstrapContext(Settings.EMPTY, null), enforceLimits, emptyList(), "testInvalidValue")); + () -> BootstrapChecks.check(new BootstrapContext(Settings.EMPTY, null), enforceLimits, emptyList())); final Matcher matcher = containsString( "[es.enforce.bootstrap.checks] must be [true] but was [" + value + "]"); assertThat(e, hasToString(matcher)); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index d4bc754689e..9285abad9b9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -56,6 +56,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.lessThan; @@ -319,9 +320,11 @@ public class EvilLoggerTests extends ESTestCase { assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength)); for (int i = 0; i < expectedLogLines; i++) { if (prefix == null) { - assertThat(events.get(i), startsWith("test")); + assertThat("Contents of [" + path + "] are wrong", + events.get(i), startsWith("[" + getTestName() + "] test")); } else { - assertThat(events.get(i), startsWith("[" + prefix + "] test")); + assertThat("Contents of [" + path + "] are wrong", + events.get(i), startsWith("[" + getTestName() + "][" + prefix + "] test")); } } } @@ -357,6 +360,24 @@ public class EvilLoggerTests extends ESTestCase { } } + public void testNoNodeNameWarning() throws IOException, UserException { + setupLogging("no_node_name"); + + final String path = + System.getProperty("es.logs.base_path") + + System.getProperty("file.separator") + + System.getProperty("es.logs.cluster_name") + ".log"; 
+ final List events = Files.readAllLines(PathUtils.get(path)); + assertThat(events.size(), equalTo(2)); + final String location = "org.elasticsearch.common.logging.LogConfigurator"; + // the first message is a warning for unsupported configuration files + assertLogLine(events.get(0), Level.WARN, location, "\\[null\\] Some logging configurations have %marker but don't " + + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users " + + "who customize log4j2.properties but will stop this behavior in 7.0. You should manually replace " + + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:"); + assertThat(events.get(1), endsWith("no_node_name/log4j2.properties")); + } + private void setupLogging(final String config) throws IOException, UserException { setupLogging(config, Settings.EMPTY); } diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties index 8b956421458..2c9f48a359a 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -23,10 +23,9 @@ 
appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n +appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n logger.deprecation.name = deprecation logger.deprecation.level = warn logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file logger.deprecation.additivity = false - diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties index ed794cb7c3b..8a9ec160fb1 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties @@ -1,7 +1,7 @@ appender.console3.type = Console appender.console3.name = console3 appender.console3.layout.type = PatternLayout -appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n logger.third.name = third logger.third.level = debug diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties index fd7af2ce731..388c9f9b2fc 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/deprecation/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout 
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -17,7 +17,7 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n +appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n logger.deprecation.name = deprecation logger.deprecation.level = warn diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties index 8553ec5e791..d036e571cf0 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/find_appender/log4j2.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n logger.has_console_appender.name = has_console_appender logger.has_console_appender.level = trace diff --git 
a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties index 5fa85b5d156..ee59755a2d7 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/location_info/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties new file mode 100644 index 00000000000..fd7af2ce731 --- /dev/null +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/no_node_name/log4j2.properties @@ -0,0 +1,25 @@ +appender.console.type = Console +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n + +appender.file.type = File +appender.file.name = file +appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log +appender.file.layout.type = PatternLayout +appender.file.layout.pattern = [%p][%l] %marker%m%n + +rootLogger.level = info 
+rootLogger.appenderRef.console.ref = console +rootLogger.appenderRef.file.ref = file + +appender.deprecation_file.type = File +appender.deprecation_file.name = deprecation_file +appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log +appender.deprecation_file.layout.type = PatternLayout +appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n + +logger.deprecation.name = deprecation +logger.deprecation.level = warn +logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file +logger.deprecation.additivity = false diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties index 5dfa369a3bd..199fddc31c6 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/prefix/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = %marker%m%n +appender.file.layout.pattern = [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties index 1acb65511d6..abe4a279dc8 100644 --- 
a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/settings/log4j2.properties @@ -1,13 +1,13 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n appender.file.type = File appender.file.name = file appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.file.layout.type = PatternLayout -appender.file.layout.pattern = [%p][%l] %marker%m%n +appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = console @@ -17,7 +17,7 @@ appender.deprecation_file.type = File appender.deprecation_file.name = deprecation_file appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log appender.deprecation_file.layout.type = PatternLayout -appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n +appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n logger.deprecation.name = org.elasticsearch.deprecation.common.settings logger.deprecation.level = warn diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 3fa046263af..ab05d069003 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -109,8 +109,9 @@ public class TransportRolloverAction extends TransportMasterNodeAction listener) { final MetaData 
metaData = state.metaData(); validate(metaData, rolloverRequest); - final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(rolloverRequest.getAlias()); - final IndexMetaData indexMetaData = aliasOrIndex.getIndices().get(0); + final AliasOrIndex.Alias alias = (AliasOrIndex.Alias) metaData.getAliasAndIndexLookup().get(rolloverRequest.getAlias()); + final IndexMetaData indexMetaData = alias.getWriteIndex(); + final boolean explicitWriteIndex = Boolean.TRUE.equals(indexMetaData.getAliases().get(alias.getAliasName()).writeIndex()); final String sourceProvidedName = indexMetaData.getSettings().get(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, indexMetaData.getIndex().getName()); final String sourceIndexName = indexMetaData.getIndex().getName(); @@ -138,10 +139,15 @@ public class TransportRolloverAction extends TransportMasterNodeAction { - // switch the alias to point to the newly created index - indexAliasesService.indicesAliases( - prepareRolloverAliasesUpdateRequest(sourceIndexName, rolloverIndexName, - rolloverRequest), + final IndicesAliasesClusterStateUpdateRequest aliasesUpdateRequest; + if (explicitWriteIndex) { + aliasesUpdateRequest = prepareRolloverAliasesWriteIndexUpdateRequest(sourceIndexName, + rolloverIndexName, rolloverRequest); + } else { + aliasesUpdateRequest = prepareRolloverAliasesUpdateRequest(sourceIndexName, + rolloverIndexName, rolloverRequest); + } + indexAliasesService.indicesAliases(aliasesUpdateRequest, ActionListener.wrap(aliasClusterStateUpdateResponse -> { if (aliasClusterStateUpdateResponse.isAcknowledged()) { clusterService.submitStateUpdateTask("update_rollover_info", new ClusterStateUpdateTask() { @@ -196,8 +202,19 @@ public class TransportRolloverAction extends TransportMasterNodeAction actions = unmodifiableList(Arrays.asList( - new AliasAction.Add(newIndex, request.getAlias(), null, null, null, null), - new AliasAction.Remove(oldIndex, request.getAlias()))); + new AliasAction.Add(newIndex, request.getAlias(), null, 
null, null, null), + new AliasAction.Remove(oldIndex, request.getAlias()))); + final IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(actions) + .ackTimeout(request.ackTimeout()) + .masterNodeTimeout(request.masterNodeTimeout()); + return updateRequest; + } + + static IndicesAliasesClusterStateUpdateRequest prepareRolloverAliasesWriteIndexUpdateRequest(String oldIndex, String newIndex, + RolloverRequest request) { + List actions = unmodifiableList(Arrays.asList( + new AliasAction.Add(newIndex, request.getAlias(), null, null, null, true), + new AliasAction.Add(oldIndex, request.getAlias(), null, null, null, false))); final IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(actions) .ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()); @@ -244,8 +261,9 @@ public class TransportRolloverAction extends TransportMasterNodeAction Node.NODE_NAME_SETTING.get(environment.settings()))); + Thread.setDefaultUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler()); INSTANCE.setup(true, environment); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 19fdb8837d6..1a028042db2 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.process.ProcessProbe; -import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; import java.io.BufferedReader; @@ -74,8 +73,7 @@ final class BootstrapChecks { combinedChecks.addAll(additionalChecks); check( context, enforceLimits(boundTransportAddress, 
DiscoveryModule.DISCOVERY_TYPE_SETTING.get(context.settings)), - Collections.unmodifiableList(combinedChecks), - Node.NODE_NAME_SETTING.get(context.settings)); + Collections.unmodifiableList(combinedChecks)); } /** @@ -86,14 +84,12 @@ final class BootstrapChecks { * @param context the current node boostrap context * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned * @param checks the checks to execute - * @param nodeName the node name to be used as a logging prefix */ static void check( final BootstrapContext context, final boolean enforceLimits, - final List checks, - final String nodeName) throws NodeValidationException { - check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class, nodeName)); + final List checks) throws NodeValidationException { + check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class)); } /** diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java b/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java index 857ff65b6c2..1ef9b7740c2 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java @@ -27,16 +27,9 @@ import org.elasticsearch.common.logging.Loggers; import java.io.IOError; import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.Objects; -import java.util.function.Supplier; class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { - - private final Supplier loggingPrefixSupplier; - - ElasticsearchUncaughtExceptionHandler(final Supplier loggingPrefixSupplier) { - this.loggingPrefixSupplier = Objects.requireNonNull(loggingPrefixSupplier); - } + private static final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class); @Override public 
void uncaughtException(Thread t, Throwable e) { @@ -70,12 +63,10 @@ class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionH } void onFatalUncaught(final String threadName, final Throwable t) { - final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get()); logger.error(() -> new ParameterizedMessage("fatal error in thread [{}], exiting", threadName), t); } void onNonFatalUncaught(final String threadName, final Throwable t) { - final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get()); logger.warn(() -> new ParameterizedMessage("uncaught exception in thread [{}]", threadName), t); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index dbcb46d9936..e4d48b8df45 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -46,6 +46,8 @@ import java.util.Locale; import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; +import static org.apache.lucene.geo.GeoUtils.orient; + /** * The {@link PolygonBuilder} implements the groundwork to create polygons. This contains * Methods to wrap polygons at the dateline and building shapes from the data held by the @@ -642,14 +644,8 @@ public class PolygonBuilder extends ShapeBuilder { */ private static Edge[] ring(int component, boolean direction, boolean handedness, Coordinate[] points, int offset, Edge[] edges, int toffset, int length, final AtomicBoolean translated) { - // calculate the direction of the points: - // find the point a the top of the set and check its - // neighbors orientation. 
So direction is equivalent - // to clockwise/counterclockwise - final int top = top(points, offset, length); - final int prev = (offset + ((top + length - 1) % length)); - final int next = (offset + ((top + 1) % length)); - boolean orientation = points[offset + prev].x > points[offset + next].x; + + boolean orientation = getOrientation(points, offset, length); // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards @@ -678,6 +674,35 @@ public class PolygonBuilder extends ShapeBuilder { return concat(component, direction ^ orientation, points, offset, edges, toffset, length); } + /** + * @return whether the points are clockwise (true) or anticlockwise (false) + */ + private static boolean getOrientation(Coordinate[] points, int offset, int length) { + // calculate the direction of the points: find the southernmost point + // and check its neighbors orientation. + + final int top = top(points, offset, length); + final int prev = (top + length - 1) % length; + final int next = (top + 1) % length; + + final int determinantSign = orient( + points[offset + prev].x, points[offset + prev].y, + points[offset + top].x, points[offset + top].y, + points[offset + next].x, points[offset + next].y); + + if (determinantSign == 0) { + // Points are collinear, but `top` is not in the middle if so, so the edges either side of `top` are intersecting. + throw new InvalidShapeException("Cannot determine orientation: edges adjacent to (" + + points[offset + top].x + "," + points[offset +top].y + ") coincide"); + } + + return determinantSign < 0; + } + + /** + * @return the (offset) index of the point that is furthest west amongst + * those points that are the furthest south in the set. 
+ */ private static int top(Coordinate[] points, int offset, int length) { int top = 0; // we start at 1 here since top points to 0 for (int i = 1; i < length; i++) { diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 44d7d17b593..caa922b92cf 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -46,6 +46,15 @@ public final class ESLoggerFactory { } public static Logger getLogger(String prefix, Logger logger) { + /* + * In a followup we'll throw an exception if prefix is null or empty + * redirecting folks to LogManager.getLogger. + * + * This and more is tracked in https://github.com/elastic/elasticsearch/issues/32174 + */ + if (prefix == null || prefix.length() == 0) { + return logger; + } return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 01aca53db05..4e3771f3668 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -23,12 +23,16 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.AbstractConfiguration; +import org.apache.logging.log4j.core.config.ConfigurationException; +import org.apache.logging.log4j.core.config.ConfigurationSource; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder; import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory; import 
org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration; import org.apache.logging.log4j.core.config.composite.CompositeConfiguration; +import org.apache.logging.log4j.core.config.plugins.util.PluginManager; import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration; +import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationBuilder; import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; @@ -43,6 +47,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; import java.io.IOException; +import java.io.InputStream; import java.nio.file.FileVisitOption; import java.nio.file.FileVisitResult; import java.nio.file.Files; @@ -50,9 +55,12 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; +import java.util.Collections; import java.util.EnumSet; +import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Properties; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.StreamSupport; @@ -119,6 +127,13 @@ public class LogConfigurator { configure(environment.settings(), environment.configFile(), environment.logsFile()); } + /** + * Load logging plugins so we can have {@code node_name} in the pattern. 
+ */ + public static void loadLog4jPlugins() { + PluginManager.addPackage(LogConfigurator.class.getPackage().getName()); + } + private static void checkErrorListener() { assert errorListenerIsRegistered() : "expected error listener to be registered"; if (error.get()) { @@ -135,6 +150,8 @@ public class LogConfigurator { Objects.requireNonNull(configsPath); Objects.requireNonNull(logsPath); + loadLog4jPlugins(); + setLogConfigurationSystemProperty(logsPath, settings); // we initialize the status logger immediately otherwise Log4j will complain when we try to get the context configureStatusLogger(); @@ -142,7 +159,53 @@ public class LogConfigurator { final LoggerContext context = (LoggerContext) LogManager.getContext(false); final List configurations = new ArrayList<>(); - final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory(); + + /* + * Subclass the properties configurator to hack the new pattern in + * place so users don't have to change log4j2.properties in + * a minor release. In 7.0 we'll remove this and force users to + * change log4j2.properties. If they don't customize log4j2.properties + * then they won't have to do anything anyway. + * + * Everything in this subclass that isn't marked as a hack is copied + * from log4j2's source. 
+ */ + Set locationsWithDeprecatedPatterns = Collections.synchronizedSet(new HashSet<>()); + final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory() { + @Override + public PropertiesConfiguration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) { + final Properties properties = new Properties(); + try (InputStream configStream = source.getInputStream()) { + properties.load(configStream); + } catch (final IOException ioe) { + throw new ConfigurationException("Unable to load " + source.toString(), ioe); + } + // Hack the new pattern into place + for (String name : properties.stringPropertyNames()) { + if (false == name.endsWith(".pattern")) continue; + // Null is weird here but we can't do anything with it so ignore it + String value = properties.getProperty(name); + if (value == null) continue; + // Tests don't need to be changed + if (value.contains("%test_thread_info")) continue; + /* + * Patterns without a marker are sufficiently customized + * that we don't have an opinion about them. 
+ */ + if (false == value.contains("%marker")) continue; + if (false == value.contains("%node_name")) { + locationsWithDeprecatedPatterns.add(source.getLocation()); + properties.setProperty(name, value.replace("%marker", "[%node_name]%marker ")); + } + } + // end hack + return new PropertiesConfigurationBuilder() + .setConfigurationSource(source) + .setRootProperties(properties) + .setLoggerContext(loggerContext) + .build(); + } + }; final Set options = EnumSet.of(FileVisitOption.FOLLOW_LINKS); Files.walkFileTree(configsPath, options, Integer.MAX_VALUE, new SimpleFileVisitor() { @Override @@ -163,6 +226,14 @@ public class LogConfigurator { context.start(new CompositeConfiguration(configurations)); configureLoggerLevels(settings); + + final String deprecatedLocationsString = String.join("\n ", locationsWithDeprecatedPatterns); + if (deprecatedLocationsString.length() > 0) { + LogManager.getLogger(LogConfigurator.class).warn("Some logging configurations have %marker but don't have %node_name. " + + "We will automatically add %node_name to the pattern to ease the migration for users who customize " + + "log4j2.properties but will stop this behavior in 7.0. 
You should manually replace `%node_name` with " + + "`[%node_name]%marker ` in these locations:\n {}", deprecatedLocationsString); + } } private static void configureStatusLogger() { diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 40983c517c7..58ffe277531 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -31,13 +31,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.node.Node; -import java.util.ArrayList; -import java.util.List; import java.util.Map; -import static java.util.Arrays.asList; import static org.elasticsearch.common.util.CollectionUtils.asArrayList; /** @@ -71,29 +67,19 @@ public class Loggers { } public static Logger getLogger(Class clazz, Settings settings, String... prefixes) { - final List prefixesList = prefixesList(settings, prefixes); - return Loggers.getLogger(clazz, prefixesList.toArray(new String[prefixesList.size()])); + return Loggers.getLogger(clazz, prefixes); } public static Logger getLogger(String loggerName, Settings settings, String... prefixes) { - final List prefixesList = prefixesList(settings, prefixes); - return Loggers.getLogger(loggerName, prefixesList.toArray(new String[prefixesList.size()])); - } - - private static List prefixesList(Settings settings, String... 
prefixes) { - List prefixesList = new ArrayList<>(); - if (Node.NODE_NAME_SETTING.exists(settings)) { - prefixesList.add(Node.NODE_NAME_SETTING.get(settings)); - } - if (prefixes != null && prefixes.length > 0) { - prefixesList.addAll(asList(prefixes)); - } - return prefixesList; + return Loggers.getLogger(loggerName, prefixes); } public static Logger getLogger(Logger parentLogger, String s) { - assert parentLogger instanceof PrefixLogger; - return ESLoggerFactory.getLogger(((PrefixLogger)parentLogger).prefix(), parentLogger.getName() + s); + String prefix = null; + if (parentLogger instanceof PrefixLogger) { + prefix = ((PrefixLogger)parentLogger).prefix(); + } + return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s); } public static Logger getLogger(String s) { @@ -126,7 +112,6 @@ public class Loggers { } } if (sb.length() > 0) { - sb.append(" "); prefix = sb.toString(); } } diff --git a/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java b/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java new file mode 100644 index 00000000000..ca4c9ab776f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +import java.util.Arrays; + +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.pattern.ConverterKeys; +import org.apache.logging.log4j.core.pattern.LogEventPatternConverter; +import org.apache.logging.log4j.core.pattern.PatternConverter; +import org.apache.lucene.util.SetOnce; + +/** + * Converts {@code %node_name} in log4j patterns into the current node name. + * We *could* use a system property lookup instead but this is very explicit + * and fails fast if we try to use the logger without initializing the node + * name. As a bonus it ought to be ever so slightly faster because it doesn't + * have to look up the system property every time. + */ +@Plugin(category = PatternConverter.CATEGORY, name = "NodeNamePatternConverter") +@ConverterKeys({"node_name"}) +public class NodeNamePatternConverter extends LogEventPatternConverter { + private static final SetOnce NODE_NAME = new SetOnce<>(); + + /** + * Set the name of this node. + */ + public static void setNodeName(String nodeName) { + NODE_NAME.set(nodeName); + } + + /** + * Called by log4j2 to initialize this converter. 
+ */ + public static NodeNamePatternConverter newInstance(final String[] options) { + if (options.length > 0) { + throw new IllegalArgumentException("no options supported but options provided: " + + Arrays.toString(options)); + } + return new NodeNamePatternConverter(NODE_NAME.get()); + } + + private final String nodeName; + + private NodeNamePatternConverter(String nodeName) { + super("NodeName", "node_name"); + this.nodeName = nodeName; + } + + @Override + public void format(LogEvent event, StringBuilder toAppendTo) { + toAppendTo.append(nodeName); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index eee45743ee3..e017e9e7ca9 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -359,7 +359,7 @@ public class KeyStoreWrapper implements SecureSettings { if (input.read() != -1) { throw new SecurityException("Keystore has been corrupted or tampered with"); } - } catch (EOFException e) { + } catch (IOException e) { throw new SecurityException("Keystore has been corrupted or tampered with", e); } } diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index a3f83609037..ba5c4cd929f 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.get; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.document.DocumentField; @@ -30,6 +31,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.search.lookup.SourceLookup; @@ -225,10 +227,13 @@ public class GetResult implements Streamable, Iterable, ToXConten } } } - for (DocumentField field : metaFields) { - Object value = field.getValue(); - builder.field(field.getName(), value); + // TODO: can we avoid having an exception here? + if (field.getName().equals(IgnoredFieldMapper.NAME)) { + builder.field(field.getName(), field.getValues()); + } else { + builder.field(field.getName(), field.getValue()); + } } builder.field(FOUND, exists); @@ -316,7 +321,11 @@ public class GetResult implements Streamable, Iterable, ToXConten parser.skipChildren(); // skip potential inner objects for forward compatibility } } else if (token == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); // skip potential inner arrays for forward compatibility + if (IgnoredFieldMapper.NAME.equals(currentFieldName)) { + fields.put(currentFieldName, new DocumentField(currentFieldName, parser.list())); + } else { + parser.skipChildren(); // skip potential inner arrays for forward compatibility + } } } return new GetResult(index, type, id, version, found, source, fields); @@ -400,7 +409,12 @@ public class GetResult implements Streamable, Iterable, ToXConten @Override public int hashCode() { - return Objects.hash(index, type, id, version, exists, fields, sourceAsMap()); + return Objects.hash(version, exists, index, type, id, fields, sourceAsMap()); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java index 
aa8543175d9..f848e8722c7 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java @@ -23,8 +23,11 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.tasks.TaskId; +import java.io.IOException; + import static org.elasticsearch.action.ValidateActions.addValidationError; /** @@ -53,6 +56,10 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest list, Object value) { if (value instanceof List) { - List valueList = (List) value; - valueList.stream().forEach(list::add); + list.addAll((List) value); } else { list.add(value); } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index c1ce864223b..b5cc7964085 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -231,7 +231,14 @@ public class Node implements Closeable { } private static final String CLIENT_TYPE = "node"; + private final Lifecycle lifecycle = new Lifecycle(); + + /** + * Logger initialized in the ctor because if it were initialized statically + * then it wouldn't get the node name. 
+ */ + private final Logger logger; private final Injector injector; private final Settings settings; private final Settings originalSettings; @@ -257,13 +264,9 @@ public class Node implements Closeable { } protected Node(final Environment environment, Collection> classpathPlugins) { + logger = Loggers.getLogger(Node.class); final List resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error boolean success = false; - { - // use temp logger just to say we are starting. we can't use it later on because the node name might not be set - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings())); - logger.info("initializing ..."); - } try { originalSettings = environment.settings(); Settings tmpSettings = Settings.builder().put(environment.settings()) @@ -279,7 +282,6 @@ public class Node implements Closeable { final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings); final String nodeId = nodeEnvironment.nodeId(); tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId); - final Logger logger = Loggers.getLogger(Node.class, tmpSettings); // this must be captured after the node name is possibly added to the settings final String nodeName = NODE_NAME_SETTING.get(tmpSettings); if (hadPredefinedNodeName == false) { @@ -631,7 +633,6 @@ public class Node implements Closeable { return this; } - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings)); logger.info("starting ..."); pluginLifecycleComponents.forEach(LifecycleComponent::start); @@ -742,7 +743,6 @@ public class Node implements Closeable { if (!lifecycle.moveToStopped()) { return this; } - Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings)); logger.info("stopping ..."); injector.getInstance(ResourceWatcherService.class).stop(); @@ -785,7 +785,6 @@ public class Node implements Closeable { return; } - Logger logger = Loggers.getLogger(Node.class, 
NODE_NAME_SETTING.get(settings)); logger.info("closing ..."); List toClose = new ArrayList<>(); StopWatch stopWatch = new StopWatch("node_close"); diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 8c688cbf446..28a600c0d21 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -19,6 +19,16 @@ package org.elasticsearch.search; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.OriginalIndices; @@ -51,16 +61,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.transport.RemoteClusterAware; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; @@ -107,6 +107,9 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable fields = get(Fields.FIELDS, values, Collections.emptyMap()); SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields); - searchHit.index = get(Fields._INDEX, values, null); + String index = get(Fields._INDEX, values, null); + String clusterAlias = null; + if (index != null) { + int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (indexOf > 0) { + 
clusterAlias = index.substring(0, indexOf); + index = index.substring(indexOf + 1); + } + } + ShardId shardId = get(Fields._SHARD, values, null); + String nodeId = get(Fields._NODE, values, null); + if (shardId != null && nodeId != null) { + assert shardId.getIndexName().equals(index); + searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE)); + } else { + //these fields get set anyways when setting the shard target, + //but we set them explicitly when we don't have enough info to rebuild the shard target + searchHit.index = index; + searchHit.clusterAlias = clusterAlias; + } searchHit.score(get(Fields._SCORE, values, DEFAULT_SCORE)); searchHit.version(get(Fields._VERSION, values, -1L)); searchHit.sortValues(get(Fields.SORT, values, SearchSortValues.EMPTY)); @@ -561,12 +583,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable matchedQueries = get(Fields.MATCHED_QUERIES, values, null); if (matchedQueries != null) { - searchHit.matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()])); - } - ShardId shardId = get(Fields._SHARD, values, null); - String nodeId = get(Fields._NODE, values, null); - if (shardId != null && nodeId != null) { - searchHit.shard(new SearchShardTarget(nodeId, shardId, null, OriginalIndices.NONE)); + searchHit.matchedQueries(matchedQueries.toArray(new String[0])); } return searchHit; } @@ -602,16 +619,24 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable { - @SuppressWarnings("unchecked") - Map fieldMap = (Map) map.computeIfAbsent(Fields.FIELDS, - v -> new HashMap()); - fieldMap.put(field.getName(), field); - }, (p, c) -> { - List values = new ArrayList<>(); - values.add(parseFieldsValue(p)); - return new DocumentField(metadatafield, values); - }, new ParseField(metadatafield), ValueType.VALUE); + if (metadatafield.equals(IgnoredFieldMapper.NAME)) { + parser.declareObjectArray((map, list) -> { + @SuppressWarnings("unchecked") + Map 
fieldMap = (Map) map.computeIfAbsent(Fields.FIELDS, + v -> new HashMap()); + DocumentField field = new DocumentField(metadatafield, list); + fieldMap.put(field.getName(), field); + }, (p, c) -> parseFieldsValue(p), + new ParseField(metadatafield)); + } else { + parser.declareField((map, field) -> { + @SuppressWarnings("unchecked") + Map fieldMap = (Map) map.computeIfAbsent(Fields.FIELDS, + v -> new HashMap()); + fieldMap.put(field.getName(), field); + }, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))), + new ParseField(metadatafield), ValueType.VALUE); + } } } } @@ -834,13 +859,15 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable PARSER; static { @@ -63,6 +64,29 @@ public class AutoDateHistogramAggregationBuilder PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD); } + /** + * + * Build roundings, computed dynamically as roundings are time zone dependent. + * The current implementation probably should not be invoked in a tight loop. 
+ * @return Array of RoundingInfo + */ + static RoundingInfo[] buildRoundings(DateTimeZone timeZone) { + RoundingInfo[] roundings = new RoundingInfo[6]; + roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone), + 1000L, 1, 5, 10, 30); + roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone), + 60 * 1000L, 1, 5, 10, 30); + roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone), + 60 * 60 * 1000L, 1, 3, 12); + roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone), + 24 * 60 * 60 * 1000L, 1, 7); + roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone), + 30 * 24 * 60 * 60 * 1000L, 1, 3); + roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone), + 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100); + return roundings; + } + public static AutoDateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { return PARSER.parse(parser, new AutoDateHistogramAggregationBuilder(aggregationName), null); } @@ -116,14 +140,7 @@ public class AutoDateHistogramAggregationBuilder @Override protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - RoundingInfo[] roundings = new RoundingInfo[6]; - roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE), 1000L, 1, 5, 10, 30); - roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR), 60 * 1000L, 1, 5, 10, 30); - roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY), 60 * 60 * 1000L, 1, 3, 12); - roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH), 24 * 60 * 60 * 1000L, 1, 7); - roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR), 30 * 24 * 60 * 60 * 1000L, 1, 3); - roundings[5] = new 
RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY), 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100); - + RoundingInfo[] roundings = buildRoundings(timeZone()); int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1) .map(rounding -> rounding.innerIntervals) .flatMapToInt(Arrays::stream) @@ -139,10 +156,10 @@ public class AutoDateHistogramAggregationBuilder return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData); } - private Rounding createRounding(DateTimeUnit interval) { + private static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) { Rounding.Builder tzRoundingBuilder = Rounding.builder(interval); - if (timeZone() != null) { - tzRoundingBuilder.timeZone(timeZone()); + if (timeZone != null) { + tzRoundingBuilder.timeZone(timeZone); } Rounding rounding = tzRoundingBuilder.build(); return rounding; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 27c195cbdae..6a78ca67249 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -447,7 +447,8 @@ public final class InternalAutoDateHistogram extends return new BucketReduceResult(list, roundingInfo, roundingIdx); } - private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] roundings) { + private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, + RoundingInfo[] roundings) { if (roundingIdx == roundings.length - 1) { return roundingIdx; } @@ -509,7 +510,8 @@ public final class InternalAutoDateHistogram extends pipelineAggregators(), getMetaData()); } - private BucketReduceResult 
maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, ReduceContext reduceContext) { + private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, + ReduceContext reduceContext) { List buckets = reducedBucketsResult.buckets; RoundingInfo roundingInfo = reducedBucketsResult.roundingInfo; int roundingIdx = reducedBucketsResult.roundingIdx; @@ -539,7 +541,7 @@ public final class InternalAutoDateHistogram extends key = roundingInfo.rounding.round(bucket.key); } reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); - sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + sameKeyedBuckets.add(new Bucket(Math.round(key), bucket.docCount, format, bucket.aggregations)); } if (sameKeyedBuckets.isEmpty() == false) { reduceContext.consumeBucketsAndMaybeBreak(1); diff --git a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index c8503603f66..79527582405 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.test.ESIntegTestCase; -import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.StandardCopyOption; @@ -205,14 +204,7 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase { assertThat(nodesMap.size(), equalTo(cluster().size())); for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { assertThat(nodeResponse.reloadException(), notNullValue()); - // Running in a JVM with a BouncyCastle FIPS Security Provider, decrypting the Keystore with the wrong - // password returns a SecurityException if the 
DataInputStream can't be fully consumed - if (inFipsJvm()) { assertThat(nodeResponse.reloadException(), instanceOf(SecurityException.class)); - } else { - assertThat(nodeResponse.reloadException(), instanceOf(IOException.class)); - } - } } catch (final AssertionError e) { reloadSettingsError.set(e); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 5379769e819..7823cd8849c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -60,7 +60,12 @@ public class RolloverIT extends ESIntegTestCase { public void testRolloverOnEmptyIndex() throws Exception { - assertAcked(prepareCreate("test_index-1").addAlias(new Alias("test_alias")).get()); + Alias testAlias = new Alias("test_alias"); + boolean explicitWriteIndex = randomBoolean(); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index-1").addAlias(testAlias).get()); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get(); assertThat(response.getOldIndex(), equalTo("test_index-1")); assertThat(response.getNewIndex(), equalTo("test_index-000002")); @@ -69,7 +74,12 @@ public class RolloverIT extends ESIntegTestCase { assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-1"); - assertFalse(oldIndex.getAliases().containsKey("test_alias")); + if (explicitWriteIndex) { + assertTrue(oldIndex.getAliases().containsKey("test_alias")); + assertFalse(oldIndex.getAliases().get("test_alias").writeIndex()); + } else { + assertFalse(oldIndex.getAliases().containsKey("test_alias")); + } final IndexMetaData newIndex = 
state.metaData().index("test_index-000002"); assertTrue(newIndex.getAliases().containsKey("test_alias")); } @@ -97,8 +107,49 @@ public class RolloverIT extends ESIntegTestCase { is(both(greaterThanOrEqualTo(beforeTime)).and(lessThanOrEqualTo(client().threadPool().absoluteTimeInMillis() + 1000L)))); } + public void testRolloverWithExplicitWriteIndex() throws Exception { + long beforeTime = client().threadPool().absoluteTimeInMillis() - 1000L; + assertAcked(prepareCreate("test_index-2").addAlias(new Alias("test_alias").writeIndex(true)).get()); + index("test_index-2", "type1", "1", "field", "value"); + flush("test_index-2"); + final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get(); + assertThat(response.getOldIndex(), equalTo("test_index-2")); + assertThat(response.getNewIndex(), equalTo("test_index-000003")); + assertThat(response.isDryRun(), equalTo(false)); + assertThat(response.isRolledOver(), equalTo(true)); + assertThat(response.getConditionStatus().size(), equalTo(0)); + final ClusterState state = client().admin().cluster().prepareState().get().getState(); + final IndexMetaData oldIndex = state.metaData().index("test_index-2"); + assertTrue(oldIndex.getAliases().containsKey("test_alias")); + assertFalse(oldIndex.getAliases().get("test_alias").writeIndex()); + final IndexMetaData newIndex = state.metaData().index("test_index-000003"); + assertTrue(newIndex.getAliases().containsKey("test_alias")); + assertTrue(newIndex.getAliases().get("test_alias").writeIndex()); + assertThat(oldIndex.getRolloverInfos().size(), equalTo(1)); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getAlias(), equalTo("test_alias")); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getMetConditions(), is(empty())); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getTime(), + is(both(greaterThanOrEqualTo(beforeTime)).and(lessThanOrEqualTo(client().threadPool().absoluteTimeInMillis() + 1000L)))); + } + + public 
void testRolloverWithNoWriteIndex() { + Boolean firstIsWriteIndex = randomFrom(false, null); + assertAcked(prepareCreate("index1").addAlias(new Alias("alias").writeIndex(firstIsWriteIndex)).get()); + if (firstIsWriteIndex == null) { + assertAcked(prepareCreate("index2").addAlias(new Alias("alias").writeIndex(randomFrom(false, null))).get()); + } + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> client().admin().indices().prepareRolloverIndex("alias").dryRun(randomBoolean()).get()); + assertThat(exception.getMessage(), equalTo("source alias [alias] does not point to a write index")); + } + public void testRolloverWithIndexSettings() throws Exception { - assertAcked(prepareCreate("test_index-2").addAlias(new Alias("test_alias")).get()); + Alias testAlias = new Alias("test_alias"); + boolean explicitWriteIndex = randomBoolean(); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index-2").addAlias(testAlias).get()); index("test_index-2", "type1", "1", "field", "value"); flush("test_index-2"); final Settings settings = Settings.builder() @@ -114,12 +165,17 @@ public class RolloverIT extends ESIntegTestCase { assertThat(response.getConditionStatus().size(), equalTo(0)); final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-2"); - assertFalse(oldIndex.getAliases().containsKey("test_alias")); final IndexMetaData newIndex = state.metaData().index("test_index-000003"); assertThat(newIndex.getNumberOfShards(), equalTo(1)); assertThat(newIndex.getNumberOfReplicas(), equalTo(0)); assertTrue(newIndex.getAliases().containsKey("test_alias")); assertTrue(newIndex.getAliases().containsKey("extra_alias")); + if (explicitWriteIndex) { + assertFalse(oldIndex.getAliases().get("test_alias").writeIndex()); + assertTrue(newIndex.getAliases().get("test_alias").writeIndex()); + } else { + 
assertFalse(oldIndex.getAliases().containsKey("test_alias")); + } } public void testRolloverDryRun() throws Exception { @@ -140,7 +196,12 @@ public class RolloverIT extends ESIntegTestCase { } public void testRolloverConditionsNotMet() throws Exception { - assertAcked(prepareCreate("test_index-0").addAlias(new Alias("test_alias")).get()); + boolean explicitWriteIndex = randomBoolean(); + Alias testAlias = new Alias("test_alias"); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index-0").addAlias(testAlias).get()); index("test_index-0", "type1", "1", "field", "value"); flush("test_index-0"); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias") @@ -160,12 +221,22 @@ public class RolloverIT extends ESIntegTestCase { final ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index-0"); assertTrue(oldIndex.getAliases().containsKey("test_alias")); + if (explicitWriteIndex) { + assertTrue(oldIndex.getAliases().get("test_alias").writeIndex()); + } else { + assertNull(oldIndex.getAliases().get("test_alias").writeIndex()); + } final IndexMetaData newIndex = state.metaData().index("test_index-000001"); assertNull(newIndex); } public void testRolloverWithNewIndexName() throws Exception { - assertAcked(prepareCreate("test_index").addAlias(new Alias("test_alias")).get()); + Alias testAlias = new Alias("test_alias"); + boolean explicitWriteIndex = randomBoolean(); + if (explicitWriteIndex) { + testAlias.writeIndex(true); + } + assertAcked(prepareCreate("test_index").addAlias(testAlias).get()); index("test_index", "type1", "1", "field", "value"); flush("test_index"); final RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias") @@ -177,9 +248,14 @@ public class RolloverIT extends ESIntegTestCase { assertThat(response.getConditionStatus().size(), equalTo(0)); final 
ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetaData oldIndex = state.metaData().index("test_index"); - assertFalse(oldIndex.getAliases().containsKey("test_alias")); final IndexMetaData newIndex = state.metaData().index("test_new_index"); assertTrue(newIndex.getAliases().containsKey("test_alias")); + if (explicitWriteIndex) { + assertFalse(oldIndex.getAliases().get("test_alias").writeIndex()); + assertTrue(newIndex.getAliases().get("test_alias").writeIndex()); + } else { + assertFalse(oldIndex.getAliases().containsKey("test_alias")); + } } public void testRolloverOnExistingIndex() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 6149c380cd7..d59700f2b7a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -172,39 +172,75 @@ public class TransportRolloverActionTests extends ESTestCase { assertTrue(foundRemove); } + public void testCreateUpdateAliasRequestWithExplicitWriteIndex() { + String sourceAlias = randomAlphaOfLength(10); + String sourceIndex = randomAlphaOfLength(10); + String targetIndex = randomAlphaOfLength(10); + final RolloverRequest rolloverRequest = new RolloverRequest(sourceAlias, targetIndex); + final IndicesAliasesClusterStateUpdateRequest updateRequest = + TransportRolloverAction.prepareRolloverAliasesWriteIndexUpdateRequest(sourceIndex, targetIndex, rolloverRequest); + + List actions = updateRequest.actions(); + assertThat(actions, hasSize(2)); + boolean foundAddWrite = false; + boolean foundRemoveWrite = false; + for (AliasAction action : actions) { + AliasAction.Add addAction = (AliasAction.Add) action; + if 
(action.getIndex().equals(targetIndex)) { + assertEquals(sourceAlias, addAction.getAlias()); + assertTrue(addAction.writeIndex()); + foundAddWrite = true; + } else if (action.getIndex().equals(sourceIndex)) { + assertEquals(sourceAlias, addAction.getAlias()); + assertFalse(addAction.writeIndex()); + foundRemoveWrite = true; + } else { + throw new AssertionError("Unknown index [" + action.getIndex() + "]"); + } + } + assertTrue(foundAddWrite); + assertTrue(foundRemoveWrite); + } + public void testValidation() { String index1 = randomAlphaOfLength(10); - String alias = randomAlphaOfLength(10); + String aliasWithWriteIndex = randomAlphaOfLength(10); String index2 = randomAlphaOfLength(10); - String aliasWithMultipleIndices = randomAlphaOfLength(10); + String aliasWithNoWriteIndex = randomAlphaOfLength(10); + Boolean firstIsWriteIndex = randomFrom(false, null); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); - final MetaData metaData = MetaData.builder() + MetaData.Builder metaDataBuilder = MetaData.builder() .put(IndexMetaData.builder(index1) .settings(settings) - .putAlias(AliasMetaData.builder(alias)) - .putAlias(AliasMetaData.builder(aliasWithMultipleIndices)) - ) - .put(IndexMetaData.builder(index2) - .settings(settings) - .putAlias(AliasMetaData.builder(aliasWithMultipleIndices)) - ).build(); + .putAlias(AliasMetaData.builder(aliasWithWriteIndex)) + .putAlias(AliasMetaData.builder(aliasWithNoWriteIndex).writeIndex(firstIsWriteIndex)) + ); + IndexMetaData.Builder indexTwoBuilder = IndexMetaData.builder(index2).settings(settings); + if (firstIsWriteIndex == null) { + indexTwoBuilder.putAlias(AliasMetaData.builder(aliasWithNoWriteIndex).writeIndex(randomFrom(false, null))); + } + metaDataBuilder.put(indexTwoBuilder); + MetaData metaData = 
metaDataBuilder.build(); - expectThrows(IllegalArgumentException.class, () -> - TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithMultipleIndices, + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> + TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithNoWriteIndex, randomAlphaOfLength(10)))); - expectThrows(IllegalArgumentException.class, () -> + assertThat(exception.getMessage(), equalTo("source alias [" + aliasWithNoWriteIndex + "] does not point to a write index")); + exception = expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.validate(metaData, new RolloverRequest(randomFrom(index1, index2), randomAlphaOfLength(10)))); - expectThrows(IllegalArgumentException.class, () -> + assertThat(exception.getMessage(), equalTo("source alias is a concrete index")); + exception = expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.validate(metaData, new RolloverRequest(randomAlphaOfLength(5), randomAlphaOfLength(10))) ); - TransportRolloverAction.validate(metaData, new RolloverRequest(alias, randomAlphaOfLength(10))); + assertThat(exception.getMessage(), equalTo("source alias does not exist")); + TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithWriteIndex, randomAlphaOfLength(10))); } public void testGenerateRolloverIndexName() { @@ -248,7 +284,7 @@ public class TransportRolloverActionTests extends ESTestCase { public void testRejectDuplicateAlias() { final IndexTemplateMetaData template = IndexTemplateMetaData.builder("test-template") .patterns(Arrays.asList("foo-*", "bar-*")) - .putAlias(AliasMetaData.builder("foo-write")).putAlias(AliasMetaData.builder("bar-write")) + .putAlias(AliasMetaData.builder("foo-write")).putAlias(AliasMetaData.builder("bar-write").writeIndex(randomBoolean())) .build(); final MetaData metaData = MetaData.builder().put(createMetaData(), false).put(template).build(); String indexName = 
randomFrom("foo-123", "bar-xyz"); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/server/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index 59ede535c2f..60053748d68 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -27,10 +27,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.RoutingTable.Builder; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -44,6 +46,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; @@ -93,7 +96,8 @@ public class ClusterStateCreationUtils { IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(Settings.builder() .put(SETTING_VERSION_CREATED, Version.CURRENT) .put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas) - .put(SETTING_CREATION_DATE, System.currentTimeMillis())).primaryTerm(0, primaryTerm).build(); + .put(SETTING_CREATION_DATE, 
System.currentTimeMillis())).primaryTerm(0, primaryTerm) + .build(); RoutingTable.Builder routing = new RoutingTable.Builder(); routing.addAsNew(indexMetaData); @@ -138,12 +142,19 @@ public class ClusterStateCreationUtils { TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, false, replicaState, unassignedInfo)); } + final IndexShardRoutingTable indexShardRoutingTable = indexShardRoutingBuilder.build(); + + IndexMetaData.Builder indexMetaDataBuilder = new IndexMetaData.Builder(indexMetaData); + indexMetaDataBuilder.putInSyncAllocationIds(0, + indexShardRoutingTable.activeShards().stream().map(ShardRouting::allocationId).map(AllocationId::getId) + .collect(Collectors.toSet()) + ); ClusterState.Builder state = ClusterState.builder(new ClusterName("test")); state.nodes(discoBuilder); - state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded()); + state.metaData(MetaData.builder().put(indexMetaDataBuilder.build(), false).generateClusterUuidIfNeeded()); state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(indexMetaData.getIndex()) - .addIndexShard(indexShardRoutingBuilder.build())).build()); + .addIndexShard(indexShardRoutingTable)).build()); return state.build(); } @@ -272,21 +283,21 @@ public class ClusterStateCreationUtils { state.routingTable(RoutingTable.builder().add(indexRoutingTableBuilder.build()).build()); return state.build(); } - - + + /** * Creates cluster state with several indexes, shards and replicas and all shards STARTED. 
*/ public static ClusterState stateWithAssignedPrimariesAndReplicas(String[] indices, int numberOfShards, int numberOfReplicas) { - int numberOfDataNodes = numberOfReplicas + 1; + int numberOfDataNodes = numberOfReplicas + 1; DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); for (int i = 0; i < numberOfDataNodes + 1; i++) { final DiscoveryNode node = newNode(i); discoBuilder = discoBuilder.add(node); } discoBuilder.localNodeId(newNode(0).getId()); - discoBuilder.masterNodeId(newNode(numberOfDataNodes + 1).getId()); + discoBuilder.masterNodeId(newNode(numberOfDataNodes + 1).getId()); ClusterState.Builder state = ClusterState.builder(new ClusterName("test")); state.nodes(discoBuilder); Builder routingTableBuilder = RoutingTable.builder(); @@ -316,7 +327,7 @@ public class ClusterStateCreationUtils { state.metaData(metadataBuilder); state.routingTable(routingTableBuilder.build()); return state.build(); - } + } /** * Creates cluster state with and index that has one shard and as many replicas as numberOfReplicas. 
diff --git a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java index e3ba6d19b39..8180dd96e8e 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java @@ -131,7 +131,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows(NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, checks, "testExceptionAggregation")); + () -> BootstrapChecks.check(defaultContext, true, checks)); assertThat(e, hasToString(allOf(containsString("bootstrap checks failed"), containsString("first"), containsString("second")))); final Throwable[] suppressed = e.getSuppressed(); assertThat(suppressed.length, equalTo(2)); @@ -162,7 +162,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat( e.getMessage(), containsString("initial heap size [" + initialHeapSize.get() + "] " + @@ -170,7 +170,7 @@ public class BootstrapChecksTests extends ESTestCase { initialHeapSize.set(maxHeapSize.get()); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if the initial heap size or the max // heap size is not available @@ -179,7 +179,7 @@ public class BootstrapChecksTests extends ESTestCase { } else { maxHeapSize.set(0); } - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"); + BootstrapChecks.check(defaultContext, true, 
Collections.singletonList(check)); } public void testFileDescriptorLimits() throws NodeValidationException { @@ -205,17 +205,17 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows(NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max file descriptors")); maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if current file descriptor count is // not available maxFileDescriptorCount.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testFileDescriptorLimitsThrowsOnInvalidLimit() { @@ -262,15 +262,13 @@ public class BootstrapChecksTests extends ESTestCase { () -> BootstrapChecks.check( bootstrapContext, true, - Collections.singletonList(check), - "testFileDescriptorLimitsThrowsOnInvalidLimit")); + Collections.singletonList(check))); assertThat( e.getMessage(), containsString("memory locking requested for elasticsearch process but memory is not locked")); } else { // nothing should happen - BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check), - "testFileDescriptorLimitsThrowsOnInvalidLimit"); + BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check)); } } } @@ -287,17 +285,17 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> 
BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max number of threads")); maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if current max number of threads is // not available maxNumberOfThreads.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testMaxSizeVirtualMemory() throws NodeValidationException { @@ -317,16 +315,16 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max size virtual memory")); maxSizeVirtualMemory.set(rlimInfinity); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if max size virtual memory is not available maxSizeVirtualMemory.set(Long.MIN_VALUE); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testMaxFileSizeCheck() throws NodeValidationException { @@ -346,16 +344,16 @@ public class BootstrapChecksTests 
extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max file size")); maxFileSize.set(rlimInfinity); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if max file size is not available maxFileSize.set(Long.MIN_VALUE); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testMaxMapCountCheck() throws NodeValidationException { @@ -370,17 +368,17 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count")); maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // nothing should happen if current vm.max_map_count is not // available maxMapCount.set(-1); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testClientJvmCheck() throws NodeValidationException { @@ -394,14 +392,14 @@ public class 
BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat( e.getMessage(), containsString("JVM is using the client VM [Java HotSpot(TM) 32-Bit Client VM] " + "but should be using a server VM for the best performance")); vmName.set("Java HotSpot(TM) 32-Bit Server VM"); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testUseSerialGCCheck() throws NodeValidationException { @@ -415,14 +413,14 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck")); + () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check))); assertThat( e.getMessage(), containsString("JVM is using the serial collector but should not be for the best performance; " + "" + "either it's the default for the VM [" + JvmInfo.jvmInfo().getVmName() +"] or -XX:+UseSerialGC was explicitly specified")); useSerialGC.set("false"); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); } public void testSystemCallFilterCheck() throws NodeValidationException { @@ -439,15 +437,14 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), - "testSystemCallFilterCheck")); + () -> 
BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck))); assertThat( e.getMessage(), containsString("system call filters failed to install; " + "check the logs and fix your configuration or disable system call filters at your own risk")); isSystemCallFilterInstalled.set(true); - BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), "testSystemCallFilterCheck"); + BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck)); BootstrapContext context_1 = new BootstrapContext(Settings.builder().put("bootstrap.system_call_filter", false).build(), null); final BootstrapChecks.SystemCallFilterCheck systemCallFilterNotEnabledCheck = new BootstrapChecks.SystemCallFilterCheck() { @Override @@ -456,9 +453,9 @@ public class BootstrapChecksTests extends ESTestCase { } }; isSystemCallFilterInstalled.set(false); - BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck"); + BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck)); isSystemCallFilterInstalled.set(true); - BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck"); + BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck)); } public void testMightForkCheck() throws NodeValidationException { @@ -553,8 +550,6 @@ public class BootstrapChecksTests extends ESTestCase { final Runnable enableMightFork, final Consumer consumer) throws NodeValidationException { - final String methodName = Thread.currentThread().getStackTrace()[2].getMethodName(); - // if system call filter is disabled, nothing should happen isSystemCallFilterInstalled.set(false); if (randomBoolean()) { @@ -562,13 +557,13 @@ public class BootstrapChecksTests extends ESTestCase { } else { enableMightFork.run(); } - 
BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // if system call filter is enabled, but we will not fork, nothing should // happen isSystemCallFilterInstalled.set(true); disableMightFork.run(); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)); // if system call filter is enabled, and we might fork, the check should be enforced, regardless of bootstrap checks being enabled // or not @@ -577,7 +572,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), methodName)); + () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check))); consumer.accept(e); } @@ -602,7 +597,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, () -> { - BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck"); + BootstrapChecks.check(defaultContext, true, checks); }); assertThat( e.getMessage(), @@ -613,7 +608,7 @@ public class BootstrapChecksTests extends ESTestCase { // if not on an early-access build, nothing should happen javaVersion.set(randomFrom("1.8.0_152", "9")); - BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck"); + BootstrapChecks.check(defaultContext, true, checks); } @@ -649,7 +644,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck")); + () -> BootstrapChecks.check(defaultContext, true, 
Collections.singletonList(g1GCCheck))); assertThat( e.getMessage(), containsString( @@ -657,12 +652,12 @@ public class BootstrapChecksTests extends ESTestCase { // if G1GC is disabled, nothing should happen isG1GCEnabled.set(false); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck)); // if on or after update 40, nothing should happen independent of whether or not G1GC is enabled isG1GCEnabled.set(randomBoolean()); jvmVersion.set(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(40, 112), randomIntBetween(1, 128))); - BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck)); final BootstrapChecks.G1GCCheck nonOracleCheck = new BootstrapChecks.G1GCCheck() { @@ -674,7 +669,7 @@ public class BootstrapChecksTests extends ESTestCase { }; // if not on an Oracle JVM, nothing should happen - BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck)); final BootstrapChecks.G1GCCheck nonJava8Check = new BootstrapChecks.G1GCCheck() { @@ -686,7 +681,7 @@ public class BootstrapChecksTests extends ESTestCase { }; // if not Java 8, nothing should happen - BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check), "testG1GCCheck"); + BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check)); } public void testAllPermissionCheck() throws NodeValidationException { @@ -701,12 +696,12 @@ public class BootstrapChecksTests extends ESTestCase { final List checks = Collections.singletonList(allPermissionCheck); final NodeValidationException e = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, true, 
checks, "testIsAllPermissionCheck")); + () -> BootstrapChecks.check(defaultContext, true, checks)); assertThat(e, hasToString(containsString("granting the all permission effectively disables security"))); // if all permissions are not granted, nothing should happen isAllPermissionGranted.set(false); - BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck"); + BootstrapChecks.check(defaultContext, true, checks); } public void testAlwaysEnforcedChecks() { @@ -724,7 +719,7 @@ public class BootstrapChecksTests extends ESTestCase { final NodeValidationException alwaysEnforced = expectThrows( NodeValidationException.class, - () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), "testAlwaysEnforcedChecks")); + () -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check))); assertThat(alwaysEnforced, hasToString(containsString("error"))); } diff --git a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java index e2bf07b7d0b..a6e50170d7e 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java @@ -65,7 +65,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase { final AtomicInteger observedStatus = new AtomicInteger(); final AtomicReference threadNameReference = new AtomicReference<>(); final AtomicReference throwableReference = new AtomicReference<>(); - thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtError") { + thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler() { @Override void halt(int status) { @@ -106,7 +106,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends 
ESTestCase { thread.setName(name); final AtomicReference threadNameReference = new AtomicReference<>(); final AtomicReference throwableReference = new AtomicReference<>(); - thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtException") { + thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler() { @Override void halt(int status) { fail(); diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java b/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java index 7f8b893caf0..83bf2bae6a6 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/PolygonBuilderTests.java @@ -143,4 +143,22 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase loadedkeystore.decrypt(new char[]{'i', 'n', 'v', 'a', 'l', 'i', 'd'})); + assertThat(exception.getMessage(), containsString("Keystore has been corrupted or tampered with")); + } + public void testCannotReadStringFromClosedKeystore() throws Exception { KeyStoreWrapper keystore = KeyStoreWrapper.create(); assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index f47cd11e1b4..6169f6d12ee 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1978,7 +1978,7 @@ public class InternalEngineTests extends EngineTestCase { @Override public void append(LogEvent event) { final String formattedMessage = event.getMessage().getFormattedMessage(); - if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0] ")) { + if (event.getLevel() == 
Level.TRACE && event.getMarker().getName().contains("[index][0]")) { if (event.getLoggerName().endsWith(".IW") && formattedMessage.contains("IW: now apply all deletes")) { sawIndexWriterMessage = true; diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index 830e41996e9..e2e3d4df67c 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -26,7 +26,11 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.RoutingFieldMapper; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.IndexFieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; @@ -98,14 +102,28 @@ public class DocumentFieldTests extends ESTestCase { public static Tuple randomDocumentField(XContentType xContentType) { if (randomBoolean()) { - String fieldName = randomFrom(RoutingFieldMapper.NAME); - DocumentField documentField = new DocumentField(fieldName, Collections.singletonList(randomAlphaOfLengthBetween(3, 10))); + String metaField = randomValueOtherThanMany(field -> field.equals(TypeFieldMapper.NAME) + || field.equals(IndexFieldMapper.NAME) || field.equals(IdFieldMapper.NAME), + () -> randomFrom(MapperService.getAllMetaFields())); + DocumentField documentField; + if (metaField.equals(IgnoredFieldMapper.NAME)) { + int numValues = randomIntBetween(1, 3); + List ignoredFields = new ArrayList<>(numValues); + for (int i = 0; i < 
numValues; i++) { + ignoredFields.add(randomAlphaOfLengthBetween(3, 10)); + } + documentField = new DocumentField(metaField, ignoredFields); + } else { + //meta fields are single value only, besides _ignored + documentField = new DocumentField(metaField, Collections.singletonList(randomAlphaOfLengthBetween(3, 10))); + } return Tuple.tuple(documentField, documentField); + } else { + String fieldName = randomAlphaOfLengthBetween(3, 10); + Tuple, List> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType); + DocumentField input = new DocumentField(fieldName, tuple.v1()); + DocumentField expected = new DocumentField(fieldName, tuple.v2()); + return Tuple.tuple(input, expected); } - String fieldName = randomAlphaOfLengthBetween(3, 10); - Tuple, List> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType); - DocumentField input = new DocumentField(fieldName, tuple.v1()); - DocumentField expected = new DocumentField(fieldName, tuple.v2()); - return Tuple.tuple(input, expected); } } diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index a38d183299c..1cc2612041f 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -90,7 +90,6 @@ public class GetResultTests extends ESTestCase { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); GetResult getResult = tuple.v1(); - // We don't expect to retrieve the index/type/id of the GetResult because they are not rendered // by the toXContentEmbedded method. 
GetResult expectedGetResult = new GetResult(null, null, null, -1, @@ -106,7 +105,6 @@ public class GetResultTests extends ESTestCase { parsedEmbeddedGetResult = GetResult.fromXContentEmbedded(parser); assertNull(parser.nextToken()); } - assertEquals(expectedGetResult, parsedEmbeddedGetResult); //print the parsed object out and test that the output is the same as the original output BytesReference finalBytes = toXContentEmbedded(parsedEmbeddedGetResult, xContentType, humanReadable); @@ -203,16 +201,17 @@ public class GetResultTests extends ESTestCase { return Tuple.tuple(getResult, expectedGetResult); } - private static Tuple,Map> randomDocumentFields(XContentType xContentType) { + public static Tuple,Map> randomDocumentFields(XContentType xContentType) { int numFields = randomIntBetween(2, 10); Map fields = new HashMap<>(numFields); Map expectedFields = new HashMap<>(numFields); - for (int i = 0; i < numFields; i++) { + while (fields.size() < numFields) { Tuple tuple = randomDocumentField(xContentType); DocumentField getField = tuple.v1(); DocumentField expectedGetField = tuple.v2(); - fields.put(getField.getName(), getField); - expectedFields.put(expectedGetField.getName(), expectedGetField); + if (fields.putIfAbsent(getField.getName(), getField) == null) { + assertNull(expectedFields.putIfAbsent(expectedGetField.getName(), expectedGetField)); + } } return Tuple.tuple(fields, expectedFields); } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index 5c6b000f7e5..580696264bd 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -74,6 +74,10 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC 
enableRandomFailures = randomBoolean(); } + protected void disableRandomFailures() { + enableRandomFailures = false; + } + protected void failRandomly() { if (enableRandomFailures && rarely()) { throw new RuntimeException("dummy test failure"); diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 5611421594a..39091ce04ec 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.PrimaryReplicaSyncer; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.threadpool.TestThreadPool; @@ -75,6 +76,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPA import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -97,7 +99,6 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice terminate(threadPool); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32308") public void testRandomClusterStateUpdates() { // we have an IndicesClusterStateService per node in the cluster final Map 
clusterStateServiceMap = new HashMap<>(); @@ -199,6 +200,59 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice } } + /** + * In rare cases it is possible that a node gets an instruction to replace a replica + * shard that's in POST_RECOVERY with a new initializing primary with the same allocation id. + * This can happen by batching cluster states that include the starting of the replica, with + * closing of the indices, opening it up again and allocating the primary shard to the node in + * question. The node should then clean its initializing replica and replace it with a new + * initializing primary. + */ + public void testInitializingPrimaryRemovesInitializingReplicaWithSameAID() { + disableRandomFailures(); + String index = "index_" + randomAlphaOfLength(8).toLowerCase(Locale.ROOT); + ClusterState state = ClusterStateCreationUtils.state(index, randomBoolean(), + ShardRoutingState.STARTED, ShardRoutingState.INITIALIZING); + + // the initial state which is derived from the newly created cluster state but doesn't contain the index + ClusterState previousState = ClusterState.builder(state) + .metaData(MetaData.builder(state.metaData()).remove(index)) + .routingTable(RoutingTable.builder().build()) + .build(); + + // pick a data node to simulate the adding an index cluster state change event on, that has shards assigned to it + final ShardRouting shardRouting = state.routingTable().index(index).shard(0).replicaShards().get(0); + final ShardId shardId = shardRouting.shardId(); + DiscoveryNode node = state.nodes().get(shardRouting.currentNodeId()); + + // simulate the cluster state change on the node + ClusterState localState = adaptClusterStateToLocalNode(state, node); + ClusterState previousLocalState = adaptClusterStateToLocalNode(previousState, node); + IndicesClusterStateService indicesCSSvc = createIndicesClusterStateService(node, RecordingIndicesService::new); + indicesCSSvc.start(); + indicesCSSvc.applyClusterState(new 
ClusterChangedEvent("cluster state change that adds the index", localState, previousLocalState)); + previousState = state; + + // start the replica + state = cluster.applyStartedShards(state, state.routingTable().index(index).shard(0).replicaShards()); + + // close the index and open it up again (this will sometimes swap roles between primary and replica) + CloseIndexRequest closeIndexRequest = new CloseIndexRequest(state.metaData().index(index).getIndex().getName()); + state = cluster.closeIndices(state, closeIndexRequest); + OpenIndexRequest openIndexRequest = new OpenIndexRequest(state.metaData().index(index).getIndex().getName()); + state = cluster.openIndices(state, openIndexRequest); + + localState = adaptClusterStateToLocalNode(state, node); + previousLocalState = adaptClusterStateToLocalNode(previousState, node); + + indicesCSSvc.applyClusterState(new ClusterChangedEvent("new cluster state", localState, previousLocalState)); + + final MockIndexShard shardOrNull = ((RecordingIndicesService) indicesCSSvc.indicesService).getShardOrNull(shardId); + assertThat(shardOrNull == null ? 
null : shardOrNull.routingEntry(), + equalTo(state.getRoutingNodes().node(node.getId()).getByShardId(shardId))); + + } + public ClusterState randomInitialClusterState(Map clusterStateServiceMap, Supplier indicesServiceSupplier) { List allNodes = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 4d0d0d6ff3f..5ed4b370307 100644 --- a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -1004,6 +1004,7 @@ public class IndexStatsIT extends ESIntegTestCase { assertEquals(total, shardTotal); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32506") public void testFilterCacheStats() throws Exception { assertAcked(prepareCreate("index").setSettings(Settings.builder().put(indexSettings()).put("number_of_replicas", 0).build()).get()); indexRandom(true, diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 97dfad46454..996faf03ffb 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -19,30 +19,6 @@ package org.elasticsearch.search; -import org.apache.lucene.search.Explanation; -import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.util.set.Sets; -import 
org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.SearchHit.NestedIdentity; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.RandomObjects; - import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; @@ -50,9 +26,31 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Predicate; +import org.apache.lucene.search.Explanation; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.get.GetResultTests; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchHit.NestedIdentity; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; +import 
org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.RandomObjects; + import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -63,8 +61,6 @@ import static org.hamcrest.Matchers.nullValue; public class SearchHitTests extends ESTestCase { - private static Set META_FIELDS = Sets.newHashSet("_uid", "_parent", "_routing", "_size", "_timestamp", "_ttl"); - public static SearchHit createTestItem(boolean withOptionalInnerHits) { int internalId = randomInt(); String uid = randomAlphaOfLength(10); @@ -75,18 +71,7 @@ public class SearchHitTests extends ESTestCase { } Map fields = new HashMap<>(); if (randomBoolean()) { - int size = randomIntBetween(0, 10); - for (int i = 0; i < size; i++) { - Tuple, List> values = RandomObjects.randomStoredFieldValues(random(), - XContentType.JSON); - if (randomBoolean()) { - String metaField = randomFrom(META_FIELDS); - fields.put(metaField, new DocumentField(metaField, values.v1())); - } else { - String fieldName = randomAlphaOfLengthBetween(5, 10); - fields.put(fieldName, new DocumentField(fieldName, values.v1())); - } - } + fields = GetResultTests.randomDocumentFields(XContentType.JSON).v1(); } SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, fields); if (frequently()) { @@ -109,7 +94,8 @@ public class SearchHitTests extends ESTestCase { int size = randomIntBetween(0, 5); Map highlightFields = new HashMap<>(size); for (int i = 0; i < size; i++) { - highlightFields.put(randomAlphaOfLength(5), HighlightFieldTests.createTestItem()); + HighlightField testItem = HighlightFieldTests.createTestItem(); + highlightFields.put(testItem.getName(), testItem); } hit.highlightFields(highlightFields); } @@ -133,9 +119,10 @@ public class SearchHitTests 
extends ESTestCase { hit.setInnerHits(innerHits); } if (randomBoolean()) { + String index = randomAlphaOfLengthBetween(5, 10); + String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10), - new ShardId(new Index(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)), randomInt()), null, - OriginalIndices.NONE)); + new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias, OriginalIndices.NONE)); } return hit; } diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index d1a9a15a393..d69039b72f5 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -46,13 +46,13 @@ public class SearchSortValuesTests extends ESTestCase { List> valueSuppliers = new ArrayList<>(); // this should reflect all values that are allowed to go through the transport layer valueSuppliers.add(() -> null); - valueSuppliers.add(() -> randomInt()); - valueSuppliers.add(() -> randomLong()); - valueSuppliers.add(() -> randomDouble()); - valueSuppliers.add(() -> randomFloat()); - valueSuppliers.add(() -> randomByte()); - valueSuppliers.add(() -> randomShort()); - valueSuppliers.add(() -> randomBoolean()); + valueSuppliers.add(ESTestCase::randomInt); + valueSuppliers.add(ESTestCase::randomLong); + valueSuppliers.add(ESTestCase::randomDouble); + valueSuppliers.add(ESTestCase::randomFloat); + valueSuppliers.add(ESTestCase::randomByte); + valueSuppliers.add(ESTestCase::randomShort); + valueSuppliers.add(ESTestCase::randomBoolean); valueSuppliers.add(() -> frequently() ? 
randomAlphaOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30)); int size = randomIntBetween(1, 20); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 96811ce424c..a14fca63154 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -51,14 +49,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati public void setUp() throws Exception { super.setUp(); format = randomNumericDocValueFormat(); - - roundingInfos = new RoundingInfo[6]; - roundingInfos[0] = new RoundingInfo(Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(), 1, 5, 10, 30); - roundingInfos[1] = new RoundingInfo(Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(), 1, 5, 10, 30); - roundingInfos[2] = new RoundingInfo(Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(), 1, 3, 12); - roundingInfos[3] = new RoundingInfo(Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(), 1, 7); - roundingInfos[4] = new RoundingInfo(Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(), 1, 3); - roundingInfos[5] = new RoundingInfo(Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(), 1, 10, 20, 50, 100); } @Override @@ -66,6 +56,8 @@ public class 
InternalAutoDateHistogramTests extends InternalMultiBucketAggregati List pipelineAggregators, Map metaData, InternalAggregations aggregations) { + + roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null); int nbBuckets = randomNumberOfBuckets(); int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); List buckets = new ArrayList<>(nbBuckets); @@ -81,6 +73,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList()); BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations); + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); } @@ -92,13 +85,50 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati roundingIdx = histogram.getBucketInfo().roundingIdx; } } - Map expectedCounts = new TreeMap<>(); - for (Histogram histogram : inputs) { + RoundingInfo roundingInfo = roundingInfos[roundingIdx]; + + long lowest = Long.MAX_VALUE; + long highest = 0; + for (InternalAutoDateHistogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { - expectedCounts.compute(roundingInfos[roundingIdx].rounding.round(((DateTime) bucket.getKey()).getMillis()), - (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); + long bucketKey = ((DateTime) bucket.getKey()).getMillis(); + if (bucketKey < lowest) { + lowest = bucketKey; + } + if (bucketKey > highest) { + highest = bucketKey; + } } } + long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis(); + long innerIntervalToUse = 0; + for (int interval : roundingInfo.innerIntervals) { + if (normalizedDuration / interval < maxNumberOfBuckets()) { + innerIntervalToUse = interval; + } + } + Map expectedCounts = new TreeMap<>(); + long intervalInMillis = innerIntervalToUse*roundingInfo.getRoughEstimateDurationMillis(); + for (long keyForBucket = roundingInfo.rounding.round(lowest); + keyForBucket <= highest; + keyForBucket = keyForBucket + intervalInMillis) { + expectedCounts.put(keyForBucket, 0L); + + for (InternalAutoDateHistogram histogram : inputs) { + for (Histogram.Bucket bucket : histogram.getBuckets()) { + long bucketKey = ((DateTime) bucket.getKey()).getMillis(); + long roundedBucketKey = roundingInfo.rounding.round(bucketKey); + if (roundedBucketKey >= keyForBucket + && roundedBucketKey < keyForBucket + intervalInMillis) { + long count = bucket.getDocCount(); + expectedCounts.compute(keyForBucket, + (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + count); + } + } + } + } + + Map actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), @@ -117,12 +147,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati return ParsedAutoDateHistogram.class; } - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215") - public void testReduceRandom() { - super.testReduceRandom(); - } - @Override protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 6de15146fae..4f725ed2f11 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -105,6 +105,7 @@ public class QueryProfilerIT extends ESIntegTestCase { * search for each query. It then does some basic sanity checking of score and hits * to make sure the profiling doesn't interfere with the hits being returned */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32492") public void testProfileMatchesRegular() throws Exception { createIndex("test"); ensureGreen(); diff --git a/test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java b/test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java new file mode 100644 index 00000000000..35ad331f532 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverter.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +import java.util.Arrays; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.pattern.ConverterKeys; +import org.apache.logging.log4j.core.pattern.LogEventPatternConverter; +import org.apache.logging.log4j.core.pattern.PatternConverter; +import org.elasticsearch.test.ESIntegTestCase; + +/** + * Converts {@code %test_thread_info} in log4j patterns into information + * based on the logging thread's name. If that thread is part of an + * {@link ESIntegTestCase} then this information is the node name. + */ +@Plugin(category = PatternConverter.CATEGORY, name = "TestInfoPatternConverter") +@ConverterKeys({"test_thread_info"}) +public class TestThreadInfoPatternConverter extends LogEventPatternConverter { + /** + * Called by log4j2 to initialize this converter. 
+ */ + public static TestThreadInfoPatternConverter newInstance(final String[] options) { + if (options.length > 0) { + throw new IllegalArgumentException("no options supported but options provided: " + + Arrays.toString(options)); + } + return new TestThreadInfoPatternConverter(); + } + + private TestThreadInfoPatternConverter() { + super("TestInfo", "test_thread_info"); + } + + @Override + public void format(LogEvent event, StringBuilder toAppendTo) { + toAppendTo.append(threadInfo(event.getThreadName())); + } + + private static final Pattern ELASTICSEARCH_THREAD_NAME_PATTERN = + Pattern.compile("elasticsearch\\[(.+)\\]\\[.+\\].+"); + private static final Pattern TEST_THREAD_NAME_PATTERN = + Pattern.compile("TEST-.+\\.(.+)-seed#\\[.+\\]"); + private static final Pattern TEST_SUITE_INIT_THREAD_NAME_PATTERN = + Pattern.compile("SUITE-.+-worker"); + private static final Pattern NOT_YET_NAMED_NODE_THREAD_NAME_PATTERN = + Pattern.compile("test_SUITE-CHILD_VM.+cluster\\[T#(.+)\\]"); + static String threadInfo(String threadName) { + Matcher m = ELASTICSEARCH_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + // Thread looks like a node thread so use the node name + return m.group(1); + } + m = TEST_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + /* + * Thread looks like a test thread so use the test method name. + * It'd be pretty reasonable not to use a prefix at all here but + * the logger layout pretty much expects one and the test method + * can be pretty nice to have around anyway. + */ + return m.group(1); + } + m = TEST_SUITE_INIT_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + /* + * Thread looks like test suite initialization or tear down and + * we don't have any more information to give. Like above, we + * could spit out nothing here but the logger layout expects + * something and it *is* nice to know what lines come from test + * teardown and initialization. 
+ */ + return "suite"; + } + m = NOT_YET_NAMED_NODE_THREAD_NAME_PATTERN.matcher(threadName); + if (m.matches()) { + /* + * These are as yet unnamed integ test nodes. I'd prefer to log + * the node name but I don't have it yet. + */ + return "integ_" + m.group(1) + ""; + } + /* + * These are uncategorized threads. We log the entire thread name in + * case it is useful. We wrap it in `[]` so you tell that it is a + * thread name rather than a node name or something. + */ + return "[" + threadName + "]"; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index ee4f2f3b222..1498a69cf6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -65,6 +65,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -200,6 +201,8 @@ public abstract class ESTestCase extends LuceneTestCase { System.setProperty("log4j.shutdownHookEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); + LogConfigurator.loadLog4jPlugins(); + // Enable Netty leak detection and monitor logger for logged leak errors System.setProperty("io.netty.leakDetection.level", "paranoid"); String leakLoggerName = "io.netty.util.ResourceLeakDetector"; @@ -321,7 +324,7 @@ public abstract class ESTestCase extends LuceneTestCase { @Before public final void before() { - logger.info("[{}]: before test", getTestName()); + logger.info("{}before test", getTestParamsForLogging()); assertNull("Thread context initialized twice", 
threadContext); if (enableWarningsCheck()) { this.threadContext = new ThreadContext(Settings.EMPTY); @@ -350,7 +353,16 @@ public abstract class ESTestCase extends LuceneTestCase { } ensureAllSearchContextsReleased(); ensureCheckIndexPassed(); - logger.info("[{}]: after test", getTestName()); + logger.info("{}after test", getTestParamsForLogging()); + } + + private String getTestParamsForLogging() { + String name = getTestName(); + int start = name.indexOf('{'); + if (start < 0) return ""; + int end = name.lastIndexOf('}'); + if (end < 0) return ""; + return "[" + name.substring(start + 1, end) + "] "; } private void ensureNoWarnings() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index 06eefb7ccba..68434f0f29e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -48,7 +48,7 @@ import java.util.List; import java.util.Random; import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween; -import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiOfLength; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiLettersOfLength; import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomUnicodeOfLengthBetween; import static java.util.Collections.singleton; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE; @@ -122,7 +122,7 @@ public final class RandomObjects { expectedParsedValues.add(randomBoolean); break; case 7: - String randomString = random.nextBoolean() ? RandomStrings.randomAsciiOfLengthBetween(random, 3, 10 ) : + String randomString = random.nextBoolean() ? 
RandomStrings.randomAsciiLettersOfLengthBetween(random, 3, 10) : randomUnicodeOfLengthBetween(random, 3, 10); originalValues.add(randomString); expectedParsedValues.add(randomString); @@ -191,11 +191,11 @@ public final class RandomObjects { for (int i = 0; i < numFields; i++) { if (currentDepth < 5 && random.nextInt(100) >= 70) { if (random.nextBoolean()) { - builder.startObject(RandomStrings.randomAsciiOfLengthBetween(random, 6, 10)); + builder.startObject(RandomStrings.randomAsciiLettersOfLengthBetween(random, 6, 10)); addFields(random, builder, minNumFields, currentDepth + 1); builder.endObject(); } else { - builder.startArray(RandomStrings.randomAsciiOfLengthBetween(random, 6, 10)); + builder.startArray(RandomStrings.randomAsciiLettersOfLengthBetween(random, 6, 10)); int numElements = randomIntBetween(random, 1, 5); boolean object = random.nextBoolean(); int dataType = -1; @@ -214,7 +214,7 @@ public final class RandomObjects { builder.endArray(); } } else { - builder.field(RandomStrings.randomAsciiOfLengthBetween(random, 6, 10), + builder.field(RandomStrings.randomAsciiLettersOfLengthBetween(random, 6, 10), randomFieldValue(random, randomDataType(random))); } } @@ -227,9 +227,9 @@ public final class RandomObjects { private static Object randomFieldValue(Random random, int dataType) { switch(dataType) { case 0: - return RandomStrings.randomAsciiOfLengthBetween(random, 3, 10); + return RandomStrings.randomAsciiLettersOfLengthBetween(random, 3, 10); case 1: - return RandomStrings.randomAsciiOfLengthBetween(random, 3, 10); + return RandomStrings.randomAsciiLettersOfLengthBetween(random, 3, 10); case 2: return random.nextLong(); case 3: @@ -287,10 +287,10 @@ public final class RandomObjects { * @param random Random generator */ private static Tuple randomShardInfoFailure(Random random) { - String index = randomAsciiOfLength(random, 5); - String indexUuid = randomAsciiOfLength(random, 5); + String index = randomAsciiLettersOfLength(random, 5); + String indexUuid = 
randomAsciiLettersOfLength(random, 5); int shardId = randomIntBetween(random, 1, 10); - String nodeId = randomAsciiOfLength(random, 5); + String nodeId = randomAsciiLettersOfLength(random, 5); RestStatus status = randomFrom(random, RestStatus.INTERNAL_SERVER_ERROR, RestStatus.FORBIDDEN, RestStatus.NOT_FOUND); boolean primary = random.nextBoolean(); ShardId shard = new ShardId(index, indexUuid, shardId); diff --git a/test/framework/src/main/resources/log4j2-test.properties b/test/framework/src/main/resources/log4j2-test.properties index 828555ed52e..842c9c79d79 100644 --- a/test/framework/src/main/resources/log4j2-test.properties +++ b/test/framework/src/main/resources/log4j2-test.properties @@ -1,7 +1,7 @@ appender.console.type = Console appender.console.name = console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n +appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%test_thread_info]%marker %m%n rootLogger.level = ${sys:tests.es.logger.level:-info} rootLogger.appenderRef.console.ref = console diff --git a/test/framework/src/main/resources/log4j2.component.properties b/test/framework/src/main/resources/log4j2.component.properties new file mode 100644 index 00000000000..8ce0fb493b7 --- /dev/null +++ b/test/framework/src/main/resources/log4j2.component.properties @@ -0,0 +1,9 @@ +# RandomizedRunner renames threads and we'd like to look at accurate thread +# names. Also, this is the default in log4j2's master branch for versions of +# Java after 1.8_102 which most of the versions we use are anyway. After that +# version of Java calling `Thread.getName()` doesn't allocate so UNCACHED +# ought to be faster than CACHED because it doesn't have to deal with +# ThreadLocals. +# While we don't use AsyncLogger, we do end up with mutable log events and +# those use this key for configuration. 
+AsyncLogger.ThreadNameStrategy = UNCACHED diff --git a/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java new file mode 100644 index 00000000000..8a98b867a46 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.logging; + +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import static org.elasticsearch.common.logging.TestThreadInfoPatternConverter.threadInfo; + +public class TestThreadInfoPatternConverterTests extends ESTestCase { + private static String suiteInfo; + + @BeforeClass + public static void captureSuiteInfo() { + suiteInfo = threadInfo(Thread.currentThread().getName()); + } + + public void testThreadInfo() { + // Threads that are part of a node get the node name + String nodeName = randomAlphaOfLength(5); + String threadName = EsExecutors.threadName(nodeName, randomAlphaOfLength(20)) + + "[T#" + between(0, 1000) + "]"; + assertEquals(nodeName, threadInfo(threadName)); + + // Test threads get the test name + assertEquals(getTestName(), threadInfo(Thread.currentThread().getName())); + + // Suite initialization gets "suite" + assertEquals("suite", suiteInfo); + + // And stuff that doesn't match anything gets wrapped in [] so we can see it + String unmatched = randomAlphaOfLength(5); + assertEquals("[" + unmatched + "]", threadInfo(unmatched)); + } +} diff --git a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc b/x-pack/docs/en/rest-api/ml/jobresource.asciidoc index fb3424cab3f..e2d35082fb8 100644 --- a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc +++ b/x-pack/docs/en/rest-api/ml/jobresource.asciidoc @@ -467,7 +467,7 @@ A condition has the following properties: (string) Specifies the condition operator. The available options are `gt` (greater than), `gte` (greater than or equals), `lt` (less than) and `lte` (less than or equals). `value`::: - (double) The value that is compared against the `applied_to` field using the `operator`. + (double) The value that is compared against the `applies_to` field using the `operator`. -- A rule is required to either have a non-empty scope or at least one condition. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index 505d2c55fa6..d299406aad0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -209,38 +209,44 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste } } - if (newLicense.isProductionLicense() - && XPackSettings.SECURITY_ENABLED.get(settings) - && XPackSettings.TRANSPORT_SSL_ENABLED.get(settings) == false - && isProductionMode(settings, clusterService.localNode())) { - // security is on but TLS is not configured we gonna fail the entire request and throw an exception - throw new IllegalStateException("Cannot install a [" + newLicense.operationMode() + - "] license unless TLS is configured or security is disabled"); + if (XPackSettings.SECURITY_ENABLED.get(settings)) { // TODO we should really validate that all nodes have xpack installed and are consistently configured but this // should happen on a different level and not in this code - } else { - clusterService.submitStateUpdateTask("register license [" + newLicense.uid() + "]", new - AckedClusterStateUpdateTask(request, listener) { - @Override - protected PutLicenseResponse newResponse(boolean acknowledged) { - return new PutLicenseResponse(acknowledged, LicensesStatus.VALID); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - XPackPlugin.checkReadyForXPackCustomMetadata(currentState); - MetaData currentMetadata = currentState.metaData(); - LicensesMetaData licensesMetaData = currentMetadata.custom(LicensesMetaData.TYPE); - Version trialVersion = null; - if (licensesMetaData != null) { - trialVersion = licensesMetaData.getMostRecentTrialVersion(); - } - MetaData.Builder mdBuilder = MetaData.builder(currentMetadata); - 
mdBuilder.putCustom(LicensesMetaData.TYPE, new LicensesMetaData(newLicense, trialVersion)); - return ClusterState.builder(currentState).metaData(mdBuilder).build(); - } - }); + if (newLicense.isProductionLicense() + && XPackSettings.TRANSPORT_SSL_ENABLED.get(settings) == false + && isProductionMode(settings, clusterService.localNode())) { + // security is on but TLS is not configured we gonna fail the entire request and throw an exception + throw new IllegalStateException("Cannot install a [" + newLicense.operationMode() + + "] license unless TLS is configured or security is disabled"); + } else if (XPackSettings.FIPS_MODE_ENABLED.get(settings) + && newLicense.operationMode() != License.OperationMode.PLATINUM + && newLicense.operationMode() != License.OperationMode.TRIAL) { + throw new IllegalStateException("Cannot install a [" + newLicense.operationMode() + + "] license unless FIPS mode is disabled"); + } } + + clusterService.submitStateUpdateTask("register license [" + newLicense.uid() + "]", new + AckedClusterStateUpdateTask(request, listener) { + @Override + protected PutLicenseResponse newResponse(boolean acknowledged) { + return new PutLicenseResponse(acknowledged, LicensesStatus.VALID); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + XPackPlugin.checkReadyForXPackCustomMetadata(currentState); + MetaData currentMetadata = currentState.metaData(); + LicensesMetaData licensesMetaData = currentMetadata.custom(LicensesMetaData.TYPE); + Version trialVersion = null; + if (licensesMetaData != null) { + trialVersion = licensesMetaData.getMostRecentTrialVersion(); + } + MetaData.Builder mdBuilder = MetaData.builder(currentMetadata); + mdBuilder.putCustom(LicensesMetaData.TYPE, new LicensesMetaData(newLicense, trialVersion)); + return ClusterState.builder(currentState).metaData(mdBuilder).build(); + } + }); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index e3a3e5a2fbd..31a91a61e45 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -93,6 +93,10 @@ public class XPackSettings { public static final Setting TOKEN_SERVICE_ENABLED_SETTING = Setting.boolSetting("xpack.security.authc.token.enabled", XPackSettings.HTTP_SSL_ENABLED::getRaw, Setting.Property.NodeScope); + /** Setting for enabling or disabling FIPS mode. Defaults to false */ + public static final Setting FIPS_MODE_ENABLED = + Setting.boolSetting("xpack.security.fips_mode.enabled", false, Property.NodeScope); + /** Setting for enabling or disabling sql. Defaults to true. */ public static final Setting SQL_ENABLED = Setting.boolSetting("xpack.sql.enabled", true, Setting.Property.NodeScope); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java index 559e0ab8d98..585fb372d32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java @@ -50,7 +50,7 @@ public final class GetPrivilegesRequest extends ActionRequest implements Applica @Override public Collection getApplicationNames() { - return Collections.singleton(application); + return application == null ? Collections.emptySet() : Collections.singleton(application); } public void privileges(String... 
privileges) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java index c068c77781b..e204d89b1c0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java @@ -135,7 +135,9 @@ public final class ConditionalClusterPrivileges { this.requestPredicate = request -> { if (request instanceof ApplicationPrivilegesRequest) { final ApplicationPrivilegesRequest privRequest = (ApplicationPrivilegesRequest) request; - return privRequest.getApplicationNames().stream().allMatch(application -> applicationPredicate.test(application)); + final Collection requestApplicationNames = privRequest.getApplicationNames(); + return requestApplicationNames.isEmpty() ? 
this.applicationNames.contains("*") + : requestApplicationNames.stream().allMatch(application -> applicationPredicate.test(application)); } return false; }; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java index 063f1f655a4..b9458c23ddc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java @@ -19,9 +19,9 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateW import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchRequestBuilder; import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequestBuilder; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchAction; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequestBuilder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java index 8a16755a6db..eb440ddc251 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.delete; import org.elasticsearch.action.Action; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; /** * This action deletes an watch from in memory, the scheduler and the index diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java index afea505aa4a..6779eec1ddf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.delete; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; /** * A delete document action request builder. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java deleted file mode 100644 index bda5ecabaa0..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchResponse.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.watcher.transport.actions.delete; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - -public class DeleteWatchResponse extends ActionResponse { - - private String id; - private long version; - private boolean found; - - public DeleteWatchResponse() { - } - - public DeleteWatchResponse(String id, long version, boolean found) { - this.id = id; - this.version = version; - this.found = found; - } - - public String getId() { - return id; - } - - public long getVersion() { - return version; - } - - public boolean isFound() { - return found; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - version = in.readVLong(); - found = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(id); - out.writeVLong(version); - out.writeBoolean(found); - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseFIPSTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseFIPSTests.java new file mode 100644 index 00000000000..c432a207fcb --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseFIPSTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.license; + +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.verify; + +public class LicenseFIPSTests extends AbstractLicenseServiceTestCase { + + public void testFIPSCheckWithAllowedLicense() throws Exception { + License newLicense = TestUtils.generateSignedLicense(randomFrom("trial", "platinum"), TimeValue.timeValueHours(24L)); + PutLicenseRequest request = new PutLicenseRequest(); + request.acknowledge(true); + request.license(newLicense); + Settings settings = Settings.builder() + .put("xpack.security.enabled", true) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.fips_mode.enabled", randomBoolean()) + .build(); + XPackLicenseState licenseState = new XPackLicenseState(settings); + + setInitialState(null, licenseState, settings); + licenseService.start(); + PlainActionFuture responseFuture = new PlainActionFuture<>(); + licenseService.registerLicense(request, responseFuture); + verify(clusterService).submitStateUpdateTask(any(String.class), any(ClusterStateUpdateTask.class)); + } + + public void testFIPSCheckWithoutAllowedLicense() throws Exception { + License newLicense = TestUtils.generateSignedLicense(randomFrom("gold", "standard"), TimeValue.timeValueHours(24L)); + PutLicenseRequest request = new PutLicenseRequest(); + request.acknowledge(true); + request.license(newLicense); + Settings settings = Settings.builder() + .put("xpack.security.enabled", true) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.fips_mode.enabled", true) + .build(); + XPackLicenseState licenseState = new XPackLicenseState(settings); + 
+ setInitialState(null, licenseState, settings); + licenseService.start(); + PlainActionFuture responseFuture = new PlainActionFuture<>(); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> licenseService.registerLicense(request, responseFuture)); + assertThat(e.getMessage(), + containsString("Cannot install a [" + newLicense.operationMode() + "] license unless FIPS mode is disabled")); + licenseService.stop(); + + settings = Settings.builder() + .put("xpack.security.enabled", true) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.fips_mode.enabled", false) + .build(); + licenseState = new XPackLicenseState(settings); + + setInitialState(null, licenseState, settings); + licenseService.start(); + licenseService.registerLicense(request, responseFuture); + verify(clusterService).submitStateUpdateTask(any(String.class), any(ClusterStateUpdateTask.class)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java index a5c1bbc98d1..9c113d4ff0f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java @@ -50,6 +50,7 @@ import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class ManageApplicationPrivilegesTests extends ESTestCase { @@ -140,6 +141,19 @@ public class ManageApplicationPrivilegesTests extends ESTestCase { assertThat(cloudAndSwiftypePredicate, not(predicateMatches(putKibana))); } + public void 
testSecurityForGetAllApplicationPrivileges() { + final GetPrivilegesRequest getAll = new GetPrivilegesRequest(); + getAll.application(null); + getAll.privileges(new String[0]); + + assertThat(getAll.validate(), nullValue()); + + final ManageApplicationPrivileges kibanaOnly = new ManageApplicationPrivileges(Sets.newHashSet("kibana-*")); + final ManageApplicationPrivileges allApps = new ManageApplicationPrivileges(Sets.newHashSet("*")); + + assertThat(kibanaOnly.getRequestPredicate(), not(predicateMatches(getAll))); + assertThat(allApps.getRequestPredicate(), predicateMatches(getAll)); + } private ManageApplicationPrivileges clone(ManageApplicationPrivileges original) { return new ManageApplicationPrivileges(new LinkedHashSet<>(original.getApplicationNames())); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 08618b45cf5..6183dd8665f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -46,7 +46,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java index 4a2c7b97195..cd5a720eef1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.bootstrap.BootstrapContext; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.core.XPackSettings; public class FIPS140JKSKeystoreBootstrapCheck implements BootstrapCheck { @@ -15,7 +16,7 @@ public class FIPS140JKSKeystoreBootstrapCheck implements BootstrapCheck { private final boolean fipsModeEnabled; FIPS140JKSKeystoreBootstrapCheck(Settings settings) { - this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings); + this.fipsModeEnabled = XPackSettings.FIPS_MODE_ENABLED.get(settings); } /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140LicenseBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140LicenseBootstrapCheck.java new file mode 100644 index 00000000000..d1bce0dcdd2 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140LicenseBootstrapCheck.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicenseService; + +import java.util.EnumSet; + +/** + * A bootstrap check which enforces the licensing of FIPS + */ +final class FIPS140LicenseBootstrapCheck implements BootstrapCheck { + + static final EnumSet ALLOWED_LICENSE_OPERATION_MODES = + EnumSet.of(License.OperationMode.PLATINUM, License.OperationMode.TRIAL); + + private final boolean isInFipsMode; + + FIPS140LicenseBootstrapCheck(boolean isInFipsMode) { + this.isInFipsMode = isInFipsMode; + } + + @Override + public BootstrapCheckResult check(BootstrapContext context) { + if (isInFipsMode) { + License license = LicenseService.getLicense(context.metaData); + if (license != null && ALLOWED_LICENSE_OPERATION_MODES.contains(license.operationMode()) == false) { + return BootstrapCheckResult.failure("FIPS mode is only allowed with a Platinum or Trial license"); + } + } + return BootstrapCheckResult.success(); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java index 57e12a211a3..751d63be4fb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java @@ -17,7 +17,7 @@ public class FIPS140PasswordHashingAlgorithmBootstrapCheck implements BootstrapC private final boolean fipsModeEnabled; FIPS140PasswordHashingAlgorithmBootstrapCheck(final Settings settings) { - this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings); + this.fipsModeEnabled = 
XPackSettings.FIPS_MODE_ENABLED.get(settings); } /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java index c766dd0ffaa..a8cb32d545e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java @@ -10,6 +10,7 @@ import org.elasticsearch.bootstrap.BootstrapContext; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.XPackSettings; import java.io.IOException; import java.io.UncheckedIOException; @@ -20,7 +21,7 @@ public class FIPS140SecureSettingsBootstrapCheck implements BootstrapCheck { private final Environment environment; FIPS140SecureSettingsBootstrapCheck(Settings settings, Environment environment) { - this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings); + this.fipsModeEnabled = XPackSettings.FIPS_MODE_ENABLED.get(settings); this.environment = environment; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index f4bb4b7eb3b..18bcfeb94a5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -176,7 +176,6 @@ import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import 
org.elasticsearch.xpack.security.authc.kerberos.KerberosRealmBootstrapCheck; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; @@ -256,8 +255,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw DiscoveryPlugin, MapperPlugin, ExtensiblePlugin { private static final Logger logger = Loggers.getLogger(Security.class); - static final Setting FIPS_MODE_ENABLED = - Setting.boolSetting("xpack.security.fips_mode.enabled", false, Property.NodeScope); static final Setting> AUDIT_OUTPUTS_SETTING = Setting.listSetting(SecurityField.setting("audit.outputs"), @@ -306,8 +303,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw new PasswordHashingAlgorithmBootstrapCheck(), new FIPS140SecureSettingsBootstrapCheck(settings, env), new FIPS140JKSKeystoreBootstrapCheck(settings), - new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings), - new KerberosRealmBootstrapCheck(env))); + new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings))); checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); this.bootstrapChecks = Collections.unmodifiableList(checks); Automatons.updateMaxDeterminizedStates(settings); @@ -595,7 +591,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw } // The following just apply in node mode - settingsList.add(FIPS_MODE_ENABLED); + settingsList.add(XPackSettings.FIPS_MODE_ENABLED); // IP Filter settings IPFilter.addSettings(settingsList); @@ -1002,7 +998,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw return new ValidateTLSOnJoin(XPackSettings.TRANSPORT_SSL_ENABLED.get(settings), DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)) .andThen(new ValidateUpgradedSecurityIndex()) - .andThen(new ValidateLicenseCanBeDeserialized()); + 
.andThen(new ValidateLicenseCanBeDeserialized()) + .andThen(new ValidateLicenseForFIPS(XPackSettings.FIPS_MODE_ENABLED.get(settings))); } return null; } @@ -1050,6 +1047,27 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw } } + static final class ValidateLicenseForFIPS implements BiConsumer { + private final boolean inFipsMode; + + ValidateLicenseForFIPS(boolean inFipsMode) { + this.inFipsMode = inFipsMode; + } + + @Override + public void accept(DiscoveryNode node, ClusterState state) { + if (inFipsMode) { + License license = LicenseService.getLicense(state.metaData()); + if (license != null && + FIPS140LicenseBootstrapCheck.ALLOWED_LICENSE_OPERATION_MODES.contains(license.operationMode()) == false) { + throw new IllegalStateException("FIPS mode cannot be used with a [" + license.operationMode() + + "] license. It is only allowed with a Platinum or Trial license."); + + } + } + } + } + @Override public void reloadSPI(ClassLoader loader) { securityExtensions.addAll(SecurityExtension.loadExtensions(loader)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java index 0284ae9a05f..8b80c1f1d1c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; /** @@ -152,6 +153,7 @@ public class Realms extends AbstractComponent implements Iterable { Settings realmsSettings = 
RealmSettings.get(settings); Set internalTypes = new HashSet<>(); List realms = new ArrayList<>(); + List kerberosRealmNames = new ArrayList<>(); for (String name : realmsSettings.names()) { Settings realmSettings = realmsSettings.getAsSettings(name); String type = realmSettings.get("type"); @@ -178,6 +180,13 @@ public class Realms extends AbstractComponent implements Iterable { } internalTypes.add(type); } + if (KerberosRealmSettings.TYPE.equals(type)) { + kerberosRealmNames.add(name); + if (kerberosRealmNames.size() > 1) { + throw new IllegalArgumentException("multiple realms " + kerberosRealmNames.toString() + " configured of type [" + type + + "], [" + type + "] can only have one such realm configured"); + } + } realms.add(factory.create(config)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java index 4226607f192..f28fe1c297f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java @@ -363,9 +363,16 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand { final Logger logger = ESLoggerFactory.getLogger(ESNativeRealmMigrateTool.class); Loggers.setLevel(logger, Level.ALL); + final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + final Configuration config = ctx.getConfiguration(); + // create appender final Appender appender = new AbstractAppender(ESNativeRealmMigrateTool.class.getName(), null, - PatternLayout.newBuilder().withPattern("%m").build()) { + PatternLayout.newBuilder() + // Specify the configuration so log4j doesn't re-initialize + .withConfiguration(config) + .withPattern("%m") + .build()) { @Override public void append(LogEvent 
event) { switch (event.getLevel().getStandardLevel()) { @@ -384,8 +391,6 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand { appender.start(); // get the config, detach from parent, remove appenders, add custom appender - final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); - final Configuration config = ctx.getConfiguration(); final LoggerConfig loggerConfig = config.getLoggerConfig(ESNativeRealmMigrateTool.class.getName()); loggerConfig.setParent(null); loggerConfig.getAppenders().forEach((s, a) -> Loggers.removeAppender(logger, a)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java index b4a8b6aabf0..71eeb8b2398 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.ietf.jgss.GSSException; +import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; import java.util.List; @@ -87,6 +88,16 @@ public final class KerberosRealm extends Realm implements CachingRealm { this.kerberosTicketValidator = kerberosTicketValidator; this.threadPool = threadPool; this.keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + + if (Files.exists(keytabPath) == false) { + throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] does not exist"); + } + if (Files.isDirectory(keytabPath)) { + throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] is a directory"); + } + if 
(Files.isReadable(keytabPath) == false) { + throw new IllegalArgumentException("configured service key tab file [" + keytabPath + "] must have read permission"); + } this.enableKerberosDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); this.removeRealmName = KerberosRealmSettings.SETTING_REMOVE_REALM_NAME.get(config.settings()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java deleted file mode 100644 index bab899a8664..00000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.security.authc.kerberos; - -import org.elasticsearch.bootstrap.BootstrapCheck; -import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.core.security.authc.RealmSettings; -import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Map; -import java.util.Map.Entry; - -/** - * This class is used to perform bootstrap checks for kerberos realm. - *

- * We use service keytabs for validating incoming kerberos tickets and is a - * required configuration. Due to JVM wide system properties for Kerberos we - * cannot support multiple Kerberos realms. This class adds checks for node to - * fail if service keytab does not exist or multiple kerberos realms have been - * configured. - */ -public class KerberosRealmBootstrapCheck implements BootstrapCheck { - private final Environment env; - - public KerberosRealmBootstrapCheck(final Environment env) { - this.env = env; - } - - @Override - public BootstrapCheckResult check(final BootstrapContext context) { - final Map realmsSettings = RealmSettings.getRealmSettings(context.settings); - boolean isKerberosRealmConfigured = false; - for (final Entry entry : realmsSettings.entrySet()) { - final String name = entry.getKey(); - final Settings realmSettings = entry.getValue(); - final String type = realmSettings.get("type"); - if (Strings.hasText(type) == false) { - return BootstrapCheckResult.failure("missing realm type for [" + name + "] realm"); - } - if (KerberosRealmSettings.TYPE.equals(type)) { - if (isKerberosRealmConfigured) { - return BootstrapCheckResult.failure( - "multiple [" + type + "] realms are configured. 
[" + type + "] can only have one such realm configured"); - } - isKerberosRealmConfigured = true; - - final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(realmSettings)); - if (Files.exists(keytabPath) == false) { - return BootstrapCheckResult.failure("configured service key tab file [" + keytabPath + "] does not exist"); - } - } - } - return BootstrapCheckResult.success(); - } - - @Override - public boolean alwaysEnforce() { - return true; - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java index 03ace7fb71d..bb0036e9f87 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/BulkUpdateTests.java @@ -5,15 +5,12 @@ */ package org.elasticsearch.integration; -import org.apache.http.Header; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; @@ -24,10 +21,8 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import java.io.IOException; -import java.util.Collections; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; public class BulkUpdateTests extends SecurityIntegTestCase { 
@@ -77,46 +72,48 @@ public class BulkUpdateTests extends SecurityIntegTestCase { public void testThatBulkUpdateDoesNotLoseFieldsHttp() throws IOException { final String path = "/index1/type/1"; - final Header basicAuthHeader = new BasicHeader("Authorization", - UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + final RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); + optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + final RequestOptions options = optionsBuilder.build(); - StringEntity body = new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON); - Response response = getRestClient().performRequest("PUT", path, Collections.emptyMap(), body, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(201)); + Request createRequest = new Request("PUT", path); + createRequest.setOptions(options); + createRequest.setJsonEntity("{\"test\":\"test\"}"); + getRestClient().performRequest(createRequest); - response = getRestClient().performRequest("GET", path, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - assertThat(EntityUtils.toString(response.getEntity()), containsString("\"test\":\"test\"")); + Request getRequest = new Request("GET", path); + getRequest.setOptions(options); + assertThat(EntityUtils.toString(getRestClient().performRequest(getRequest).getEntity()), containsString("\"test\":\"test\"")); if (randomBoolean()) { flushAndRefresh(); } //update with new field - body = new StringEntity("{\"doc\": {\"not test\": \"not test\"}}", ContentType.APPLICATION_JSON); - response = getRestClient().performRequest("POST", path + "/_update", Collections.emptyMap(), body, basicAuthHeader); - 
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + Request updateRequest = new Request("POST", path + "/_update"); + updateRequest.setOptions(options); + updateRequest.setJsonEntity("{\"doc\": {\"not test\": \"not test\"}}"); + getRestClient().performRequest(updateRequest); - response = getRestClient().performRequest("GET", path, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - String responseBody = EntityUtils.toString(response.getEntity()); - assertThat(responseBody, containsString("\"test\":\"test\"")); - assertThat(responseBody, containsString("\"not test\":\"not test\"")); + String afterUpdate = EntityUtils.toString(getRestClient().performRequest(getRequest).getEntity()); + assertThat(afterUpdate, containsString("\"test\":\"test\"")); + assertThat(afterUpdate, containsString("\"not test\":\"not test\"")); // this part is important. Without this, the document may be read from the translog which would bypass the bug where // FLS kicks in because the request can't be found and only returns meta fields flushAndRefresh(); - body = new StringEntity("{\"update\": {\"_index\": \"index1\", \"_type\": \"type\", \"_id\": \"1\"}}\n" + - "{\"doc\": {\"bulk updated\":\"bulk updated\"}}\n", ContentType.APPLICATION_JSON); - response = getRestClient().performRequest("POST", "/_bulk", Collections.emptyMap(), body, basicAuthHeader); - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.setOptions(options); + bulkRequest.setJsonEntity( + "{\"update\": {\"_index\": \"index1\", \"_type\": \"type\", \"_id\": \"1\"}}\n" + + "{\"doc\": {\"bulk updated\":\"bulk updated\"}}\n"); + getRestClient().performRequest(bulkRequest); - response = getRestClient().performRequest("GET", path, basicAuthHeader); - responseBody = EntityUtils.toString(response.getEntity()); - assertThat(responseBody, containsString("\"test\":\"test\"")); - 
assertThat(responseBody, containsString("\"not test\":\"not test\"")); - assertThat(responseBody, containsString("\"bulk updated\":\"bulk updated\"")); + String afterBulk = EntityUtils.toString(getRestClient().performRequest(getRequest).getEntity()); + assertThat(afterBulk, containsString("\"test\":\"test\"")); + assertThat(afterBulk, containsString("\"not test\":\"not test\"")); + assertThat(afterBulk, containsString("\"bulk updated\":\"bulk updated\"")); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java index 8b9e1954264..fc02a5c4d62 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java @@ -5,10 +5,11 @@ */ package org.elasticsearch.integration; -import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -160,10 +161,15 @@ public class ClearRealmsCacheTests extends SecurityIntegTestCase { } static void executeHttpRequest(String path, Map params) throws Exception { - Response response = getRestClient().performRequest("POST", path, params, - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); + Request request = new Request("POST", path); + for (Map.Entry param : params.entrySet()) { + request.addParameter(param.getKey(), 
param.getValue()); + } + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + Response response = getRestClient().performRequest(request); assertNotNull(response.getEntity()); assertTrue(EntityUtils.toString(response.getEntity()).contains("cluster_name")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java index 825bfcd432f..57262822982 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.integration; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -388,9 +389,12 @@ public class IndexPrivilegeTests extends AbstractPrivilegeTestCase { public void testThatUnknownUserIsRejectedProperly() throws Exception { try { - getRestClient().performRequest("GET", "/", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue("idonotexist", new SecureString("passwd".toCharArray())))); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", + UsernamePasswordToken.basicAuthHeaderValue("idonotexist", new SecureString("passwd".toCharArray()))); + request.setOptions(options); + 
getRestClient().performRequest(request); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index 012050f4259..7a35b0bc422 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.license; -import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -15,6 +14,8 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.NoNodeAvailableException; @@ -189,31 +190,36 @@ public class LicensingTests extends SecurityIntegTestCase { } public void testRestAuthenticationByLicenseType() throws Exception { - Response response = getRestClient().performRequest("GET", "/"); + Response unauthorizedRootResponse = getRestClient().performRequest(new Request("GET", "/")); // the default of the licensing tests is basic - assertThat(response.getStatusLine().getStatusCode(), is(200)); + assertThat(unauthorizedRootResponse.getStatusLine().getStatusCode(), is(200)); ResponseException e = expectThrows(ResponseException.class, - () -> getRestClient().performRequest("GET", 
"/_xpack/security/_authenticate")); + () -> getRestClient().performRequest(new Request("GET", "/_xpack/security/_authenticate"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); // generate a new license with a mode that enables auth License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.TRIAL, License.OperationMode.PLATINUM, License.OperationMode.STANDARD); enableLicensing(mode); - e = expectThrows(ResponseException.class, () -> getRestClient().performRequest("GET", "/")); + e = expectThrows(ResponseException.class, () -> getRestClient().performRequest(new Request("GET", "/"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); e = expectThrows(ResponseException.class, - () -> getRestClient().performRequest("GET", "/_xpack/security/_authenticate")); + () -> getRestClient().performRequest(new Request("GET", "/_xpack/security/_authenticate"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); - final String basicAuthValue = UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())); - response = getRestClient().performRequest("GET", "/", new BasicHeader("Authorization", basicAuthValue)); - assertThat(response.getStatusLine().getStatusCode(), is(200)); - response = getRestClient().performRequest("GET", "/_xpack/security/_authenticate", - new BasicHeader("Authorization", basicAuthValue)); - assertThat(response.getStatusLine().getStatusCode(), is(200)); + RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); + optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + RequestOptions options = optionsBuilder.build(); + Request rootRequest = new Request("GET", "/"); + rootRequest.setOptions(options); + Response 
authorizedRootResponse = getRestClient().performRequest(rootRequest); + assertThat(authorizedRootResponse.getStatusLine().getStatusCode(), is(200)); + Request authenticateRequest = new Request("GET", "/_xpack/security/_authenticate"); + authenticateRequest.setOptions(options); + Response authorizedAuthenticateResponse = getRestClient().performRequest(authenticateRequest); + assertThat(authorizedAuthenticateResponse.getStatusLine().getStatusCode(), is(200)); } public void testSecurityActionsByLicenseType() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java index 72550832779..af5b73d889d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java @@ -5,12 +5,8 @@ */ package org.elasticsearch.test; - -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.set.Sets; @@ -26,7 +22,6 @@ import org.junit.Before; import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.Set; /** @@ -82,23 +77,22 @@ public abstract class NativeRealmIntegTestCase extends SecurityIntegTestCase { public void setupReservedPasswords(RestClient restClient) throws IOException { logger.info("setting up reserved passwords for test"); { - String payload = "{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"; - HttpEntity entity = new NStringEntity(payload, 
ContentType.APPLICATION_JSON); - BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD)); - String route = "/_xpack/security/user/elastic/_password"; - Response response = restClient.performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); - assertEquals(response.getStatusLine().getReasonPhrase(), 200, response.getStatusLine().getStatusCode()); + Request request = new Request("PUT", "/_xpack/security/user/elastic/_password"); + request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD)); + request.setOptions(options); + restClient.performRequest(request); } + RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); + optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword)); + RequestOptions options = optionsBuilder.build(); for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) { - String payload = "{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"; - HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); - BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword)); - String route = "/_xpack/security/user/" + username + "/_password"; - Response response = restClient.performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); - assertEquals(response.getStatusLine().getReasonPhrase(), 200, response.getStatusLine().getStatusCode()); + Request request = new Request("PUT", "/_xpack/security/user/" + username + "/_password"); + 
request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"); + request.setOptions(options); + restClient.performRequest(request); } logger.info("setting up reserved passwords finished"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140LicenseBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140LicenseBootstrapCheckTests.java new file mode 100644 index 00000000000..a2ec8f9fb20 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140LicenseBootstrapCheckTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.license.License; +import org.elasticsearch.license.TestUtils; +import org.elasticsearch.test.ESTestCase; + +public class FIPS140LicenseBootstrapCheckTests extends ESTestCase { + + public void testBootstrapCheck() throws Exception { + assertTrue(new FIPS140LicenseBootstrapCheck(false) + .check(new BootstrapContext(Settings.EMPTY, MetaData.EMPTY_META_DATA)).isSuccess()); + assertTrue(new FIPS140LicenseBootstrapCheck(randomBoolean()) + .check(new BootstrapContext(Settings.EMPTY, MetaData.EMPTY_META_DATA)).isSuccess()); + + License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(24)); + MetaData.Builder builder = MetaData.builder(); + TestUtils.putLicense(builder, license); + MetaData metaData = builder.build(); + if 
(FIPS140LicenseBootstrapCheck.ALLOWED_LICENSE_OPERATION_MODES.contains(license.operationMode())) { + assertTrue(new FIPS140LicenseBootstrapCheck(true).check(new BootstrapContext( + Settings.builder().put("xpack.security.fips_mode.enabled", true).build(), metaData)).isSuccess()); + assertTrue(new FIPS140LicenseBootstrapCheck(false).check(new BootstrapContext( + Settings.builder().put("xpack.security.fips_mode.enabled", false).build(), metaData)).isSuccess()); + } else { + assertTrue(new FIPS140LicenseBootstrapCheck(false).check(new BootstrapContext( + Settings.builder().put("xpack.security.fips_mode.enabled", false).build(), metaData)).isSuccess()); + assertTrue(new FIPS140LicenseBootstrapCheck(true).check(new BootstrapContext( + Settings.builder().put("xpack.security.fips_mode.enabled", true).build(), metaData)).isFailure()); + assertEquals("FIPS mode is only allowed with a Platinum or Trial license", + new FIPS140LicenseBootstrapCheck(true).check(new BootstrapContext( + Settings.builder().put("xpack.security.fips_mode.enabled", true).build(), metaData)).getMessage()); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java index b424986876a..8632400866a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java @@ -21,7 +21,7 @@ public class FIPS140PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCa public void testPBKDF2AlgorithmIsAllowed() { { final Settings settings = Settings.builder() - .put(Security.FIPS_MODE_ENABLED.getKey(), true) + .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) 
.put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000") .build(); final BootstrapCheck.BootstrapCheckResult result = @@ -31,7 +31,7 @@ public class FIPS140PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCa { final Settings settings = Settings.builder() - .put(Security.FIPS_MODE_ENABLED.getKey(), true) + .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) .put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2") .build(); final BootstrapCheck.BootstrapCheckResult result = @@ -49,7 +49,7 @@ public class FIPS140PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCa } private void runBCRYPTTest(final boolean fipsModeEnabled, final String passwordHashingAlgorithm) { - final Settings.Builder builder = Settings.builder().put(Security.FIPS_MODE_ENABLED.getKey(), fipsModeEnabled); + final Settings.Builder builder = Settings.builder().put(XPackSettings.FIPS_MODE_ENABLED.getKey(), fipsModeEnabled); if (passwordHashingAlgorithm != null) { builder.put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), passwordHashingAlgorithm); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java index 368449adeed..d57a5d151a9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityPluginTests.java @@ -5,14 +5,14 @@ */ package org.elasticsearch.xpack.security; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import 
org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import java.io.IOException; @@ -31,17 +31,20 @@ public class SecurityPluginTests extends SecurityIntegTestCase { public void testThatPluginIsLoaded() throws IOException { try { logger.info("executing unauthorized request to /_xpack info"); - getRestClient().performRequest("GET", "/_xpack"); + getRestClient().performRequest(new Request("GET", "/_xpack")); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(UNAUTHORIZED.getStatus())); } logger.info("executing authorized request to /_xpack infos"); - Response response = getRestClient().performRequest("GET", "/_xpack", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); + + Request request = new Request("GET", "/_xpack"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + Response response = getRestClient().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(OK.getStatus())); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index e88b1905a7a..857b1694ac8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -292,6 +292,26 @@ public class SecurityTests extends ESTestCase { assertThat(e.getMessage(), containsString("cannot 
deserialize the license format")); } + public void testJoinValidatorForFIPSLicense() throws Exception { + DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), + VersionUtils.randomVersionBetween(random(), null, Version.CURRENT)); + MetaData.Builder builder = MetaData.builder(); + License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(24)); + TestUtils.putLicense(builder, license); + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metaData(builder.build()).build(); + new Security.ValidateLicenseForFIPS(false).accept(node, state); + + final boolean isLicenseValidForFips = + FIPS140LicenseBootstrapCheck.ALLOWED_LICENSE_OPERATION_MODES.contains(license.operationMode()); + if (isLicenseValidForFips) { + new Security.ValidateLicenseForFIPS(true).accept(node, state); + } else { + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> new Security.ValidateLicenseForFIPS(true).accept(node, state)); + assertThat(e.getMessage(), containsString("FIPS mode cannot be used")); + } + } + public void testIndexJoinValidator_Old_And_Rolling() throws Exception { createComponents(Settings.EMPTY); BiConsumer joinValidator = security.getJoinValidator(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index 453820ea519..ef3c6aa56ae 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -5,12 +5,13 @@ */ package org.elasticsearch.xpack.security.audit.index; -import org.apache.http.message.BasicHeader; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import 
org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Requests; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -89,10 +90,12 @@ public class AuditTrailTests extends SecurityIntegTestCase { public void testAuditAccessDeniedWithRunAsUser() throws Exception { try { - getRestClient().performRequest("GET", "/.security/_search", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, EXECUTE_USER)); + Request request = new Request("GET", "/.security/_search"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, EXECUTE_USER); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch (final ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -111,10 +114,12 @@ public class AuditTrailTests extends SecurityIntegTestCase { public void testAuditRunAsDeniedEmptyUser() throws Exception { try { - getRestClient().performRequest("GET", "/.security/_search", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "")); + Request request = new Request("GET", "/.security/_search"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + 
options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch (final ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java index a71f5cb1cf7..9d795826298 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.junit.Before; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -51,13 +52,16 @@ public class RealmsTests extends ESTestCase { private XPackLicenseState licenseState; private ThreadContext threadContext; private ReservedRealm reservedRealm; + private int randomRealmTypesCount; @Before public void init() throws Exception { factories = new HashMap<>(); factories.put(FileRealmSettings.TYPE, config -> new DummyRealm(FileRealmSettings.TYPE, config)); factories.put(NativeRealmSettings.TYPE, config -> new DummyRealm(NativeRealmSettings.TYPE, config)); - for (int i = 0; i < randomIntBetween(1, 5); i++) { + factories.put(KerberosRealmSettings.TYPE, config -> new DummyRealm(KerberosRealmSettings.TYPE, config)); + randomRealmTypesCount = randomIntBetween(1, 5); + for (int i = 0; i < randomRealmTypesCount; i++) { String name = "type_" + i; factories.put(name, config -> new DummyRealm(name, config)); } @@ -73,13 +77,13 @@ 
public class RealmsTests extends ESTestCase { public void testWithSettings() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List orders = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List orders = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { orders.add(i); } Collections.shuffle(orders, random()); Map orderToIndex = new HashMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { builder.put("xpack.security.authc.realms.realm_" + i + ".type", "type_" + i); builder.put("xpack.security.authc.realms.realm_" + i + ".order", orders.get(i)); orderToIndex.put(orders.get(i), i); @@ -107,14 +111,14 @@ public class RealmsTests extends ESTestCase { public void testWithSettingsWhereDifferentRealmsHaveSameOrder() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List randomSeq = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List randomSeq = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { randomSeq.add(i); } Collections.shuffle(randomSeq, random()); TreeMap nameToRealmId = new TreeMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { int randomizedRealmId = randomSeq.get(i); String randomizedRealmName = randomAlphaOfLengthBetween(12,32); nameToRealmId.put("realm_" + randomizedRealmName, randomizedRealmId); @@ -181,13 +185,13 @@ public class RealmsTests extends ESTestCase { public void testUnlicensedWithOnlyCustomRealms() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List orders = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List orders = new ArrayList<>(randomRealmTypesCount); + 
for (int i = 0; i < randomRealmTypesCount; i++) { orders.add(i); } Collections.shuffle(orders, random()); Map orderToIndex = new HashMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { builder.put("xpack.security.authc.realms.realm_" + i + ".type", "type_" + i); builder.put("xpack.security.authc.realms.realm_" + i + ".order", orders.get(i)); orderToIndex.put(orders.get(i), i); @@ -384,13 +388,13 @@ public class RealmsTests extends ESTestCase { public void testDisabledRealmsAreNotAdded() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()); - List orders = new ArrayList<>(factories.size() - 2); - for (int i = 0; i < factories.size() - 2; i++) { + List orders = new ArrayList<>(randomRealmTypesCount); + for (int i = 0; i < randomRealmTypesCount; i++) { orders.add(i); } Collections.shuffle(orders, random()); Map orderToIndex = new HashMap<>(); - for (int i = 0; i < factories.size() - 2; i++) { + for (int i = 0; i < randomRealmTypesCount; i++) { builder.put("xpack.security.authc.realms.realm_" + i + ".type", "type_" + i); builder.put("xpack.security.authc.realms.realm_" + i + ".order", orders.get(i)); boolean enabled = randomBoolean(); @@ -520,6 +524,20 @@ public class RealmsTests extends ESTestCase { } } + public void testInitRealmsFailsForMultipleKerberosRealms() throws IOException { + final Settings.Builder builder = Settings.builder().put("path.home", createTempDir()); + builder.put("xpack.security.authc.realms.realm_1.type", "kerberos"); + builder.put("xpack.security.authc.realms.realm_1.order", 1); + builder.put("xpack.security.authc.realms.realm_2.type", "kerberos"); + builder.put("xpack.security.authc.realms.realm_2.order", 2); + final Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> new Realms(settings, env, 
factories, licenseState, threadContext, reservedRealm)); + assertThat(iae.getMessage(), is(equalTo( + "multiple realms [realm_1, realm_2] configured of type [kerberos], [kerberos] can only have one such realm configured"))); + } + static class DummyRealm extends Realm { DummyRealm(String type, RealmConfig config) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java index 9311b9c02f6..6d5c6770bf2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.security.authc; -import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.Response; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.SecureString; @@ -126,11 +126,13 @@ public class RunAsIntegTests extends SecurityIntegTestCase { public void testUserImpersonationUsingHttp() throws Exception { // use the transport client user and try to run as try { - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(TRANSPORT_CLIENT_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); + Request request = 
new Request("GET", "/_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(TRANSPORT_CLIENT_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -139,10 +141,11 @@ public class RunAsIntegTests extends SecurityIntegTestCase { if (runAsHasSuperUserRole == false) { try { //the run as user shouldn't have access to the nodes api - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING))); + Request request = new Request("GET", "/_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)); + request.setOptions(options); + getRestClient().performRequest(request); fail("request should have failed"); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -150,12 +153,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { } // but when running as a different user it should work - Response response = getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); - assertThat(response.getStatusLine().getStatusCode(), is(200)); + 
getRestClient().performRequest(requestForUserRunAsUser(SecuritySettingsSource.TEST_USER_NAME)); } public void testEmptyUserImpersonationHeader() throws Exception { @@ -183,11 +181,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { public void testEmptyHeaderUsingHttp() throws Exception { try { - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "")); + getRestClient().performRequest(requestForUserRunAsUser("")); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); @@ -219,17 +213,22 @@ public class RunAsIntegTests extends SecurityIntegTestCase { public void testNonExistentRunAsUserUsingHttp() throws Exception { try { - getRestClient().performRequest("GET", "/_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "idontexist")); + getRestClient().performRequest(requestForUserRunAsUser("idontexist")); fail("request should have failed"); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); } } + private static Request requestForUserRunAsUser(String user) { + Request request = new Request("GET", "/_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)); + options.addHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, user); + request.setOptions(options); + return request; + } + // build our own here to better mimic an actual client... 
TransportClient getTransportClient(Settings extraSettings) { NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java deleted file mode 100644 index b6e1df9ddbb..00000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.security.authc.kerberos; - -import org.elasticsearch.bootstrap.BootstrapCheck; -import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.security.authc.RealmSettings; -import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; -import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; - -import java.io.IOException; -import java.nio.file.Path; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; - -public class KerberosRealmBootstrapCheckTests extends ESTestCase { - - public void testBootstrapCheckFailsForMultipleKerberosRealms() throws IOException { - final Path tempDir = createTempDir(); - final Settings settings1 = buildKerberosRealmSettings("kerb1", false, tempDir); - final Settings settings2 = buildKerberosRealmSettings("kerb2", false, tempDir); - final Settings settings3 = 
realm("pki1", PkiRealmSettings.TYPE, Settings.builder()).build(); - final Settings settings = - Settings.builder().put("path.home", tempDir).put(settings1).put(settings2).put(settings3).build(); - final BootstrapContext context = new BootstrapContext(settings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isFailure(), is(true)); - assertThat(result.getMessage(), equalTo("multiple [" + KerberosRealmSettings.TYPE + "] realms are configured. [" - + KerberosRealmSettings.TYPE + "] can only have one such realm configured")); - } - - public void testBootstrapCheckFailsForMissingKeytabFile() throws IOException { - final Path tempDir = createTempDir(); - final Settings settings = - Settings.builder().put("path.home", tempDir).put(buildKerberosRealmSettings("kerb1", true, tempDir)).build(); - final BootstrapContext context = new BootstrapContext(settings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isFailure(), is(true)); - assertThat(result.getMessage(), - equalTo("configured service key tab file [" + tempDir.resolve("kerb1.keytab").toString() + "] does not exist")); - } - - public void testBootstrapCheckFailsForMissingRealmType() throws IOException { - final Path tempDir = createTempDir(); - final String name = "kerb1"; - final Settings settings1 = buildKerberosRealmSettings("kerb1", false, tempDir); - final Settings settings2 = realm(name, randomFrom("", " "), Settings.builder()).build(); - final Settings settings = - Settings.builder().put("path.home", 
tempDir).put(settings1).put(settings2).build(); - final BootstrapContext context = new BootstrapContext(settings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isFailure(), is(true)); - assertThat(result.getMessage(), equalTo("missing realm type for [" + name + "] realm")); - } - - public void testBootstrapCheckSucceedsForCorrectConfiguration() throws IOException { - final Path tempDir = createTempDir(); - final Settings finalSettings = - Settings.builder().put("path.home", tempDir).put(buildKerberosRealmSettings("kerb1", false, tempDir)).build(); - final BootstrapContext context = new BootstrapContext(finalSettings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(finalSettings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isSuccess(), is(true)); - } - - public void testBootstrapCheckSucceedsForNoKerberosRealms() throws IOException { - final Path tempDir = createTempDir(); - final Settings finalSettings = Settings.builder().put("path.home", tempDir).build(); - final BootstrapContext context = new BootstrapContext(finalSettings, null); - final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = - new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(finalSettings)); - final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); - assertThat(result, is(notNullValue())); - assertThat(result.isSuccess(), is(true)); - } - - private Settings buildKerberosRealmSettings(final String name, final boolean missingKeytab, final Path tempDir) throws IOException { - final Settings.Builder 
builder = Settings.builder(); - if (missingKeytab == false) { - KerberosTestCase.writeKeyTab(tempDir.resolve(name + ".keytab"), null); - } - builder.put(KerberosTestCase.buildKerberosRealmSettings(tempDir.resolve(name + ".keytab").toString())); - return realm(name, KerberosRealmSettings.TYPE, builder).build(); - } - - private Settings.Builder realm(final String name, final String type, final Settings.Builder settings) { - final String prefix = RealmSettings.PREFIX + name + "."; - if (type != null) { - settings.put("type", type); - } - final Settings.Builder builder = Settings.builder().put(settings.normalizePrefix(prefix).build(), false); - return builder; - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 43536abaf29..0f972498ab3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -11,17 +11,30 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData; import 
org.ietf.jgss.GSSException; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; import java.util.Arrays; +import java.util.EnumSet; +import java.util.Set; import javax.security.auth.login.LoginException; @@ -31,6 +44,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.AdditionalMatchers.aryEq; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; @@ -94,4 +108,44 @@ public class KerberosRealmTests extends KerberosRealmTestCase { assertThat(future.actionGet(), is(nullValue())); } + public void testKerberosRealmWithInvalidKeytabPathConfigurations() throws IOException { + final String keytabPathCase = randomFrom("keytabPathAsDirectory", "keytabFileDoesNotExist", "keytabPathWithNoReadPermissions"); + final String expectedErrorMessage; + final String keytabPath; + final Set filePerms; + switch (keytabPathCase) { + case "keytabPathAsDirectory": + final String dirName = randomAlphaOfLength(5); + Files.createDirectory(dir.resolve(dirName)); + keytabPath = dir.resolve(dirName).toString(); + expectedErrorMessage = "configured service key tab file [" + keytabPath + "] is a directory"; + break; + case "keytabFileDoesNotExist": + keytabPath = dir.resolve(randomAlphaOfLength(5) + ".keytab").toString(); + expectedErrorMessage = "configured service key tab file [" + keytabPath + "] does not exist"; + break; + case "keytabPathWithNoReadPermissions": + filePerms = PosixFilePermissions.fromString("---------"); + 
final String keytabFileName = randomAlphaOfLength(5) + ".keytab"; + final FileAttribute> fileAttributes = PosixFilePermissions.asFileAttribute(filePerms); + try (SeekableByteChannel byteChannel = Files.newByteChannel(dir.resolve(keytabFileName), + EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), fileAttributes)) { + byteChannel.write(ByteBuffer.wrap(randomByteArrayOfLength(10))); + } + keytabPath = dir.resolve(keytabFileName).toString(); + expectedErrorMessage = "configured service key tab file [" + keytabPath + "] must have read permission"; + break; + default: + throw new IllegalArgumentException("Unknown test case :" + keytabPathCase); + } + + settings = KerberosTestCase.buildKerberosRealmSettings(keytabPath, 100, "10m", true, randomBoolean()); + config = new RealmConfig("test-kerb-realm", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), + new ThreadContext(globalSettings)); + mockNativeRoleMappingStore = roleMappingStore(Arrays.asList("user")); + mockKerberosTicketValidator = mock(KerberosTicketValidator.class); + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> new KerberosRealm(config, mockNativeRoleMappingStore, mockKerberosTicketValidator, threadPool, null)); + assertThat(iae.getMessage(), is(equalTo(expectedErrorMessage))); + } } \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java index 891f400c7be..cad9f689d3d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java @@ -133,7 +133,7 @@ public abstract class KerberosTestCase extends ESTestCase { * @param dir Directory where the key tab would be 
created. * @param princNames principal names to be created * @return {@link Path} to key tab file. - * @throws Exception + * @throws Exception thrown if principal or keytab could not be created */ protected Path createPrincipalKeyTab(final Path dir, final String... princNames) throws Exception { final Path path = dir.resolve(randomAlphaOfLength(10) + ".keytab"); @@ -146,7 +146,7 @@ public abstract class KerberosTestCase extends ESTestCase { * * @param principalName Principal name * @param password Password - * @throws Exception + * @throws Exception thrown if principal could not be created */ protected void createPrincipal(final String principalName, final char[] password) throws Exception { simpleKdcLdapServer.createPrincipal(principalName, new String(password)); @@ -168,8 +168,8 @@ public abstract class KerberosTestCase extends ESTestCase { * @param subject {@link Subject} * @param action {@link PrivilegedExceptionAction} action for performing inside * Subject.doAs - * @return Type of value as returned by PrivilegedAction - * @throws PrivilegedActionException + * @return T Type of value as returned by PrivilegedAction + * @throws PrivilegedActionException when privileged action threw exception */ static T doAsWrapper(final Subject subject, final PrivilegedExceptionAction action) throws PrivilegedActionException { return AccessController.doPrivileged((PrivilegedExceptionAction) () -> Subject.doAs(subject, action)); @@ -181,7 +181,7 @@ public abstract class KerberosTestCase extends ESTestCase { * @param keytabPath {@link Path} to keytab file. 
* @param content Content for keytab * @return key tab path - * @throws IOException + * @throws IOException if I/O error occurs while writing keytab file */ public static Path writeKeyTab(final Path keytabPath, final String content) throws IOException { try (BufferedWriter bufferedWriter = Files.newBufferedWriter(keytabPath, StandardCharsets.US_ASCII)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java index 426cacb1a03..53bf9e2b78d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java @@ -66,7 +66,7 @@ public class SimpleKdcLdapServer { * @param orgName Org name for base dn * @param domainName domain name for base dn * @param ldiff for ldap directory. 
- * @throws Exception + * @throws Exception when KDC or Ldap server initialization fails */ public SimpleKdcLdapServer(final Path workDir, final String orgName, final String domainName, final Path ldiff) throws Exception { this.workDir = workDir; @@ -194,7 +194,7 @@ public class SimpleKdcLdapServer { /** * Stop Simple Kdc Server * - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ public synchronized void stop() throws PrivilegedActionException { AccessController.doPrivileged(new PrivilegedExceptionAction() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java index 1f883b928bd..7fcca3d9f58 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java @@ -11,7 +11,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.security.authc.kerberos.KerberosTicketValidator; import org.ietf.jgss.GSSContext; import org.ietf.jgss.GSSCredential; @@ -67,8 +66,8 @@ class SpnegoClient implements AutoCloseable { * @param password password for client * @param servicePrincipalName Service principal name with whom this client * interacts with. 
- * @throws PrivilegedActionException - * @throws GSSException + * @throws PrivilegedActionException when privileged action threw exception + * @throws GSSException thrown when GSS API error occurs */ SpnegoClient(final String userPrincipalName, final SecureString password, final String servicePrincipalName) throws PrivilegedActionException, GSSException { @@ -99,7 +98,7 @@ class SpnegoClient implements AutoCloseable { * base64 encoded token to be sent to server. * * @return Base64 encoded token - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ String getBase64EncodedTokenForSpnegoHeader() throws PrivilegedActionException { final byte[] outToken = KerberosTestCase.doAsWrapper(loginContext.getSubject(), @@ -114,7 +113,7 @@ class SpnegoClient implements AutoCloseable { * gss negotiation * @return Base64 encoded token to be sent to server. May return {@code null} if * nothing to be sent. - * @throws PrivilegedActionException + * @throws PrivilegedActionException when privileged action threw exception */ String handleResponse(final String base64Token) throws PrivilegedActionException { if (gssContext.isEstablished()) { @@ -160,10 +159,9 @@ class SpnegoClient implements AutoCloseable { * * @param principal Principal name * @param password {@link SecureString} - * @param settings {@link Settings} * @return authenticated {@link LoginContext} instance. Note: This needs to be * closed {@link LoginContext#logout()} after usage. 
- * @throws LoginException + * @throws LoginException thrown if problem with login configuration or when login fails */ private static LoginContext loginUsingPassword(final String principal, final SecureString password) throws LoginException { final Set principals = Collections.singleton(new KerberosPrincipal(principal)); @@ -182,8 +180,8 @@ class SpnegoClient implements AutoCloseable { * Instead of an additional file setting as we do not want the options to be * customizable we are constructing it in memory. *

- * As we are uing this instead of jaas.conf, this requires refresh of - * {@link Configuration} and reqires appropriate security permissions to do so. + * As we are using this instead of jaas.conf, this requires refresh of + * {@link Configuration} and requires appropriate security permissions to do so. */ static class PasswordJaasConf extends Configuration { private final String principal; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java index d47ffe4a344..4fb94c74949 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiOptionalClientAuthTests.java @@ -5,8 +5,9 @@ */ package org.elasticsearch.xpack.security.authc.pki; -import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -76,13 +77,15 @@ public class PkiOptionalClientAuthTests extends SecuritySingleNodeTestCase { public void testRestClientWithoutClientCertificate() throws Exception { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(getSSLContext()); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { - ResponseException e = expectThrows(ResponseException.class, () -> restClient.performRequest("GET", "_nodes")); + ResponseException e = expectThrows(ResponseException.class, () -> restClient.performRequest(new Request("GET", "_nodes"))); assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); 
- Response response = restClient.performRequest("GET", "_nodes", - new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); + Request request = new Request("GET", "_nodes"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + Response response = restClient.performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java index 67bfc2ecdcb..13a124e4bdc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.xpack.security.rest.action; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; @@ -52,11 +53,12 @@ public class RestAuthenticateActionTests extends SecurityIntegTestCase { } public void testAuthenticateApi() throws Exception { - Response response = getRestClient().performRequest("GET", "/_xpack/security/_authenticate", - new BasicHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, - new 
SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())))); - assertThat(response.getStatusLine().getStatusCode(), is(200)); - ObjectPath objectPath = ObjectPath.createFromResponse(response); + Request request = new Request("GET", "/_xpack/security/_authenticate"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", basicAuthHeaderValue(SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); + request.setOptions(options); + ObjectPath objectPath = ObjectPath.createFromResponse(getRestClient().performRequest(request)); assertThat(objectPath.evaluate("username").toString(), equalTo(SecuritySettingsSource.TEST_USER_NAME)); List roles = objectPath.evaluate("roles"); assertThat(roles.size(), is(1)); @@ -65,7 +67,7 @@ public class RestAuthenticateActionTests extends SecurityIntegTestCase { public void testAuthenticateApiWithoutAuthentication() throws Exception { try { - Response response = getRestClient().performRequest("GET", "/_xpack/security/_authenticate"); + Response response = getRestClient().performRequest(new Request("GET", "/_xpack/security/_authenticate")); if (anonymousEnabled) { assertThat(response.getStatusLine().getStatusCode(), is(200)); ObjectPath objectPath = ObjectPath.createFromResponse(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java index 9529a12a30a..431b3e855c6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/AnonymousUserIntegTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.user; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; import 
org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -46,7 +47,7 @@ public class AnonymousUserIntegTests extends SecurityIntegTestCase { public void testAnonymousViaHttp() throws Exception { try { - getRestClient().performRequest("GET", "/_nodes"); + getRestClient().performRequest(new Request("GET", "/_nodes")); fail("request should have failed"); } catch(ResponseException e) { int statusCode = e.getResponse().getStatusLine().getStatusCode(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index d205c7cd933..21da604374f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -6,12 +6,13 @@ package org.elasticsearch.xpack.ssl; import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.transport.TransportClient; @@ -71,7 +72,7 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { public void testThatHttpFailsWithoutSslClientAuth() throws IOException { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(SSLContexts.createDefault(), NoopHostnameVerifier.INSTANCE); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { - 
restClient.performRequest("GET", "/"); + restClient.performRequest(new Request("GET", "/")); fail("Expected SSLHandshakeException"); } catch (IOException e) { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); @@ -87,8 +88,11 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { public void testThatHttpWorksWithSslClientAuth() throws IOException { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(getSSLContext(), NoopHostnameVerifier.INSTANCE); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { - Response response = restClient.performRequest("GET", "/", - new BasicHeader("Authorization", basicAuthHeaderValue(transportClientUsername(), transportClientPassword()))); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Authorization", basicAuthHeaderValue(transportClientUsername(), transportClientPassword())); + request.setOptions(options); + Response response = restClient.performRequest(request); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); assertThat(EntityUtils.toString(response.getEntity()), containsString("You Know, for Search")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index cf77ca975a4..e1896e01365 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ssl; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.io.PathUtils; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -173,6 +174,8 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { } public void testCertificateWithUntrustedNameFails() throws Exception { + // see https://github.com/elastic/elasticsearch/issues/29989 + assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); writeRestrictions("*.trusted"); try { tryConnect(untrustedCert); @@ -183,6 +186,8 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { } public void testRestrictionsAreReloaded() throws Exception { + // see https://github.com/elastic/elasticsearch/issues/29989 + assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); writeRestrictions("*"); assertBusy(() -> { try { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java index e41d52c1954..28eb884bbe8 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestDeleteWatchAction.java @@ -14,8 +14,8 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; import 
java.io.IOException; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java index d7ff25b623f..9cc94820554 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java @@ -18,8 +18,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; import java.util.function.Supplier; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index 18f151b4f6a..c07e6ba3db1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; import 
org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.condition.CompareCondition; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java index 64b913f96a0..62985cad1f1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/WatchRequestValidationTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.execution.ActionExecutionMode; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.activate.ActivateWatchRequest; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java index 76f71b9c95e..2f502cb95aa 100644 --- 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; -import org.elasticsearch.xpack.core.watcher.transport.actions.delete.DeleteWatchResponse; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.execute.ExecuteWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java similarity index 67% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequest.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java index f8c1a71f1eb..1ec83a8c05a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchRequest.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java @@ -1,9 +1,22 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.watcher.transport.actions.delete; +package org.elasticsearch.protocol.xpack.watcher; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -11,7 +24,6 @@ import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.xpack.core.watcher.support.WatcherUtils; import java.io.IOException; @@ -50,7 +62,7 @@ public class DeleteWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (id == null){ validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (WatcherUtils.isValidId(id) == false) { + } else if (PutWatchRequest.isValidId(id) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } return validationException; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java new file mode 
100644 index 00000000000..b644a6a854c --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser PARSER + = new ObjectParser<>("x_pack_delete_watch_response", DeleteWatchResponse::new); + static { + PARSER.declareString(DeleteWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(DeleteWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(DeleteWatchResponse::setFound, new ParseField("found")); + } + + 
private String id; + private long version; + private boolean found; + + public DeleteWatchResponse() { + } + + public DeleteWatchResponse(String id, long version, boolean found) { + this.id = id; + this.version = version; + this.found = found; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isFound() { + return found; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setFound(boolean found) { + this.found = found; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + DeleteWatchResponse that = (DeleteWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(found, that.found); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, found); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + found = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(found); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("found", found) + .endObject(); + } + + public static DeleteWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java new file mode 100644 index 
00000000000..1dbc4cec321 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class DeleteWatchResponseTests extends AbstractXContentTestCase { + + @Override + protected DeleteWatchResponse createTestInstance() { + String id = randomAlphaOfLength(10); + long version = randomLongBetween(1, 10); + boolean found = randomBoolean(); + return new DeleteWatchResponse(id, version, found); + } + + @Override + protected DeleteWatchResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteWatchResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java index 
d5928cb58f6..a36040d1ba8 100644 --- a/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java +++ b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java @@ -82,6 +82,7 @@ public class KerberosAuthenticationIT extends ESRestTestCase { assertOK(response); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32498") public void testLoginByKeytab() throws IOException, PrivilegedActionException { final String userPrincipalName = System.getProperty(TEST_USER_WITH_KEYTAB_KEY); final String keytabPath = System.getProperty(TEST_USER_WITH_KEYTAB_PATH_KEY); @@ -91,6 +92,7 @@ public class KerberosAuthenticationIT extends ESRestTestCase { executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32498") public void testLoginByUsernamePassword() throws IOException, PrivilegedActionException { final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); @@ -100,6 +102,10 @@ public class KerberosAuthenticationIT extends ESRestTestCase { executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); } + public void testSoDoesNotFailWithNoTests() { + // intentionally empty - this is just needed to ensure the build does not fail because we mute its only test. 
+ } + private void executeRequestAndVerifyResponse(final String userPrincipalName, final SpnegoHttpClientConfigCallbackHandler callbackHandler) throws PrivilegedActionException, IOException { final Request request = new Request("GET", "/_xpack/security/_authenticate"); diff --git a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java index 65ec595a0d4..f7bdb0d0baa 100644 --- a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java +++ b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java @@ -5,10 +5,11 @@ */ package org.elasticsearch.example.realm; -import org.apache.http.message.BasicHeader; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.NoNodeAvailableException; @@ -50,7 +51,7 @@ public class CustomRealmIT extends ESIntegTestCase { public void testHttpConnectionWithNoAuthentication() throws Exception { try { - getRestClient().performRequest("GET", "/"); + getRestClient().performRequest(new Request("GET", "/")); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); @@ -61,9 +62,12 @@ public class CustomRealmIT extends ESIntegTestCase { } public void testHttpAuthentication() throws Exception { - Response response = getRestClient().performRequest("GET", "/", - new BasicHeader(CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER), - new BasicHeader(CustomRealm.PW_HEADER, 
CustomRealm.KNOWN_PW.toString())); + Request request = new Request("GET", "/"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader(CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER); + options.addHeader(CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString()); + request.setOptions(options); + Response response = getRestClient().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); } diff --git a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java index bd3c53a3b41..57a895848e3 100644 --- a/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java +++ b/x-pack/qa/security-example-spi-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.example.role; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.network.NetworkModule; @@ -33,10 +34,17 @@ import static org.hamcrest.Matchers.is; * Integration test for custom roles providers. 
*/ public class CustomRolesProviderIT extends ESIntegTestCase { - private static final String TEST_USER = "test_user"; private static final String TEST_PWD = "change_me"; + private static final RequestOptions AUTH_OPTIONS; + static { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue(TEST_USER, new SecureString(TEST_PWD.toCharArray()))); + AUTH_OPTIONS = options.build(); + } + @Override protected Settings externalClusterClientSettings() { return Settings.builder() @@ -59,7 +67,9 @@ public class CustomRolesProviderIT extends ESIntegTestCase { public void testAuthorizedCustomRoleSucceeds() throws Exception { setupTestUser(ROLE_B); // roleB has all permissions on index "foo", so creating "foo" should succeed - Response response = getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); + Request request = new Request("PUT", "/" + INDEX); + request.setOptions(AUTH_OPTIONS); + Response response = getRestClient().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); } @@ -71,7 +81,9 @@ public class CustomRolesProviderIT extends ESIntegTestCase { setupTestUser(ROLE_A); // roleB has all permissions on index "foo", so creating "foo" should succeed try { - getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); + Request request = new Request("PUT", "/" + INDEX); + request.setOptions(AUTH_OPTIONS); + getRestClient().performRequest(request); fail(ROLE_A + " should not be authorized to create index " + INDEX); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -82,15 +94,12 @@ public class CustomRolesProviderIT extends ESIntegTestCase { setupTestUser("unknown"); // roleB has all permissions on index "foo", so creating "foo" should succeed try { - getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); + Request request = new Request("PUT", "/" + INDEX); + 
request.setOptions(AUTH_OPTIONS); + getRestClient().performRequest(request); fail(ROLE_A + " should not be authorized to create index " + INDEX); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); } } - - private BasicHeader authHeader() { - return new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - basicAuthHeaderValue(TEST_USER, new SecureString(TEST_PWD.toCharArray()))); - } } diff --git a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcShardFailureIT.java b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcShardFailureIT.java new file mode 100644 index 00000000000..e88b6b94d20 --- /dev/null +++ b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcShardFailureIT.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.qa.sql.nosecurity; + +import org.elasticsearch.client.Request; +import org.elasticsearch.xpack.qa.sql.jdbc.JdbcIntegrationTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; + +import static org.hamcrest.Matchers.containsString; + +public class JdbcShardFailureIT extends JdbcIntegrationTestCase { + @Before + public void createTestIndex() throws IOException { + Request createTest1 = new Request("PUT", "/test1"); + String body1 = "{\"aliases\":{\"test\":{}}, \"mappings\": {\"doc\": {\"properties\": {\"test_field\":{\"type\":\"integer\"}}}}}"; + createTest1.setJsonEntity(body1); + client().performRequest(createTest1); + + Request createTest2 = new Request("PUT", "/test2"); + String body2 = "{\"aliases\":{\"test\":{}}, \"mappings\": {\"doc\": {\"properties\": {\"test_field\":{\"type\":\"integer\"}}}}," + + "\"settings\": {\"index.routing.allocation.include.node\": \"nowhere\"}}"; + createTest2.setJsonEntity(body2); + createTest2.addParameter("timeout", "100ms"); + client().performRequest(createTest2); + + Request request = new Request("PUT", "/test1/doc/_bulk"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < 20; i++) { + bulk.append("{\"index\":{}}\n"); + bulk.append("{\"test_field\":" + i + "}\n"); + } + request.setJsonEntity(bulk.toString()); + client().performRequest(request); + } + + public void testPartialResponseHandling() throws SQLException { + try (Connection c = esJdbc(); Statement s = c.createStatement()) { + SQLException exception = expectThrows(SQLException.class, () -> s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")); + assertThat(exception.getMessage(), containsString("Search rejected due to missing shards")); + } + } +}