diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 6e628eab0cb..55fdcecb084 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -64,17 +64,11 @@ - - - - - - diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 6791b5f8259..c7a54a9ac32 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -268,8 +268,14 @@ final class RequestConverters { } static Request sourceExists(GetRequest getRequest) { - Request request = new Request(HttpHead.METHOD_NAME, endpoint(getRequest.index(), getRequest.type(), getRequest.id(), "_source")); - + String optionalType = getRequest.type(); + String endpoint; + if (optionalType.equals(MapperService.SINGLE_MAPPING_NAME)) { + endpoint = endpoint(getRequest.index(), "_source", getRequest.id()); + } else { + endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source"); + } + Request request = new Request(HttpHead.METHOD_NAME, endpoint); Params parameters = new Params(request); parameters.withPreference(getRequest.preference()); parameters.withRouting(getRequest.routing()); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java index 09b57e68ff5..b442336ca4d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java @@ -39,6 +39,10 @@ public final class AutoFollowStats { static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new 
ParseField("recent_auto_follow_errors"); static final ParseField LEADER_INDEX = new ParseField("leader_index"); static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception"); + static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters"); + static final ParseField CLUSTER_NAME = new ParseField("cluster_name"); + static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis"); + static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version"); @SuppressWarnings("unchecked") static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats", @@ -48,6 +52,10 @@ public final class AutoFollowStats { (Long) args[2], new TreeMap<>( ((List>) args[3]) + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), + new TreeMap<>( + ((List>) args[4]) .stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) )); @@ -57,6 +65,11 @@ public final class AutoFollowStats { "auto_follow_stats_errors", args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1])); + private static final ConstructingObjectParser, Void> AUTO_FOLLOWED_CLUSTERS_PARSER = + new ConstructingObjectParser<>( + "auto_followed_clusters", + args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2]))); + static { AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX); AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject( @@ -64,26 +77,35 @@ public final class AutoFollowStats { (p, c) -> ElasticsearchException.fromXContent(p), AUTO_FOLLOW_EXCEPTION); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS); + 
AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION); + STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER, RECENT_AUTO_FOLLOW_ERRORS); + STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER, + AUTO_FOLLOWED_CLUSTERS); } private final long numberOfFailedFollowIndices; private final long numberOfFailedRemoteClusterStateRequests; private final long numberOfSuccessfulFollowIndices; private final NavigableMap recentAutoFollowErrors; + private final NavigableMap autoFollowedClusters; AutoFollowStats(long numberOfFailedFollowIndices, long numberOfFailedRemoteClusterStateRequests, long numberOfSuccessfulFollowIndices, - NavigableMap recentAutoFollowErrors) { + NavigableMap recentAutoFollowErrors, + NavigableMap autoFollowedClusters) { this.numberOfFailedFollowIndices = numberOfFailedFollowIndices; this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests; this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices; this.recentAutoFollowErrors = recentAutoFollowErrors; + this.autoFollowedClusters = autoFollowedClusters; } public long getNumberOfFailedFollowIndices() { @@ -102,4 +124,27 @@ public final class AutoFollowStats { return recentAutoFollowErrors; } + public NavigableMap getAutoFollowedClusters() { + return autoFollowedClusters; + } + + public static class AutoFollowedCluster { + + private final long timeSinceLastCheckMillis; + private final long lastSeenMetadataVersion; + + public AutoFollowedCluster(long 
timeSinceLastCheckMillis, long lastSeenMetadataVersion) { + this.timeSinceLastCheckMillis = timeSinceLastCheckMillis; + this.lastSeenMetadataVersion = lastSeenMetadataVersion; + } + + public long getTimeSinceLastCheckMillis() { + return timeSinceLastCheckMillis; + } + + public long getLastSeenMetadataVersion() { + return lastSeenMetadataVersion; + } + } + } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index b50d2c1265e..fa0f1c5708c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -73,6 +73,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.rankeval.PrecisionAtK; @@ -115,6 +116,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.StringJoiner; +import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; @@ -156,6 +158,58 @@ public class RequestConvertersTests extends ESTestCase { getAndExistsWithTypeTest(RequestConverters::get, HttpGet.METHOD_NAME); } + public void testSourceExists() throws IOException { + doTestSourceExists((index, id) -> new GetRequest(index, id)); + } + + public void testSourceExistsWithType() throws IOException { + String type = frequently() ? 
randomAlphaOfLengthBetween(3, 10) : MapperService.SINGLE_MAPPING_NAME; + doTestSourceExists((index, id) -> new GetRequest(index, type, id)); + } + + private static void doTestSourceExists(BiFunction requestFunction) throws IOException { + String index = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + final GetRequest getRequest = requestFunction.apply(index, id); + + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + String preference = randomAlphaOfLengthBetween(3, 10); + getRequest.preference(preference); + expectedParams.put("preference", preference); + } + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + getRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + boolean realtime = randomBoolean(); + getRequest.realtime(realtime); + if (realtime == false) { + expectedParams.put("realtime", "false"); + } + } + if (randomBoolean()) { + boolean refresh = randomBoolean(); + getRequest.refresh(refresh); + if (refresh) { + expectedParams.put("refresh", "true"); + } + } + Request request = RequestConverters.sourceExists(getRequest); + assertEquals(HttpHead.METHOD_NAME, request.getMethod()); + String type = getRequest.type(); + if (type.equals(MapperService.SINGLE_MAPPING_NAME)) { + assertEquals("/" + index + "/_source/" + id, request.getEndpoint()); + } else { + assertEquals("/" + index + "/" + type + "/" + id + "/_source", request.getEndpoint()); + } + + assertEquals(expectedParams, request.getParameters()); + assertNull(request.getEntity()); + } + public void testMultiGet() throws IOException { Map expectedParams = new HashMap<>(); MultiGetRequest multiGetRequest = new MultiGetRequest(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java index 039e31151c4..8d53b5cde08 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.client.ccr; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.client.ccr.AutoFollowStats.AutoFollowedCluster; import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -185,6 +186,19 @@ public class CcrStatsResponseTests extends ESTestCase { builder.endObject(); } builder.endArray(); + builder.startArray(AutoFollowStats.AUTO_FOLLOWED_CLUSTERS.getPreferredName()); + for (Map.Entry entry : autoFollowStats.getAutoFollowedClusters().entrySet()) { + builder.startObject(); + { + builder.field(AutoFollowStats.CLUSTER_NAME.getPreferredName(), entry.getKey()); + builder.field(AutoFollowStats.TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(), + entry.getValue().getTimeSinceLastCheckMillis()); + builder.field(AutoFollowStats.LAST_SEEN_METADATA_VERSION.getPreferredName(), + entry.getValue().getLastSeenMetadataVersion()); + } + builder.endObject(); + } + builder.endArray(); } builder.endObject(); @@ -315,11 +329,16 @@ public class CcrStatsResponseTests extends ESTestCase { for (int i = 0; i < count; i++) { readExceptions.put("" + i, new ElasticsearchException(new IllegalStateException("index [" + i + "]"))); } + final NavigableMap autoFollowClusters = new TreeMap<>(); + for (int i = 0; i < count; i++) { + autoFollowClusters.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong())); + } return new AutoFollowStats( randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - readExceptions + readExceptions, + autoFollowClusters ); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index a9430b67aef..5279c19a415 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1265,7 +1265,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { assertEquals(3, getResponse.getSourceAsMap().size()); //tag::get-response String index = getResponse.getIndex(); - String type = getResponse.getType(); String id = getResponse.getId(); if (getResponse.isExists()) { long version = getResponse.getVersion(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java index 8bd285cd31f..6cd56774086 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java @@ -1317,6 +1317,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/36362") public void testInvalidateToken() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/security/invalidate-token.asciidoc b/docs/java-rest/high-level/security/invalidate-token.asciidoc index ecb3fedb56f..65e0f15bd86 100644 --- a/docs/java-rest/high-level/security/invalidate-token.asciidoc +++ b/docs/java-rest/high-level/security/invalidate-token.asciidoc @@ -36,4 +36,4 @@ The returned +{response}+ contains a single property: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- 
include-tagged::{doc-tests-file}[{api}-response] --------------------------------------------------- +-------------------------------------------------- \ No newline at end of file diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc index 2f9564294d0..a3d716ff2d9 100644 --- a/docs/plugins/ingest-attachment.asciidoc +++ b/docs/plugins/ingest-attachment.asciidoc @@ -63,6 +63,8 @@ Returns this: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 22, + "_primary_term": 1, "_source": { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", "attachment": { @@ -74,7 +76,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] To specify only some fields to be extracted: @@ -146,6 +148,8 @@ Returns this: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 35, + "_primary_term": 1, "_source": { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", "attachment": { @@ -157,7 +161,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] [source,js] @@ -194,6 +198,8 @@ Returns this: "_type": "_doc", "_id": "my_id_2", "_version": 1, + "_seq_no": 40, + "_primary_term": 1, "_source": { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", "max_size": 5, @@ -206,7 +212,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] [[ingest-attachment-with-arrays]] @@ -285,6 +291,8 @@ Returns this: "_type" : "_doc", "_id" : "my_id", "_version" : 1, + "_seq_no" : 50, + "_primary_term" : 1, "found" : 
true, "_source" : { "attachments" : [ @@ -312,7 +320,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Note that the `target_field` needs to be set, otherwise the diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 5d22a31baa8..f4795f6620a 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -75,6 +75,8 @@ Which returns: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 55, + "_primary_term": 1, "_source": { "ip": "8.8.8.8", "geoip": { @@ -85,7 +87,7 @@ Which returns: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/] Here is an example that uses the default country database and adds the geographical information to the `geo` field based on the `ip` field`. 
Note that @@ -124,6 +126,8 @@ returns this: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 65, + "_primary_term": 1, "_source": { "ip": "8.8.8.8", "geo": { @@ -133,7 +137,7 @@ returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Not all IP addresses find geo information from the database, When this @@ -174,13 +178,15 @@ Which returns: "_type" : "_doc", "_id" : "my_id", "_version" : 1, + "_seq_no" : 71, + "_primary_term": 1, "found" : true, "_source" : { "ip" : "80.231.5.0" } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] [[ingest-geoip-mappings-note]] ===== Recognizing Location as a Geopoint diff --git a/docs/plugins/ingest-user-agent.asciidoc b/docs/plugins/ingest-user-agent.asciidoc index 57594eab573..a0e6d3257f3 100644 --- a/docs/plugins/ingest-user-agent.asciidoc +++ b/docs/plugins/ingest-user-agent.asciidoc @@ -57,6 +57,8 @@ Which returns "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 22, + "_primary_term": 1, "_source": { "agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36", "user_agent": { @@ -73,7 +75,7 @@ Which returns } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/] ===== Using a custom regex file To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-user-agent` directory and diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index b8491e8a601..d849a99c459 100644 --- 
a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -105,7 +105,8 @@ The API returns the following results: "number_of_failed_follow_indices" : 0, "number_of_failed_remote_cluster_state_requests" : 0, "number_of_successful_follow_indices" : 1, - "recent_auto_follow_errors" : [] + "recent_auto_follow_errors" : [], + "auto_followed_clusters" : [] }, "follow_stats" : { "indices" : [ @@ -151,6 +152,7 @@ The API returns the following results: // TESTRESPONSE[s/"number_of_failed_remote_cluster_state_requests" : 0/"number_of_failed_remote_cluster_state_requests" : $body.auto_follow_stats.number_of_failed_remote_cluster_state_requests/] // TESTRESPONSE[s/"number_of_successful_follow_indices" : 1/"number_of_successful_follow_indices" : $body.auto_follow_stats.number_of_successful_follow_indices/] // TESTRESPONSE[s/"recent_auto_follow_errors" : \[\]/"recent_auto_follow_errors" : $body.auto_follow_stats.recent_auto_follow_errors/] +// TESTRESPONSE[s/"auto_followed_clusters" : \[\]/"auto_followed_clusters" : $body.auto_follow_stats.auto_followed_clusters/] // TESTRESPONSE[s/"leader_global_checkpoint" : 1024/"leader_global_checkpoint" : $body.follow_stats.indices.0.shards.0.leader_global_checkpoint/] // TESTRESPONSE[s/"leader_max_seq_no" : 1536/"leader_max_seq_no" : $body.follow_stats.indices.0.shards.0.leader_max_seq_no/] // TESTRESPONSE[s/"follower_global_checkpoint" : 768/"follower_global_checkpoint" : $body.follow_stats.indices.0.shards.0.follower_global_checkpoint/] diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index ec6ef28534f..5271b976f96 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -1,9 +1,9 @@ [[docs-get]] == Get API -The get API allows to get a typed JSON document from the index based on +The get API allows to get a JSON document from the index based on its id. 
The following example gets a JSON document from an index called -twitter, under a type called `_doc`, with id valued 0: +twitter with id valued 0: [source,js] -------------------------------------------------- @@ -21,6 +21,8 @@ The result of the above get operation is: "_type" : "_doc", "_id" : "0", "_version" : 1, + "_seq_no" : 10, + "_primary_term" : 1, "found": true, "_source" : { "user" : "kimchy", @@ -30,9 +32,9 @@ The result of the above get operation is: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] -The above result includes the `_index`, `_type`, `_id` and `_version` +The above result includes the `_index`, `_id` and `_version` of the document we wish to retrieve, including the actual `_source` of the document if it could be found (as indicated by the `found` field in the response). @@ -156,6 +158,8 @@ The result of the above get operation is: "_type": "_doc", "_id": "1", "_version": 1, + "_seq_no" : 22, + "_primary_term" : 1, "found": true, "fields": { "tags": [ @@ -164,7 +168,7 @@ The result of the above get operation is: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Field values fetched from the document itself are always returned as an array. @@ -199,6 +203,8 @@ The result of the above get operation is: "_type": "_doc", "_id": "2", "_version": 1, + "_seq_no" : 13, + "_primary_term" : 1, "_routing": "user1", "found": true, "fields": { @@ -208,7 +214,7 @@ The result of the above get operation is: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Also only leaf fields can be returned via the `stored_field` option. 
So object fields can't be returned and such requests will fail. @@ -217,13 +223,13 @@ will fail. [[_source]] === Getting the +_source+ directly -Use the `/{index}/{type}/{id}/_source` endpoint to get +Use the `/{index}/_source/{id}` endpoint to get just the `_source` field of the document, without any additional content around it. For example: [source,js] -------------------------------------------------- -GET twitter/_doc/1/_source +GET twitter/_source/1 -------------------------------------------------- // CONSOLE // TEST[continued] @@ -232,7 +238,7 @@ You can also use the same source filtering parameters to control which parts of [source,js] -------------------------------------------------- -GET twitter/_doc/1/_source?_source_includes=*.id&_source_excludes=entities' +GET twitter/_source/1/?_source_includes=*.id&_source_excludes=entities' -------------------------------------------------- // CONSOLE // TEST[continued] @@ -242,7 +248,7 @@ An existing document will not have a _source if it is disabled in the <>. - -Alternatively, you can use an <> to send data to +<>. Alternatively, you can use an <> to send data to a separate _monitoring cluster_. +IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the +cluster that stores the monitoring data must have at least one +<>. + For more information about typical monitoring architectures, see {stack-ov}/how-monitoring-works.html[How Monitoring Works]. -- diff --git a/docs/reference/monitoring/configuring-metricbeat.asciidoc b/docs/reference/monitoring/configuring-metricbeat.asciidoc index 6098336538b..dd7811b3421 100644 --- a/docs/reference/monitoring/configuring-metricbeat.asciidoc +++ b/docs/reference/monitoring/configuring-metricbeat.asciidoc @@ -164,6 +164,10 @@ output.elasticsearch: <1> In this example, the data is stored on a monitoring cluster with nodes `es-mon-1` and `es-mon-2`. 
+IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the +cluster that stores the monitoring data must have at least one +<>. + For more information about these configuration options, see {metricbeat-ref}/elasticsearch-output.html[Configure the {es} output]. -- diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml index 4695991f3c3..d4b39c5e99a 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml @@ -23,7 +23,7 @@ teardown: ] } - match: { acknowledged: true } - +# default pipeline via index - do: indices.create: index: test @@ -48,7 +48,7 @@ teardown: id: 1 - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } - +# default pipeline via alias - do: index: index: test_alias @@ -63,12 +63,101 @@ teardown: id: 2 - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } +# default pipeline via upsert + - do: + update: + index: test + type: test + id: 3 + body: + script: + source: "ctx._source.ran_script = true" + lang: "painless" + upsert: { "bytes_source_field":"1kb" } + - do: + get: + index: test + type: test + id: 3 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } +# default pipeline via scripted upsert + - do: + update: + index: test + type: test + id: 4 + body: + script: + source: "ctx._source.bytes_source_field = '1kb'" + lang: "painless" + upsert : {} + scripted_upsert: true + - do: + get: + index: test + type: test + id: 4 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } +# default pipeline via doc_as_upsert + - do: + update: + index: test + type: test + id: 5 + body: + doc: { 
"bytes_source_field":"1kb" } + doc_as_upsert: true + - do: + get: + index: test + type: test + id: 5 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } +# default pipeline via bulk upsert +# note - bulk scripted upsert's execute the pipeline before the script, so any data referenced by the pipeline +# needs to be in the upsert, not the script + - do: + bulk: + refresh: true + body: | + {"update":{"_id":"6","_index":"test","_type":"test"}} + {"script":"ctx._source.ran_script = true","upsert":{"bytes_source_field":"1kb"}} + {"update":{"_id":"7","_index":"test","_type":"test"}} + {"doc":{"bytes_source_field":"2kb"}, "doc_as_upsert":true} + {"update":{"_id":"8","_index":"test","_type":"test"}} + {"script": "ctx._source.ran_script = true","upsert":{"bytes_source_field":"3kb"}, "scripted_upsert" : true} + - do: + mget: + body: + docs: + - { _index: "test", _type: "_doc", _id: "6" } + - { _index: "test", _type: "_doc", _id: "7" } + - { _index: "test", _type: "_doc", _id: "8" } + - match: { docs.0._index: "test" } + - match: { docs.0._id: "6" } + - match: { docs.0._source.bytes_source_field: "1kb" } + - match: { docs.0._source.bytes_target_field: 1024 } + - is_false: docs.0._source.ran_script + - match: { docs.1._index: "test" } + - match: { docs.1._id: "7" } + - match: { docs.1._source.bytes_source_field: "2kb" } + - match: { docs.1._source.bytes_target_field: 2048 } + - match: { docs.2._index: "test" } + - match: { docs.2._id: "8" } + - match: { docs.2._source.bytes_source_field: "3kb" } + - match: { docs.2._source.bytes_target_field: 3072 } + - match: { docs.2._source.ran_script: true } + +# explicit no default pipeline - do: index: index: test type: test - id: 3 + id: 9 pipeline: "_none" body: {bytes_source_field: "1kb"} @@ -76,15 +165,15 @@ teardown: get: index: test type: test - id: 3 + id: 9 - match: { _source.bytes_source_field: "1kb" } - is_false: _source.bytes_target_field - +# bad request - do: catch: bad_request 
index: index: test type: test - id: 4 + id: 10 pipeline: "" body: {bytes_source_field: "1kb"} diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml index 3be038aca24..accc30faa21 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -57,3 +57,44 @@ teardown: type: test id: 2 - match: { _source.foo: "blub" } + +--- +"Test Drop Processor On Failure": +- do: + ingest.put_pipeline: + id: "my_pipeline_with_failure" + body: > + { + "description" : "pipeline with on failure drop", + "processors": [ + { + "fail": { + "message": "failed", + "on_failure": [ + { + "drop": {} + } + ] + } + } + ] + } +- match: { acknowledged: true } + +- do: + index: + index: test + type: test + id: 3 + pipeline: "my_pipeline_with_failure" + body: { + foo: "bar" + } + +- do: + catch: missing + get: + index: test + type: test + id: 3 +- match: { found: false } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index f00a30a62c4..ac21be1f5c0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -73,38 +73,6 @@ public final class AnalyzerCaster { } else if (expected == Double.class) { return PainlessCast.originalTypetoTargetType(def.class, Double.class, explicit); } - } else if (actual == Object.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Byte.class, true, byte.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, 
Short.class, true, short.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Character.class, true, char.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Integer.class, true, int.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Long.class, true, long.class); - } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Float.class, true, float.class); - } else if (expected == double.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Double.class, true, double.class); - } - } else if (actual == Number.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Byte.class, true, byte.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Short.class, true, short.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Character.class, true, char.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Integer.class, true, int.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Long.class, true, long.class); - } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Float.class, true, float.class); - } else if (expected == double.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Double.class, true, double.class); - } } else if (actual == String.class) { if (expected == char.class && explicit) { return PainlessCast.originalTypetoTargetType(String.class, 
char.class, true); @@ -140,8 +108,6 @@ public final class AnalyzerCaster { return PainlessCast.boxTargetType(byte.class, byte.class, explicit, byte.class); } else if (expected == Short.class && internal) { return PainlessCast.boxTargetType(byte.class, short.class, explicit, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(byte.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { return PainlessCast.boxTargetType(byte.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { @@ -170,12 +136,8 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(short.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(short.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(short.class, byte.class, true, byte.class); } else if (expected == Short.class && internal) { return PainlessCast.boxTargetType(short.class, short.class, explicit, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(short.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { return PainlessCast.boxTargetType(short.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { @@ -206,10 +168,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(char.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(char.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(char.class, byte.class, true, byte.class); - } else if (expected == Short.class && internal) { - return PainlessCast.boxTargetType(char.class, short.class, 
explicit, short.class); } else if (expected == Character.class && internal) { return PainlessCast.boxTargetType(char.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { @@ -240,12 +198,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(int.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(int.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(int.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(int.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(int.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { return PainlessCast.boxTargetType(int.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { @@ -274,14 +226,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(long.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(long.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, char.class, true, char.class); - } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, int.class, true, int.class); } else if (expected == Long.class && internal) { return 
PainlessCast.boxTargetType(long.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { @@ -308,16 +252,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(float.class, long.class, true); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(float.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, char.class, true, char.class); - } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, int.class, true, int.class); - } else if (expected == Long.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, long.class, true, long.class); } else if (expected == Float.class && internal) { return PainlessCast.boxTargetType(float.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { @@ -342,18 +276,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(double.class, long.class, true); } else if (expected == float.class && explicit) { return PainlessCast.originalTypetoTargetType(double.class, float.class, true); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, char.class, true, char.class); - } 
else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, int.class, true, int.class); - } else if (expected == Long.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, long.class, true, long.class); - } else if (expected == Float.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, float.class, true, float.class); } else if (expected == Double.class && internal) { return PainlessCast.boxTargetType(double.class, double.class, explicit, double.class); } @@ -366,8 +288,6 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(byte.class, byte.class, explicit, byte.class); } else if (expected == short.class && internal) { return PainlessCast.unboxOriginalType(byte.class, short.class, explicit, byte.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(byte.class, char.class, true, byte.class); } else if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(byte.class, int.class, explicit, byte.class); } else if (expected == long.class && internal) { @@ -376,14 +296,20 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(byte.class, float.class, explicit, byte.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(byte.class, double.class, explicit, byte.class); + } else if (expected == Short.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, short.class); + } else if (expected == Integer.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, long.class); + } else if (expected == Float.class && internal) { + return 
PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, double.class); } } else if (actual == Short.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(short.class, byte.class, true, short.class); - } else if (expected == short.class && internal) { + if (expected == short.class && internal) { return PainlessCast.unboxOriginalType(short.class, short.class, explicit, short.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(short.class, char.class, true, short.class); } else if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(short.class, int.class, explicit, short.class); } else if (expected == long.class && internal) { @@ -392,13 +318,17 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(short.class, float.class, explicit, short.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(short.class, double.class, explicit, short.class); + } else if (expected == Integer.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, double.class); } } else if (actual == Character.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(char.class, byte.class, true, char.class); - } else if (expected == 
short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(char.class, short.class, true, char.class); - } else if (expected == char.class && internal) { + if (expected == char.class && internal) { return PainlessCast.unboxOriginalType(char.class, char.class, explicit, char.class); } else if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(char.class, int.class, explicit, char.class); @@ -408,15 +338,17 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(char.class, float.class, explicit, char.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(char.class, double.class, explicit, char.class); + } else if (expected == Integer.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, double.class); } } else if (actual == Integer.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(int.class, byte.class, true, int.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(int.class, short.class, true, int.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(int.class, char.class, true, int.class); - } else if (expected == int.class && internal) { + if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(int.class, int.class, explicit, int.class); } else if (expected == long.class && internal) { return 
PainlessCast.unboxOriginalType(int.class, long.class, explicit, int.class); @@ -424,61 +356,45 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(int.class, float.class, explicit, int.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(int.class, double.class, explicit, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, double.class); } } else if (actual == Long.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, byte.class, true, long.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, short.class, true, long.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, char.class, true, long.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, int.class, true, long.class); - } else if (expected == long.class && internal) { + if (expected == long.class && internal) { return PainlessCast.unboxOriginalType(long.class, long.class, explicit, long.class); } else if (expected == float.class && internal) { return PainlessCast.unboxOriginalType(long.class, float.class, explicit, long.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(long.class, double.class, explicit, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, float.class); + } else 
if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, double.class); } } else if (actual == Float.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, byte.class, true, float.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, short.class, true, float.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, char.class, true, float.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, int.class, true, float.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, long.class, true, float.class); - } else if (expected == float.class && internal) { + if (expected == float.class && internal) { return PainlessCast.unboxOriginalType(float.class, float.class, explicit, float.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(float.class, double.class, explicit, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, float.class, double.class); } } else if (actual == Double.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, byte.class, true, double.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, short.class, true, double.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, char.class, true, double.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, int.class, true, 
double.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, long.class, true, double.class); - } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, float.class, true, double.class); - } else if (expected == double.class && internal) { + if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(double.class, double.class, explicit, double.class); } } - if ( actual == def.class || + if ( + actual == def.class || (actual != void.class && expected == def.class) || - expected.isAssignableFrom(actual) || - (actual.isAssignableFrom(expected) && explicit)) { + expected.isAssignableFrom(actual) || + (actual.isAssignableFrom(expected) && explicit) + ) { return PainlessCast.originalTypetoTargetType(actual, expected, explicit); } else { throw location.createError(new ClassCastException("Cannot cast from " + diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index a2433689db3..ea58e7df7b4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -154,6 +154,10 @@ public final class MethodWriter extends GeneratorAdapter { invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); } else if (cast.originalType == String.class && cast.targetType == char.class) { invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); + } else if (cast.unboxOriginalType != null && cast.boxTargetType != null) { + unbox(getType(cast.unboxOriginalType)); + writeCast(cast.unboxOriginalType, cast.boxTargetType); + box(getType(cast.boxTargetType)); } else if (cast.unboxOriginalType != null) { unbox(getType(cast.unboxOriginalType)); writeCast(cast.originalType, cast.targetType); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index 98968465d34..5a3fb848a61 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -75,6 +75,15 @@ public class PainlessCast { return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } + /** Create a cast where the original type is unboxed, cast to a target type, and the target type is boxed. */ + public static PainlessCast unboxOriginalTypeToBoxTargetType(boolean explicitCast, Class unboxOriginalType, Class boxTargetType) { + + Objects.requireNonNull(unboxOriginalType); + Objects.requireNonNull(boxTargetType); + + return new PainlessCast(null, null, explicitCast, unboxOriginalType, null, null, boxTargetType); + } + public final Class originalType; public final Class targetType; public final boolean explicitCast; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java new file mode 100644 index 00000000000..67a2b683ab6 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java @@ -0,0 +1,511 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +public class BoxedCastTests extends ScriptTestCase { + + public void testMethodCallByteToBoxedCasts() { + assertEquals(0, exec("byte u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("byte u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Byte u = Byte.valueOf((byte)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u 
= Byte.valueOf((byte)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("byte u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("byte u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Byte u = Byte.valueOf((byte)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("def u = (byte)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (byte)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, 
exec("def u = (byte)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("def u = (byte)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (byte)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallShortToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Short b = Short.valueOf((short)1); 
b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, 
exec("Short u = Short.valueOf((short)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallCharacterToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; Short b = 
Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b 
= Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u 
= (char)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallIntegerToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Long b = 
Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; 
Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallLongToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Integer b = 
Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; def b = 
Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; 
def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallFloatToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + 
() -> exec("Float u = Float.valueOf((float)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def 
b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + 
} + + public void testMethodCallDoubleToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("double u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Double u = Double.valueOf((double)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + 
expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("double u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Double u = Double.valueOf((double)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Byte b = 
Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (double)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (double)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } +} diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index be9c3f83f3f..d4fe0fe1ddd 100644 --- 
a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -54,6 +54,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.equalTo; public class PercolateQueryBuilderTests extends AbstractQueryTestCase { @@ -152,12 +153,13 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase listener) { final long startTime = relativeTime(); @@ -207,12 +225,12 @@ public class TransportBulkAction extends HandledTransportAction indicesMetaData = metaData.indices(); for (DocWriteRequest actionRequest : bulkRequest.requests) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; + IndexRequest indexRequest = getIndexWriteRequest(actionRequest); + if(indexRequest != null){ String pipeline = indexRequest.getPipeline(); if (pipeline == null) { - IndexMetaData indexMetaData = indicesMetaData.get(indexRequest.index()); - if (indexMetaData == null) { + IndexMetaData indexMetaData = indicesMetaData.get(actionRequest.index()); + if (indexMetaData == null && indexRequest.index() != null) { //check the alias AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index()); if (indexOrAlias != null && indexOrAlias.isAlias()) { @@ -626,7 +644,7 @@ public class TransportBulkAction extends HandledTransportAction> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType()); updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex, + indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); } } else if (translatedResult == DocWriteResponse.Result.DELETED) { @@ 
-315,7 +316,8 @@ public class TransportShardBulkAction extends TransportWriteAction * The operation requires the {@link #index()}, {@link #type(String)} and {@link #id(String)} @@ -84,7 +84,6 @@ public class GetRequest extends SingleShardRequest implements Realti * @param index The index to get the document from * @param type The type of the document * @param id The id of the document - * * @deprecated Types are in the process of being removed, use {@link GetRequest(String, String)} instead. */ @Deprecated @@ -127,7 +126,6 @@ public class GetRequest extends SingleShardRequest implements Realti /** * Sets the type of the document to fetch. - * * @deprecated Types are in the process of being removed. */ @Deprecated diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index b39ceb49c59..fbcb47b5fad 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -90,6 +90,20 @@ public class GetResponse extends ActionResponse implements Iterable> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType()); - update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), - sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); + update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), + response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), sourceAndContent.v2(), + sourceAndContent.v1(), upsertSourceBytes)); } else { update.setGetResult(null); } @@ -205,7 +206,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), 
response.getResult()); - update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), + update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), + response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); @@ -216,10 +218,11 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio DeleteRequest deleteRequest = result.action(); client.bulk(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse( ActionListener.wrap(response -> { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), - response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), - response.getVersion(), response.getResult()); - update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), + response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), + response.getResult()); + update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), + response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 3ef89b997a1..255161c8f32 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -209,8 +209,8 @@ public class UpdateHelper { if (detectNoop && 
noop) { UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); - update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, - updateSourceContentType, getResult.internalSourceRef())); + update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(), + getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } else { final IndexRequest finalIndexRequest = Requests.indexRequest(request.index()) @@ -270,10 +270,9 @@ public class UpdateHelper { // If it was neither an INDEX or DELETE operation, treat it as a noop UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); - update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, - updateSourceContentType, getResult.internalSourceRef())); + update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(), + getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); - } } @@ -293,7 +292,7 @@ public class UpdateHelper { /** * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response. 
*/ - public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version, + public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long seqNo, long primaryTerm, long version, final Map source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) { if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) { @@ -318,7 +317,8 @@ public class UpdateHelper { } // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType) - return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap()); + return new GetResult(concreteIndex, request.type(), request.id(), seqNo, primaryTerm, version, true, sourceFilteredAsBytes, + Collections.emptyMap()); } public static class Result { diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index 9e33e62622a..03d721b26fe 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -162,8 +162,9 @@ public class UpdateResponse extends DocWriteResponse { update = new UpdateResponse(shardId, type, id, version, result); } if (getResult != null) { - update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(), update.getVersion(), - getResult.isExists(),getResult.internalSourceRef(), getResult.getFields())); + update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), update.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); } update.setForcedRefresh(forcedRefresh); return update; diff --git 
a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index c229a826ee8..d71a3f94d40 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -46,7 +46,7 @@ import java.util.UUID; public class RestoreInProgress extends AbstractNamedDiffable implements Custom, Iterable { /** - * Fallback UUID used for restore operations that were started before v7.0 and don't have a uuid in the cluster state. + * Fallback UUID used for restore operations that were started before v6.6 and don't have a uuid in the cluster state. */ public static final String BWC_UUID = new UUID(0, 0).toString(); @@ -436,7 +436,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements final ImmutableOpenMap.Builder entriesBuilder = ImmutableOpenMap.builder(count); for (int i = 0; i < count; i++) { final String uuid; - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + if (in.getVersion().onOrAfter(Version.V_6_6_0)) { uuid = in.readString(); } else { uuid = BWC_UUID; @@ -468,7 +468,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements out.writeVInt(entries.size()); for (ObjectCursor v : entries.values()) { Entry entry = v.value; - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + if (out.getVersion().onOrAfter(Version.V_6_6_0)) { out.writeString(entry.uuid); } entry.snapshot().writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index f1dd843d798..5d23971dddb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -1531,14 +1531,14 @@ public class IndexMetaData implements Diffable, ToXContentFragmen if (sourceNumberOfShards < 
targetNumberOfShards) { // split factor = targetNumberOfShards / sourceNumberOfShards; if (factor * sourceNumberOfShards != targetNumberOfShards || factor <= 1) { - throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " + + throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " + "factor of [" + targetNumberOfShards + "]"); } } else if (sourceNumberOfShards > targetNumberOfShards) { // shrink factor = sourceNumberOfShards / targetNumberOfShards; if (factor * targetNumberOfShards != sourceNumberOfShards || factor <= 1) { - throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " + + throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " + "multiple of [" + targetNumberOfShards + "]"); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index 3654d66ad58..25a605088ef 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -222,7 +222,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { } SnapshotRecoverySource(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + if (in.getVersion().onOrAfter(Version.V_6_6_0)) { restoreUUID = in.readString(); } else { restoreUUID = RestoreInProgress.BWC_UUID; @@ -250,7 +250,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { @Override protected void writeAdditionalFields(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + if (out.getVersion().onOrAfter(Version.V_6_6_0)) { out.writeString(restoreUUID); } snapshot.writeTo(out); diff --git 
a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index ba70c703550..b98d766dd4e 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.get; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; @@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -53,6 +55,8 @@ public class GetResult implements Streamable, Iterable, ToXConten public static final String _TYPE = "_type"; public static final String _ID = "_id"; private static final String _VERSION = "_version"; + private static final String _SEQ_NO = "_seq_no"; + private static final String _PRIMARY_TERM = "_primary_term"; private static final String FOUND = "found"; private static final String FIELDS = "fields"; @@ -60,6 +64,8 @@ public class GetResult implements Streamable, Iterable, ToXConten private String type; private String id; private long version; + private long seqNo; + private long primaryTerm; private boolean exists; private Map fields; private Map sourceAsMap; @@ -69,11 +75,17 @@ public class GetResult implements Streamable, Iterable, ToXConten GetResult() { } - public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source, - Map fields) { + public GetResult(String index, String type, String id, long seqNo, long primaryTerm, 
long version, boolean exists, + BytesReference source, Map fields) { this.index = index; this.type = type; this.id = id; + this.seqNo = seqNo; + this.primaryTerm = primaryTerm; + assert (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) || (seqNo >= 0 && primaryTerm >= 1) : + "seqNo: " + seqNo + " primaryTerm: " + primaryTerm; + assert exists || (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) : + "doc not found but seqNo/primaryTerm are set"; this.version = version; this.exists = exists; this.source = source; @@ -118,6 +130,20 @@ public class GetResult implements Streamable, Iterable, ToXConten return version; } + /** + * The sequence number assigned to the last operation to have changed this document, if found. + */ + public long getSeqNo() { + return seqNo; + } + + /** + * The primary term of the last primary that has changed this document, if found. + */ + public long getPrimaryTerm() { + return primaryTerm; + } + /** * The source of the document if exists. 
*/ @@ -213,6 +239,11 @@ public class GetResult implements Streamable, Iterable, ToXConten } public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException { + if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { // seqNo may not be assigned if read from an old node + builder.field(_SEQ_NO, seqNo); + builder.field(_PRIMARY_TERM, primaryTerm); + } + List metaFields = new ArrayList<>(); List otherFields = new ArrayList<>(); if (fields != null && !fields.isEmpty()) { @@ -282,6 +313,8 @@ public class GetResult implements Streamable, Iterable, ToXConten String currentFieldName = parser.currentName(); long version = -1; + long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + long primaryTerm = 0; Boolean found = null; BytesReference source = null; Map fields = new HashMap<>(); @@ -297,6 +330,10 @@ public class GetResult implements Streamable, Iterable, ToXConten id = parser.text(); } else if (_VERSION.equals(currentFieldName)) { version = parser.longValue(); + } else if (_SEQ_NO.equals(currentFieldName)) { + seqNo = parser.longValue(); + } else if (_PRIMARY_TERM.equals(currentFieldName)) { + primaryTerm = parser.longValue(); } else if (FOUND.equals(currentFieldName)) { found = parser.booleanValue(); } else { @@ -326,7 +363,7 @@ public class GetResult implements Streamable, Iterable, ToXConten } } } - return new GetResult(index, type, id, version, found, source, fields); + return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, fields); } public static GetResult fromXContent(XContentParser parser) throws IOException { @@ -347,6 +384,13 @@ public class GetResult implements Streamable, Iterable, ToXConten index = in.readString(); type = in.readOptionalString(); id = in.readString(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + seqNo = in.readZLong(); + primaryTerm = in.readVLong(); + } else { + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + primaryTerm = 0L; + } version = in.readLong(); exists = 
in.readBoolean(); if (exists) { @@ -372,6 +416,10 @@ public class GetResult implements Streamable, Iterable, ToXConten out.writeString(index); out.writeOptionalString(type); out.writeString(id); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeZLong(seqNo); + out.writeVLong(primaryTerm); + } out.writeLong(version); out.writeBoolean(exists); if (exists) { @@ -397,6 +445,8 @@ public class GetResult implements Streamable, Iterable, ToXConten } GetResult getResult = (GetResult) o; return version == getResult.version && + seqNo == getResult.seqNo && + primaryTerm == getResult.primaryTerm && exists == getResult.exists && Objects.equals(index, getResult.index) && Objects.equals(type, getResult.type) && @@ -407,7 +457,7 @@ public class GetResult implements Streamable, Iterable, ToXConten @Override public int hashCode() { - return Objects.hash(version, exists, index, type, id, fields, sourceAsMap()); + return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, fields, sourceAsMap()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index fc1796dfcc5..6d58b981ddc 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; @@ -112,7 +113,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { public GetResult get(Engine.GetResult engineGetResult, String 
id, String type, String[] fields, FetchSourceContext fetchSourceContext) { if (!engineGetResult.exists()) { - return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null); } currentMetric.inc(); @@ -168,7 +169,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { } if (get == null || get.exists() == false) { - return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null); } try { @@ -233,7 +234,8 @@ public final class ShardGetService extends AbstractIndexShardComponent { } } - return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields); + return new GetResult(shardId.getIndexName(), type, id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm, + get.version(), get.exists(), source, fields); } private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) { diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index 3b8281bd471..a095d7647d9 100644 --- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -134,7 +134,9 @@ public class CompoundProcessor implements Processor { if (onFailureProcessors.isEmpty()) { throw compoundProcessorException; } else { - executeOnFailure(ingestDocument, compoundProcessorException); + if (executeOnFailure(ingestDocument, compoundProcessorException) == false) { + return null; + } break; } } finally { @@ -145,13 +147,17 @@ public class CompoundProcessor implements Processor { return ingestDocument; } - - void executeOnFailure(IngestDocument ingestDocument, 
ElasticsearchException exception) throws Exception { + /** + * @return true if execution should continue, false if document is dropped. + */ + boolean executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { try { putFailureMetadata(ingestDocument, exception); for (Processor processor : onFailureProcessors) { try { - processor.execute(ingestDocument); + if (processor.execute(ingestDocument) == null) { + return false; + } } catch (Exception e) { throw newCompoundProcessorException(e, processor.getType(), processor.getTag()); } @@ -159,6 +165,7 @@ public class CompoundProcessor implements Processor { } finally { removeFailureMetadata(ingestDocument); } + return true; } private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) { diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 705e77028a1..6951e33d5e7 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -24,11 +24,11 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -388,13 +388,7 @@ public class IngestService implements ClusterStateApplier { @Override protected void doRun() { for 
(DocWriteRequest actionRequest : actionRequests) { - IndexRequest indexRequest = null; - if (actionRequest instanceof IndexRequest) { - indexRequest = (IndexRequest) actionRequest; - } else if (actionRequest instanceof UpdateRequest) { - UpdateRequest updateRequest = (UpdateRequest) actionRequest; - indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest(); - } + IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(actionRequest); if (indexRequest == null) { continue; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index c48529d420c..af376bf7c3c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -19,12 +19,14 @@ package org.elasticsearch.rest.action.document; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -49,8 +51,14 @@ import static org.elasticsearch.rest.RestStatus.OK; */ public class RestGetSourceAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetSourceAction.class)); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source" + + "requests is 
deprecated."; + public RestGetSourceAction(final Settings settings, final RestController controller) { super(settings); + controller.registerHandler(GET, "/{index}/_source/{id}", this); + controller.registerHandler(HEAD, "/{index}/_source/{id}", this); controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this); controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this); } @@ -62,7 +70,13 @@ public class RestGetSourceAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final GetRequest getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id")); + final GetRequest getRequest; + if (request.hasParam("type")) { + deprecationLogger.deprecatedAndMaybeLog("get_source_with_types", TYPES_DEPRECATION_MESSAGE); + getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id")); + } else { + getRequest = new GetRequest(request.param("index"), request.param("id")); + } getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh())); getRequest.routing(request.param("routing")); getRequest.preference(request.param("preference")); diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 3d8ea384546..7fd68852ce2 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -19,16 +19,6 @@ package org.elasticsearch.search; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.OriginalIndices; 
@@ -61,6 +51,16 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.transport.RemoteClusterAware; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; @@ -311,10 +311,17 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable 0) { - sortValues = new Object[size]; - for (int i = 0; i < sortValues.length; i++) { - byte type = in.readByte(); - if (type == 0) { - sortValues[i] = null; - } else if (type == 1) { - sortValues[i] = in.readString(); - } else if (type == 2) { - sortValues[i] = in.readInt(); - } else if (type == 3) { - sortValues[i] = in.readLong(); - } else if (type == 4) { - sortValues[i] = in.readFloat(); - } else if (type == 5) { - sortValues[i] = in.readDouble(); - } else if (type == 6) { - sortValues[i] = in.readByte(); - } else if (type == 7) { - sortValues[i] = in.readShort(); - } else if (type == 8) { - sortValues[i] = in.readBoolean(); - } else { - throw new IOException("Can't match type [" + type + "]"); - } - } + SearchSortValues(StreamInput in) throws IOException { + this.formattedSortValues = in.readArray(Lucene::readSortValue, Object[]::new); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + this.rawSortValues = in.readArray(Lucene::readSortValue, Object[]::new); } else { - sortValues = new Object[0]; + this.rawSortValues = EMPTY_ARRAY; } } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(sortValues.length); - for (Object sortValue : sortValues) { - if (sortValue == null) { - out.writeByte((byte) 0); - } else { - Class type = 
sortValue.getClass(); - if (type == String.class) { - out.writeByte((byte) 1); - out.writeString((String) sortValue); - } else if (type == Integer.class) { - out.writeByte((byte) 2); - out.writeInt((Integer) sortValue); - } else if (type == Long.class) { - out.writeByte((byte) 3); - out.writeLong((Long) sortValue); - } else if (type == Float.class) { - out.writeByte((byte) 4); - out.writeFloat((Float) sortValue); - } else if (type == Double.class) { - out.writeByte((byte) 5); - out.writeDouble((Double) sortValue); - } else if (type == Byte.class) { - out.writeByte((byte) 6); - out.writeByte((Byte) sortValue); - } else if (type == Short.class) { - out.writeByte((byte) 7); - out.writeShort((Short) sortValue); - } else if (type == Boolean.class) { - out.writeByte((byte) 8); - out.writeBoolean((Boolean) sortValue); - } else { - throw new IOException("Can't handle sort field value of type [" + type + "]"); - } - } + out.writeArray(Lucene::writeSortValue, this.formattedSortValues); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeArray(Lucene::writeSortValue, this.rawSortValues); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (sortValues.length > 0) { + if (formattedSortValues.length > 0) { builder.startArray(Fields.SORT); - for (Object sortValue : sortValues) { + for (Object sortValue : formattedSortValues) { builder.value(sortValue); } builder.endArray(); @@ -142,24 +99,37 @@ public class SearchSortValues implements ToXContentFragment, Writeable { return new SearchSortValues(parser.list().toArray()); } - public Object[] sortValues() { - return sortValues; + /** + * Returns the formatted version of the values that sorting was performed against + */ + public Object[] getFormattedSortValues() { + return formattedSortValues; + } + + /** + * Returns the raw version of the values that sorting was performed against + */ + public Object[] getRawSortValues() { + return rawSortValues; } @Override - 
public boolean equals(Object obj) { - if (this == obj) { + public boolean equals(Object o) { + if (this == o) { return true; } - if (obj == null || getClass() != obj.getClass()) { + if (o == null || getClass() != o.getClass()) { return false; } - SearchSortValues other = (SearchSortValues) obj; - return Arrays.equals(sortValues, other.sortValues); + SearchSortValues that = (SearchSortValues) o; + return Arrays.equals(formattedSortValues, that.formattedSortValues) && + Arrays.equals(rawSortValues, that.rawSortValues); } @Override public int hashCode() { - return Arrays.hashCode(sortValues); + int result = Arrays.hashCode(formattedSortValues); + result = 31 * result + Arrays.hashCode(rawSortValues); + return result; } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index 2c36af8638f..237e73e572a 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -183,10 +183,11 @@ public abstract class RemoteClusterAware { * (ProxyAddresss, [SeedNodeSuppliers]). If a cluster is configured with a proxy address all seed nodes will point to * {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node. 
*/ - protected static Map>>> buildRemoteClustersDynamicConfig(Settings settings) { - final Map>>> remoteSeeds = + protected static Map>>>> buildRemoteClustersDynamicConfig( + final Settings settings) { + final Map>>>> remoteSeeds = buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS); - final Map>>> searchRemoteSeeds = + final Map>>>> searchRemoteSeeds = buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS); // sort the intersection for predictable output order final NavigableSet intersection = @@ -205,7 +206,7 @@ public abstract class RemoteClusterAware { .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } - private static Map>>> buildRemoteClustersDynamicConfig( + private static Map>>>> buildRemoteClustersDynamicConfig( final Settings settings, final Setting.AffixSetting> seedsSetting) { final Stream>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings); return allConcreteSettings.collect( @@ -214,9 +215,9 @@ public abstract class RemoteClusterAware { List addresses = concreteSetting.get(settings); final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings); - List> nodes = new ArrayList<>(addresses.size()); + List>> nodes = new ArrayList<>(addresses.size()); for (String address : addresses) { - nodes.add(() -> buildSeedNode(clusterName, address, proxyMode)); + nodes.add(Tuple.tuple(address, () -> buildSeedNode(clusterName, address, proxyMode))); } return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes); })); @@ -304,16 +305,24 @@ public abstract class RemoteClusterAware { (namespace, value) -> {}); } - - protected static InetSocketAddress parseSeedAddress(String remoteHost) { - String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost)); + static InetSocketAddress parseSeedAddress(String remoteHost) { + final Tuple hostPort = parseHostPort(remoteHost); + final String host 
= hostPort.v1(); + assert hostPort.v2() != null : remoteHost; + final int port = hostPort.v2(); InetAddress hostAddress; try { hostAddress = InetAddress.getByName(host); } catch (UnknownHostException e) { throw new IllegalArgumentException("unknown host [" + host + "]", e); } - return new InetSocketAddress(hostAddress, parsePort(remoteHost)); + return new InetSocketAddress(hostAddress, port); + } + + public static Tuple parseHostPort(final String remoteHost) { + final String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost)); + final int port = parsePort(remoteHost); + return Tuple.tuple(host, port); } private static int parsePort(String remoteHost) { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 87dd99e6590..7ea55925262 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -35,6 +35,7 @@ import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -95,7 +96,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos private final Predicate nodePredicate; private final ThreadPool threadPool; private volatile String proxyAddress; - private volatile List> seedNodes; + private volatile List>> seedNodes; private volatile boolean skipUnavailable; private final ConnectHandler connectHandler; private final TimeValue initialConnectionTimeout; @@ -111,7 +112,7 @@ final class RemoteClusterConnection implements 
TransportConnectionListener, Clos * @param nodePredicate a predicate to filter eligible remote nodes to connect to * @param proxyAddress the proxy address */ - RemoteClusterConnection(Settings settings, String clusterAlias, List> seedNodes, + RemoteClusterConnection(Settings settings, String clusterAlias, List>> seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate nodePredicate, String proxyAddress) { this(settings, clusterAlias, seedNodes, transportService, maxNumRemoteConnections, nodePredicate, proxyAddress, @@ -119,7 +120,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } // Public for tests to pass a StubbableConnectionManager - RemoteClusterConnection(Settings settings, String clusterAlias, List> seedNodes, + RemoteClusterConnection(Settings settings, String clusterAlias, List>> seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate nodePredicate, String proxyAddress, ConnectionManager connectionManager) { this.transportService = transportService; @@ -155,7 +156,10 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos /** * Updates the list of seed nodes for this cluster connection */ - synchronized void updateSeedNodes(String proxyAddress, List> seedNodes, ActionListener connectListener) { + synchronized void updateSeedNodes( + final String proxyAddress, + final List>> seedNodes, + final ActionListener connectListener) { this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes)); this.proxyAddress = proxyAddress; connectHandler.connect(connectListener); @@ -465,7 +469,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos maybeConnect(); } }); - collectRemoteNodes(seedNodes.iterator(), transportService, connectionManager, listener); + collectRemoteNodes(seedNodes.stream().map(Tuple::v2).iterator(), transportService, connectionManager, listener); } }); } @@ -672,10 +676,13 @@ final class 
RemoteClusterConnection implements TransportConnectionListener, Clos * Get the information about remote nodes to be rendered on {@code _remote/info} requests. */ public RemoteConnectionInfo getConnectionInfo() { - List seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect - (Collectors.toList()); - return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(), - initialConnectionTimeout, skipUnavailable); + return new RemoteConnectionInfo( + clusterAlias, + seedNodes.stream().map(Tuple::v1).collect(Collectors.toList()), + maxNumRemoteConnections, + connectedNodes.size(), + initialConnectionTimeout, + skipUnavailable); } int getNumNodesConnected() { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index fda0b90f19e..cb802f13fdb 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -201,7 +201,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl * @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes * @param connectionListener a listener invoked once every configured cluster has been connected to */ - private synchronized void updateRemoteClusters(Map>>> seeds, + private synchronized void updateRemoteClusters(Map>>>> seeds, ActionListener connectionListener) { if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) { throw new IllegalArgumentException("remote clusters must not have the empty string as its key"); @@ -212,8 +212,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } else { CountDown countDown = new CountDown(seeds.size()); remoteClusters.putAll(this.remoteClusters); - for (Map.Entry>>> entry : seeds.entrySet()) { - List> seedList = 
entry.getValue().v2(); + for (Map.Entry>>>> entry : seeds.entrySet()) { + List>> seedList = entry.getValue().v2(); String proxyAddress = entry.getValue().v1(); RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey()); @@ -408,9 +408,10 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl final List addresses, final String proxyAddress, final ActionListener connectionListener) { - final List> nodes = addresses.stream().>map(address -> () -> - buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress)) - ).collect(Collectors.toList()); + final List>> nodes = + addresses.stream().>>map(address -> Tuple.tuple(address, () -> + buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress))) + ).collect(Collectors.toList()); updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener); } @@ -421,7 +422,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl void initializeRemoteClusters() { final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings); final PlainActionFuture future = new PlainActionFuture<>(); - Map>>> seeds = RemoteClusterAware.buildRemoteClustersDynamicConfig(settings); + Map>>>> seeds = + RemoteClusterAware.buildRemoteClustersDynamicConfig(settings); updateRemoteClusters(seeds, future); try { future.get(timeValue.millis(), TimeUnit.MILLISECONDS); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java index c2024e39228..7c51ca7b9c8 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java @@ -16,9 +16,11 @@ * specific language governing permissions and limitations * under the License. 
*/ + package org.elasticsearch.transport; import org.elasticsearch.Version; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,25 +29,29 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; -import static java.util.Collections.emptyList; - import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyList; /** * This class encapsulates all remote cluster information to be rendered on * {@code _remote/info} requests. */ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable { - final List seedNodes; + final List seedNodes; final int connectionsPerCluster; final TimeValue initialConnectionTimeout; final int numNodesConnected; final String clusterAlias; final boolean skipUnavailable; - RemoteConnectionInfo(String clusterAlias, List seedNodes, + RemoteConnectionInfo(String clusterAlias, List seedNodes, int connectionsPerCluster, int numNodesConnected, TimeValue initialConnectionTimeout, boolean skipUnavailable) { this.clusterAlias = clusterAlias; @@ -57,7 +63,17 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable } public RemoteConnectionInfo(StreamInput input) throws IOException { - seedNodes = input.readList(TransportAddress::new); + if (input.getVersion().onOrAfter(Version.V_7_0_0)) { + seedNodes = Arrays.asList(input.readStringArray()); + } else { + // versions prior to 7.0.0 sent the resolved transport address of the seed nodes + final List transportAddresses = input.readList(TransportAddress::new); + seedNodes = + transportAddresses + 
.stream() + .map(a -> a.address().getHostString() + ":" + a.address().getPort()) + .collect(Collectors.toList()); + } if (input.getVersion().before(Version.V_7_0_0)) { /* * Versions before 7.0 sent the HTTP addresses of all nodes in the @@ -78,7 +94,26 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable @Override public void writeTo(StreamOutput out) throws IOException { - out.writeList(seedNodes); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeStringArray(seedNodes.toArray(new String[0])); + } else { + // versions prior to 7.0.0 received the resolved transport address of the seed nodes + out.writeList(seedNodes + .stream() + .map( + s -> { + final Tuple hostPort = RemoteClusterAware.parseHostPort(s); + assert hostPort.v2() != null : s; + try { + return new TransportAddress( + InetAddress.getByAddress(hostPort.v1(), TransportAddress.META_ADDRESS.getAddress()), + hostPort.v2()); + } catch (final UnknownHostException e) { + throw new AssertionError(e); + } + }) + .collect(Collectors.toList())); + } if (out.getVersion().before(Version.V_7_0_0)) { /* * Versions before 7.0 sent the HTTP addresses of all nodes in the @@ -104,8 +139,8 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable builder.startObject(clusterAlias); { builder.startArray("seeds"); - for (TransportAddress addr : seedNodes) { - builder.value(addr.toString()); + for (String addr : seedNodes) { + builder.value(addr); } builder.endArray(); builder.field("connected", numNodesConnected > 0); @@ -136,4 +171,5 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable return Objects.hash(seedNodes, connectionsPerCluster, initialConnectionTimeout, numNodesConnected, clusterAlias, skipUnavailable); } + } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 
f25f8844153..219aee9ebe2 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateApplier; @@ -408,6 +409,57 @@ public class TransportBulkActionIngestTests extends ESTestCase { validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE_ALIAS, "type", "id")); } + public void testUseDefaultPipelineWithBulkUpsert() throws Exception { + Exception exception = new Exception("fake exception"); + BulkRequest bulkRequest = new BulkRequest(); + IndexRequest indexRequest1 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id1").source(Collections.emptyMap()); + IndexRequest indexRequest2 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id2").source(Collections.emptyMap()); + IndexRequest indexRequest3 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id3").source(Collections.emptyMap()); + UpdateRequest upsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id1").upsert(indexRequest1).script(mockScript("1")); + UpdateRequest docAsUpsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").doc(indexRequest2).docAsUpsert(true); + // this test only covers the mechanics that scripted bulk upserts will execute a default pipeline. However, in practice scripted + // bulk upserts with a default pipeline are a bit surprising since the script executes AFTER the pipeline. 
+ UpdateRequest scriptedUpsert = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").upsert(indexRequest3).script(mockScript("1")) + .scriptedUpsert(true); + bulkRequest.add(upsertRequest).add(docAsUpsertRequest).add(scriptedUpsert); + + AtomicBoolean responseCalled = new AtomicBoolean(false); + AtomicBoolean failureCalled = new AtomicBoolean(false); + assertNull(indexRequest1.getPipeline()); + assertNull(indexRequest2.getPipeline()); + assertNull(indexRequest3.getPipeline()); + action.execute(null, bulkRequest, ActionListener.wrap( + response -> { + BulkItemResponse itemResponse = response.iterator().next(); + assertThat(itemResponse.getFailure().getMessage(), containsString("fake exception")); + responseCalled.set(true); + }, + e -> { + assertThat(e, sameInstance(exception)); + failureCalled.set(true); + })); + + // check failure works, and passes through to the listener + assertFalse(action.isExecuted); // haven't executed yet + assertFalse(responseCalled.get()); + assertFalse(failureCalled.get()); + verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + assertEquals(indexRequest1.getPipeline(), "default_pipeline"); + assertEquals(indexRequest2.getPipeline(), "default_pipeline"); + assertEquals(indexRequest3.getPipeline(), "default_pipeline"); + completionHandler.getValue().accept(exception); + assertTrue(failureCalled.get()); + + // now check success of the transport bulk action + indexRequest1.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing + indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing + indexRequest3.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing + completionHandler.getValue().accept(null); + assertTrue(action.isExecuted); + 
assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one + verifyZeroInteractions(transportService); + } + public void testCreateIndexBeforeRunPipeline() throws Exception { Exception exception = new Exception("fake exception"); IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id"); @@ -445,6 +497,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { indexRequest.source(Collections.emptyMap()); AtomicBoolean responseCalled = new AtomicBoolean(false); AtomicBoolean failureCalled = new AtomicBoolean(false); + assertNull(indexRequest.getPipeline()); singleItemBulkWriteAction.execute(null, indexRequest, ActionListener.wrap( response -> { responseCalled.set(true); @@ -459,6 +512,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + assertEquals(indexRequest.getPipeline(), "default_pipeline"); completionHandler.getValue().accept(exception); assertTrue(failureCalled.get()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index a058cf47741..162ef56553d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -23,8 +23,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -132,4 +134,23 @@ public class TransportBulkActionTests extends ESTestCase { throw new AssertionError(exception); })); } + + public void testGetIndexWriteRequest() throws Exception { + IndexRequest indexRequest = new IndexRequest("index", "type", "id1").source(Collections.emptyMap()); + UpdateRequest upsertRequest = new UpdateRequest("index", "type", "id1").upsert(indexRequest).script(mockScript("1")); + UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "type", "id2").doc(indexRequest).docAsUpsert(true); + UpdateRequest scriptedUpsert = new UpdateRequest("index", "type", "id2").upsert(indexRequest).script(mockScript("1")) + .scriptedUpsert(true); + + assertEquals(TransportBulkAction.getIndexWriteRequest(indexRequest), indexRequest); + assertEquals(TransportBulkAction.getIndexWriteRequest(upsertRequest), indexRequest); + assertEquals(TransportBulkAction.getIndexWriteRequest(docAsUpsertRequest), indexRequest); + assertEquals(TransportBulkAction.getIndexWriteRequest(scriptedUpsert), indexRequest); + + DeleteRequest deleteRequest = new DeleteRequest("index", "id"); + assertNull(TransportBulkAction.getIndexWriteRequest(deleteRequest)); + + UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1"); + assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest)); + } } diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java index ca5c35ccab3..2a04a976677 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -65,7 +65,7 @@ public 
class ExplainResponseTests extends AbstractStreamableXContentTestCase nowInMillis); Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); @@ -372,7 +373,7 @@ public class UpdateRequestTests extends ESTestCase { .script(mockInlineScript("ctx._timestamp = ctx._now")) .scriptedUpsert(true); // We simulate that the document is not existing yet - GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null); + GetResult getResult = new GetResult("test", "type1", "2", 0, 1, 0, true, new BytesArray("{}"), null); UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L); Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); @@ -381,7 +382,7 @@ public class UpdateRequestTests extends ESTestCase { public void testIndexTimeout() { final GetResult getResult = - new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null); + new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null); final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1") .script(mockInlineScript("return")) @@ -391,7 +392,7 @@ public class UpdateRequestTests extends ESTestCase { public void testDeleteTimeout() { final GetResult getResult = - new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null); + new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null); final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1") .script(mockInlineScript("ctx.op = delete")) @@ -402,7 +403,7 @@ public class UpdateRequestTests extends ESTestCase { public void testUpsertTimeout() throws IOException { final boolean exists = randomBoolean(); final BytesReference source = exists ? 
new BytesArray("{\"f\":\"v\"}") : null; - final GetResult getResult = new GetResult("test", "type", "1", 0, exists, source, null); + final GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, exists, source, null); final XContentBuilder sourceBuilder = jsonBuilder(); sourceBuilder.startObject(); { @@ -535,7 +536,7 @@ public class UpdateRequestTests extends ESTestCase { } public void testRoutingExtraction() throws Exception { - GetResult getResult = new GetResult("test", "type", "1", 0, false, null, null); + GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, false, null, null); IndexRequest indexRequest = new IndexRequest("test", "type", "1"); // There is no routing and parent because the document doesn't exist @@ -545,7 +546,7 @@ public class UpdateRequestTests extends ESTestCase { assertNull(UpdateHelper.calculateRouting(getResult, indexRequest)); // Doc exists but has no source or fields - getResult = new GetResult("test", "type", "1", 0, true, null, null); + getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, null); // There is no routing and parent on either request assertNull(UpdateHelper.calculateRouting(getResult, indexRequest)); @@ -554,7 +555,7 @@ public class UpdateRequestTests extends ESTestCase { fields.put("_routing", new DocumentField("_routing", Collections.singletonList("routing1"))); // Doc exists and has the parent and routing fields - getResult = new GetResult("test", "type", "1", 0, true, null, fields); + getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, fields); // Use the get result parent and routing assertThat(UpdateHelper.calculateRouting(getResult, indexRequest), equalTo("routing1")); @@ -563,7 +564,7 @@ public class UpdateRequestTests extends ESTestCase { @SuppressWarnings("deprecated") // VersionType.FORCE is deprecated public void testCalculateUpdateVersion() throws Exception { long randomVersion = randomIntBetween(0, 100); - GetResult 
getResult = new GetResult("test", "type", "1", randomVersion, true, new BytesArray("{}"), null); + GetResult getResult = new GetResult("test", "type", "1", 0, 1, randomVersion, true, new BytesArray("{}"), null); UpdateRequest request = new UpdateRequest("test", "type1", "1"); long version = UpdateHelper.calculateUpdateVersion(request, getResult); @@ -580,7 +581,7 @@ public class UpdateRequestTests extends ESTestCase { public void testNoopDetection() throws Exception { ShardId shardId = new ShardId("test", "", 0); - GetResult getResult = new GetResult("test", "type", "1", 0, true, + GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"foo\"}"), null); @@ -611,7 +612,7 @@ public class UpdateRequestTests extends ESTestCase { public void testUpdateScript() throws Exception { ShardId shardId = new ShardId("test", "", 0); - GetResult getResult = new GetResult("test", "type", "1", 0, true, + GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"bar\"}"), null); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java index c8d63f73732..8ec0423b406 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java @@ -74,11 +74,12 @@ public class UpdateResponseTests extends ESTestCase { UpdateResponse updateResponse = new UpdateResponse(new ReplicationResponse.ShardInfo(3, 2), new ShardId("books", "books_uuid", 2), "book", "1", 7, 17, 2, UPDATED); - updateResponse.setGetResult(new GetResult("books", "book", "1", 2, true, source, fields)); + updateResponse.setGetResult(new GetResult("books", "book", "1",0, 1, 2, true, source, fields)); String output = Strings.toString(updateResponse); 
assertEquals("{\"_index\":\"books\",\"_type\":\"book\",\"_id\":\"1\",\"_version\":2,\"result\":\"updated\"," + - "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{\"found\":true," + + "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{" + + "\"_seq_no\":0,\"_primary_term\":1,\"found\":true," + "\"_source\":{\"title\":\"Book title\",\"isbn\":\"ABC-123\"},\"fields\":{\"isbn\":[\"ABC-123\"],\"title\":[\"Book " + "title\"]}}}", output); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java index 393f7f6b1d4..1fdea596afb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java @@ -227,7 +227,7 @@ public class IndexMetaDataTests extends ESTestCase { assertEquals("the number of target shards (0) must be greater than the shard id: 0", expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 0)).getMessage()); - assertEquals("the number of source shards [2] must be a must be a factor of [3]", + assertEquals("the number of source shards [2] must be a factor of [3]", expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 3)).getMessage()); assertEquals("the number of routing shards [4] must be a multiple of the target shards [8]", @@ -285,6 +285,6 @@ public class IndexMetaDataTests extends ESTestCase { Settings notAFactorySettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_routing_shards", 3).build(); iae = expectThrows(IllegalArgumentException.class, () -> IndexMetaData.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING.get(notAFactorySettings)); - assertEquals("the number of source shards [2] must be a must be a factor of [3]", 
iae.getMessage()); + assertEquals("the number of source shards [2] must be a factor of [3]", iae.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index 6cbd83e5b24..ec89e085f07 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -154,7 +154,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { MetaDataCreateIndexService.validateShrinkIndex(state, "source", Collections.emptySet(), "target", targetSettings) ).getMessage()); - assertEquals("the number of source shards [8] must be a must be a multiple of [3]", + assertEquals("the number of source shards [8] must be a multiple of [3]", expectThrows(IllegalArgumentException.class, () -> MetaDataCreateIndexService.validateShrinkIndex(createClusterState("source", 8, randomIntBetween(0, 10), Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target", @@ -221,7 +221,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { ).getMessage()); - assertEquals("the number of source shards [3] must be a must be a factor of [4]", + assertEquals("the number of source shards [3] must be a factor of [4]", expectThrows(IllegalArgumentException.class, () -> MetaDataCreateIndexService.validateSplitIndex(createClusterState("source", 3, randomIntBetween(0, 10), Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target", diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index ea894a2edd0..1891be362b8 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ 
b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -531,24 +531,26 @@ public class LuceneTests extends ESTestCase { } public static Object randomSortValue() { - switch(randomIntBetween(0, 8)) { + switch(randomIntBetween(0, 9)) { case 0: - return randomAlphaOfLengthBetween(3, 10); + return null; case 1: - return randomInt(); + return randomAlphaOfLengthBetween(3, 10); case 2: - return randomLong(); + return randomInt(); case 3: - return randomFloat(); + return randomLong(); case 4: - return randomDouble(); + return randomFloat(); case 5: - return randomByte(); + return randomDouble(); case 6: - return randomShort(); + return randomByte(); case 7: - return randomBoolean(); + return randomShort(); case 8: + return randomBoolean(); + case 9: return new BytesRef(randomAlphaOfLengthBetween(3, 10)); default: throw new UnsupportedOperationException(); diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 5ada31b6129..c87a896d318 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -55,7 +55,8 @@ public class EnvironmentTests extends ESTestCase { Environment environment = newEnvironment(); assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); - environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); + environment = newEnvironment(Settings.builder() + .putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); 
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 7a24ebaf048..63635f5cbe7 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -352,7 +352,8 @@ public class NodeEnvironmentTests extends ESTestCase { for (int i = 0; i < iters; i++) { int shard = randomIntBetween(0, counts.length - 1); try { - try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), scaledRandomIntBetween(0, 10))) { + try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), + scaledRandomIntBetween(0, 10))) { counts[shard].value++; countsAtomic[shard].incrementAndGet(); assertEquals(flipFlop[shard].incrementAndGet(), 1); @@ -386,7 +387,9 @@ public class NodeEnvironmentTests extends ESTestCase { final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "myindexUUID").build(); IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", indexSettings); - IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build()); + IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder() + .put(indexSettings) + .put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build()); Index index = new Index("myindex", "myindexUUID"); ShardId sid = new ShardId(index, 0); diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index 1cc2612041f..0dc6b2573ea 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -44,6 +44,7 @@ import 
static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.index.get.DocumentFieldTests.randomDocumentField; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -72,15 +73,16 @@ public class GetResultTests extends ESTestCase { public void testToXContent() throws IOException { { - GetResult getResult = new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " + + GetResult getResult = new GetResult("index", "type", "id", 0, 1, 1, true, new BytesArray("{ \"field1\" : " + "\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1", singletonList("value1")))); String output = Strings.toString(getResult); - assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " + - ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output); + assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + + "\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", + output); } { - GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null); + GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null); String output = Strings.toString(getResult); assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output); } @@ -92,7 +94,7 @@ public class GetResultTests extends ESTestCase { GetResult getResult = tuple.v1(); // We don't expect to retrieve 
the index/type/id of the GetResult because they are not rendered // by the toXContentEmbedded method. - GetResult expectedGetResult = new GetResult(null, null, null, -1, + GetResult expectedGetResult = new GetResult(null, null, null, tuple.v2().getSeqNo(), tuple.v2().getPrimaryTerm(), -1, tuple.v2().isExists(), tuple.v2().sourceRef(), tuple.v2().getFields()); boolean humanReadable = randomBoolean(); @@ -118,16 +120,16 @@ public class GetResultTests extends ESTestCase { fields.put("foo", new DocumentField("foo", singletonList("bar"))); fields.put("baz", new DocumentField("baz", Arrays.asList("baz_0", "baz_1"))); - GetResult getResult = new GetResult("index", "type", "id", 2, true, + GetResult getResult = new GetResult("index", "type", "id", 0, 1, 2, true, new BytesArray("{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}"), fields); BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false); - assertEquals("{\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," + + assertEquals("{\"_seq_no\":0,\"_primary_term\":1,\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," + "\"fields\":{\"foo\":[\"bar\"],\"baz\":[\"baz_0\",\"baz_1\"]}}", originalBytes.utf8ToString()); } public void testToXContentEmbeddedNotFound() throws IOException { - GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null); + GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null); BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false); assertEquals("{\"found\":false}", originalBytes.utf8ToString()); @@ -149,25 +151,34 @@ public class GetResultTests extends ESTestCase { } public static GetResult copyGetResult(GetResult getResult) { - return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()); + return new 
GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()); } public static GetResult mutateGetResult(GetResult getResult) { List> mutations = new ArrayList<>(); - mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), getResult.getVersion(), + mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), getResult.getVersion(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), getResult.getVersion(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomNonNegativeLong(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), - getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), - getResult.isExists(), RandomObjects.randomSource(random()), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), + mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), + 
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), randomNonNegativeLong(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.isExists() ? UNASSIGNED_SEQ_NO : getResult.getSeqNo(), + getResult.isExists() ? 0 : getResult.getPrimaryTerm(), + getResult.getVersion(), getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(), + RandomObjects.randomSource(random()), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON).v1())); return randomFrom(mutations).get(); } @@ -177,12 +188,16 @@ public class GetResultTests extends ESTestCase { final String type = randomAlphaOfLengthBetween(3, 10); final String id = randomAlphaOfLengthBetween(3, 10); final long version; + final long seqNo; + final long primaryTerm; final boolean exists; BytesReference source = null; Map fields = null; Map expectedFields = null; if (frequently()) { version = randomNonNegativeLong(); + seqNo = 
randomNonNegativeLong(); + primaryTerm = randomLongBetween(1, 100); exists = true; if (frequently()) { source = RandomObjects.randomSource(random()); @@ -193,11 +208,13 @@ public class GetResultTests extends ESTestCase { expectedFields = tuple.v2(); } } else { + seqNo = UNASSIGNED_SEQ_NO; + primaryTerm = 0; version = -1; exists = false; } - GetResult getResult = new GetResult(index, type, id, version, exists, source, fields); - GetResult expectedGetResult = new GetResult(index, type, id, version, exists, source, expectedFields); + GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, fields); + GetResult expectedGetResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, expectedFields); return Tuple.tuple(getResult, expectedGetResult); } diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index ca9a21973aa..bcd2b4ef144 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -131,7 +131,8 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase {throw new RuntimeException("error");}); + Processor processor2 = new Processor() { + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + //Simulates the drop processor + return null; + } + + @Override + public String getType() { + return "drop"; + } + + @Override + public String getTag() { + return null; + } + }; + + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); + CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1), + Collections.singletonList(processor2), relativeTimeProvider); + 
assertNull(compoundProcessor.execute(ingestDocument)); + assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); + } + public void testSingleProcessorWithNestedFailures() throws Exception { TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");}); TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> { diff --git a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 6624d4eb8de..0fb5f7ac114 100644 --- a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -76,14 +76,16 @@ public class FullRollingRestartIT extends ESIntegTestCase { internalCluster().startNode(settings); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); logger.info("--> add two more nodes"); internalCluster().startNode(settings); internalCluster().startNode(settings); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5")); 
logger.info("--> refreshing and checking data"); refresh(); @@ -94,11 +96,13 @@ public class FullRollingRestartIT extends ESIntegTestCase { // now start shutting nodes down internalCluster().stopRandomDataNode(); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4")); internalCluster().stopRandomDataNode(); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); logger.info("--> stopped two nodes, verifying data"); refresh(); @@ -109,12 +113,14 @@ public class FullRollingRestartIT extends ESIntegTestCase { // closing the 3rd node internalCluster().stopRandomDataNode(); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2")); internalCluster().stopRandomDataNode(); // make sure the cluster state is yellow, and all has been recovered - 
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1")); logger.info("--> one node left, verifying data"); refresh(); @@ -133,7 +139,9 @@ public class FullRollingRestartIT extends ESIntegTestCase { * to relocating to the restarting node since all had 2 shards and now one node has nothing allocated. * We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen. */ - prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6") + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get(); for (int i = 0; i < 100; i++) { client().prepareIndex("test", "type1", Long.toString(i)) @@ -152,7 +160,8 @@ public class FullRollingRestartIT extends ESIntegTestCase { recoveryResponse = client().admin().indices().prepareRecoveries("test").get(); for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { - assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " + recoveryState.getTargetNode()+ "-- \nbefore: \n" + state, + assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " + + recoveryState.getTargetNode()+ "-- \nbefore: \n" + state, recoveryState.getRecoverySource().getType() != RecoverySource.Type.PEER || recoveryState.getPrimary() 
== false); } } diff --git a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 0d2235c30a4..c0345be6fae 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -53,14 +53,18 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE," + + "org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE") public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class); public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -92,7 +96,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { 
logger.info("--> waiting for GREEN health status ..."); // make sure the cluster state is green, and all has been recovered - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs); waitForDocs(totalNumDocs, indexer); @@ -113,7 +118,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -142,7 +150,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { allowNodes("test", 4); logger.info("--> waiting for GREEN health status ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs); @@ -164,7 +173,9 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void 
testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 2, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -194,7 +205,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { allowNodes("test", 4); logger.info("--> waiting for GREEN health status ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus().setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForGreenStatus() + .setWaitForNoRelocatingShards(true)); logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs); waitForDocs(totalNumDocs, indexer); @@ -205,23 +219,31 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { logger.info("--> allow 3 nodes for index [test] ..."); allowNodes("test", 3); logger.info("--> waiting for relocations ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> allow 2 nodes for index [test] ..."); allowNodes("test", 2); logger.info("--> waiting for relocations ..."); - 
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> allow 1 nodes for index [test] ..."); allowNodes("test", 1); logger.info("--> waiting for relocations ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> marking and waiting for indexing threads to stop ..."); indexer.stop(); logger.info("--> indexing threads stopped"); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> refreshing the index"); refreshAndAssert(); @@ -235,7 +257,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 3, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numShards) + .put(SETTING_NUMBER_OF_REPLICAS, numReplicas) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); @@ -258,7 +283,8 @@ public class 
RecoveryWhileUnderLoadIT extends ESIntegTestCase { logger.info("--> indexing threads stopped"); logger.info("--> bump up number of replicas to 1 and allow all nodes to hold the index"); allowNodes("test", 3); - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("number_of_replicas", 1)).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put("number_of_replicas", 1)).get()); ensureGreen(TimeValue.timeValueMinutes(5)); logger.info("--> refreshing the index"); @@ -273,7 +299,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { SearchResponse[] iterationResults = new SearchResponse[iterations]; boolean error = false; for (int i = 0; i < iterations; i++) { - SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).addSort("id", SortOrder.ASC).get(); + SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()) + .addSort("id", SortOrder.ASC).get(); logSearchResponse(numberOfShards, numberOfDocs, i, searchResponse); iterationResults[i] = searchResponse; if (searchResponse.getHits().getTotalHits().value != numberOfDocs) { @@ -286,7 +313,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().get(); for (ShardStats shardStats : indicesStatsResponse.getShards()) { DocsStats docsStats = shardStats.getStats().docs; - logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), shardStats.getShardRouting().primary()); + logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), + shardStats.getShardRouting().primary()); } ClusterService clusterService = clusterService(); @@ -332,12 +360,14 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { } 
private void logSearchResponse(int numberOfShards, long numberOfDocs, int iteration, SearchResponse searchResponse) { - logger.info("iteration [{}] - successful shards: {} (expected {})", iteration, searchResponse.getSuccessfulShards(), numberOfShards); + logger.info("iteration [{}] - successful shards: {} (expected {})", iteration, + searchResponse.getSuccessfulShards(), numberOfShards); logger.info("iteration [{}] - failed shards: {} (expected 0)", iteration, searchResponse.getFailedShards()); if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) { logger.info("iteration [{}] - shard failures: {}", iteration, Arrays.toString(searchResponse.getShardFailures())); } - logger.info("iteration [{}] - returned documents: {} (expected {})", iteration, searchResponse.getHits().getTotalHits().value, numberOfDocs); + logger.info("iteration [{}] - returned documents: {} (expected {})", iteration, + searchResponse.getHits().getTotalHits().value, numberOfDocs); } private void refreshAndAssert() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java index b27e4fd229a..62208a40488 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -133,7 +133,8 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> start another node"); final String node_2 = internalCluster().startNode(); - ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").execute().actionGet(); + ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) + .setWaitForNodes("2").execute().actionGet(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); logger.info("--> relocate the shard from 
node1 to node2"); @@ -141,7 +142,8 @@ public class RelocationIT extends ESIntegTestCase { .add(new MoveAllocationCommand("test", 0, node_1, node_2)) .execute().actionGet(); - clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); + clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); logger.info("--> verifying count again..."); @@ -155,7 +157,8 @@ public class RelocationIT extends ESIntegTestCase { int numberOfReplicas = randomBoolean() ? 0 : 1; int numberOfNodes = numberOfReplicas == 0 ? 2 : 3; - logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes); + logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", + numberOfRelocations, numberOfReplicas, numberOfNodes); String[] nodes = new String[numberOfNodes]; logger.info("--> starting [node1] ..."); @@ -172,8 +175,10 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> starting [node{}] ...", i); nodes[i - 1] = internalCluster().startNode(); if (i != numberOfNodes) { - ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) - .setWaitForNodes(Integer.toString(i)).setWaitForGreenStatus().execute().actionGet(); + ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNodes(Integer.toString(i)) + .setWaitForGreenStatus().execute().actionGet(); assertThat(healthResponse.isTimedOut(), equalTo(false)); } } @@ -202,7 +207,10 @@ public class RelocationIT extends 
ESIntegTestCase { logger.debug("--> flushing"); client().admin().indices().prepareFlush().get(); } - ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); + ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); indexer.pauseIndexing(); logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode); @@ -218,7 +226,8 @@ public class RelocationIT extends ESIntegTestCase { boolean ranOnce = false; for (int i = 0; i < 10; i++) { logger.info("--> START search test round {}", i + 1); - SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits(); + SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()) + .setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits(); ranOnce = true; if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; @@ -252,7 +261,8 @@ public class RelocationIT extends ESIntegTestCase { int numberOfReplicas = randomBoolean() ? 0 : 1; int numberOfNodes = numberOfReplicas == 0 ? 
2 : 3; - logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes); + logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", + numberOfRelocations, numberOfReplicas, numberOfNodes); String[] nodes = new String[numberOfNodes]; logger.info("--> starting [node_0] ..."); @@ -281,13 +291,15 @@ public class RelocationIT extends ESIntegTestCase { final Semaphore postRecoveryShards = new Semaphore(0); final IndexEventListener listener = new IndexEventListener() { @Override - public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) { + public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, + IndexShardState currentState, @Nullable String reason) { if (currentState == IndexShardState.POST_RECOVERY) { postRecoveryShards.release(); } } }; - for (MockIndexEventListener.TestEventListener eventListener : internalCluster().getInstances(MockIndexEventListener.TestEventListener.class)) { + for (MockIndexEventListener.TestEventListener eventListener : internalCluster() + .getInstances(MockIndexEventListener.TestEventListener.class)) { eventListener.setNewDelegate(listener); } @@ -327,7 +339,10 @@ public class RelocationIT extends ESIntegTestCase { indexRandom(true, true, builders2); // verify cluster was finished. 
- assertFalse(client().admin().cluster().prepareHealth().setWaitForNoRelocatingShards(true).setWaitForEvents(Priority.LANGUID).setTimeout("30s").get().isTimedOut()); + assertFalse(client().admin().cluster().prepareHealth() + .setWaitForNoRelocatingShards(true) + .setWaitForEvents(Priority.LANGUID) + .setTimeout("30s").get().isTimedOut()); logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode); logger.debug("--> verifying all searches return the same number of docs"); @@ -374,17 +389,20 @@ public class RelocationIT extends ESIntegTestCase { MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, p_node); for (DiscoveryNode node : clusterService.state().nodes()) { if (!node.equals(clusterService.localNode())) { - mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()), new RecoveryCorruption(corruptionCount)); + mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()), + new RecoveryCorruption(corruptionCount)); } } - client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get(); + client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get(); corruptionCount.await(); logger.info("--> stopping replica assignment"); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); + .setTransientSettings(Settings.builder() + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.info("--> wait for all replica shards to be removed, on all nodes"); assertBusy(() -> { @@ -408,7 +426,8 @@ public class RelocationIT extends ESIntegTestCase { 
Files.walkFileTree(shardLoc, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), not(startsWith("recovery."))); + assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), + not(startsWith("recovery."))); return FileVisitResult.CONTINUE; } }); @@ -496,13 +515,15 @@ public class RelocationIT extends ESIntegTestCase { } @Override - public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException { + public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request, + TransportRequestOptions options) throws IOException { if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest chunkRequest = (RecoveryFileChunkRequest) request; if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) { // corrupting the segments_N files in order to make sure future recovery re-send files logger.debug("corrupting [{}] to {}. 
file name: [{}]", action, connection.getNode(), chunkRequest.name()); - assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes : "no internal reference!!"; + assert chunkRequest.content().toBytesRef().bytes == + chunkRequest.content().toBytesRef().bytes : "no internal reference!!"; byte[] array = chunkRequest.content().toBytesRef().bytes; array[0] = (byte) ~array[0]; // flip one byte in the content corruptionCount.countDown(); diff --git a/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index ac8688c9847..973c687ebe8 100644 --- a/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -89,7 +89,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase { // we have no replicas so far and make sure that we allocate the primary on the lucky node assertAcked(prepareCreate("test") .addMapping("type1", "field1", "type=text", "the_id", "type=text") - .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards()) + .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards()) .put("index.routing.allocation.include._name", primariesNode.getNode().getName()))); // only allocate on the lucky node // index some docs and check if they are coming back @@ -112,7 +113,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase { final CountDownLatch latch = new CountDownLatch(1); final AtomicBoolean truncate = new AtomicBoolean(true); for (NodeStats dataNode : dataNodeStats) { - MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getNode().getName())); + MockTransportService mockTransportService = 
((MockTransportService) internalCluster() + .getInstance(TransportService.class, dataNode.getNode().getName())); mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()), (connection, requestId, action, request, options) -> { if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java index b9fd724fb65..f012c1393c9 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java @@ -23,25 +23,38 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener; import org.elasticsearch.test.rest.FakeRestChannel; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.test.rest.RestActionTestCase; import org.junit.AfterClass; +import org.junit.Before; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import static java.util.Collections.emptyMap; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener; import static 
org.hamcrest.Matchers.equalTo; -public class RestGetSourceActionTests extends ESTestCase { +public class RestGetSourceActionTests extends RestActionTestCase { private static RestRequest request = new FakeRestRequest(); private static FakeRestChannel channel = new FakeRestChannel(request, true, 0); private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request); + @Before + public void setUpAction() { + new RestGetSourceAction(Settings.EMPTY, controller()); + } + @AfterClass public static void cleanupReferences() { request = null; @@ -49,9 +62,41 @@ public class RestGetSourceActionTests extends ESTestCase { listener = null; } + /** + * test deprecation is logged if type is used in path + */ + public void testTypeInPath() { + for (Method method : Arrays.asList(Method.GET, Method.HEAD)) { + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(method) + .withPath("/some_index/some_type/id/_source") + .build(); + dispatchRequest(request); + assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE); + } + } + + /** + * test deprecation is logged if type is used as parameter + */ + public void testTypeParameter() { + Map params = new HashMap<>(); + params.put("type", "some_type"); + for (Method method : Arrays.asList(Method.GET, Method.HEAD)) { + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(method) + .withPath("/some_index/_source/id") + .withParams(params) + .build(); + dispatchRequest(request); + assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE); + } + } + public void testRestGetSourceAction() throws Exception { final BytesReference source = new BytesArray("{\"foo\": \"bar\"}"); - final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, source, emptyMap())); + final GetResponse response = + new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, source, emptyMap())); final 
RestResponse restResponse = listener.buildResponse(response); @@ -61,7 +106,8 @@ public class RestGetSourceActionTests extends ESTestCase { } public void testRestGetSourceActionWithMissingDocument() { - final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, false, null, emptyMap())); + final GetResponse response = + new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap())); final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response)); @@ -69,7 +115,8 @@ public class RestGetSourceActionTests extends ESTestCase { } public void testRestGetSourceActionWithMissingDocumentSource() { - final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, null, emptyMap())); + final GetResponse response = + new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, null, emptyMap())); final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response)); diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index f6b8dc828f4..797b5dd888e 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.search; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.LuceneTests; import org.elasticsearch.common.xcontent.ToXContent; @@ -31,23 +32,36 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import 
org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.RandomObjects; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Arrays; +import java.util.Base64; public class SearchSortValuesTests extends AbstractSerializingTestCase { public static SearchSortValues createTestItem(XContentType xContentType, boolean transportSerialization) { int size = randomIntBetween(1, 20); Object[] values = new Object[size]; - DocValueFormat[] sortValueFormats = new DocValueFormat[size]; - for (int i = 0; i < size; i++) { - Object sortValue = randomSortValue(xContentType, transportSerialization); - values[i] = sortValue; - //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef) - sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat(); + if (transportSerialization) { + DocValueFormat[] sortValueFormats = new DocValueFormat[size]; + for (int i = 0; i < size; i++) { + Object sortValue = randomSortValue(xContentType, transportSerialization); + values[i] = sortValue; + //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef) + sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat(); + } + return new SearchSortValues(values, sortValueFormats); + } else { + //xcontent serialization doesn't write/parse the raw sort values, only the formatted ones + for (int i = 0; i < size; i++) { + Object sortValue = randomSortValue(xContentType, transportSerialization); + //make sure that BytesRef are not provided as formatted values + sortValue = sortValue instanceof BytesRef ? 
DocValueFormat.RAW.format((BytesRef)sortValue) : sortValue; + values[i] = sortValue; + } + return new SearchSortValues(values); } - return new SearchSortValues(values, sortValueFormats); } private static Object randomSortValue(XContentType xContentType, boolean transportSerialization) { @@ -79,7 +93,7 @@ public class SearchSortValuesTests extends AbstractSerializingTestCase seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -206,9 +208,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -259,9 +261,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n 
-> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -282,7 +284,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(incompatibleTransport.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> incompatibleSeedNode, () -> seedNode); + List>> seedNodes = Arrays.asList( + Tuple.tuple(incompatibleSeedNode.toString(), () -> incompatibleSeedNode), + Tuple.tuple(seedNode.toString(), () -> seedNode)); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -317,9 +321,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertFalse(connectionManager.nodeConnected(spareNode)); @@ -367,9 +371,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); 
service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); if (rejectedNode.equals(seedNode)) { assertFalse(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); @@ -382,11 +386,15 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } } - private void updateSeedNodes(RemoteClusterConnection connection, List> seedNodes) throws Exception { + private void updateSeedNodes( + final RemoteClusterConnection connection, final List>> seedNodes) throws Exception { updateSeedNodes(connection, seedNodes, null); } - private void updateSeedNodes(RemoteClusterConnection connection, List> seedNodes, String proxyAddress) + private void updateSeedNodes( + final RemoteClusterConnection connection, + final List>> seedNodes, + final String proxyAddress) throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicReference exceptionAtomicReference = new AtomicReference<>(); @@ -428,9 +436,11 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - expectThrows(Exception.class, () -> updateSeedNodes(connection, 
Arrays.asList(() -> seedNode))); + expectThrows( + Exception.class, + () -> updateSeedNodes(connection, Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)))); assertFalse(connectionManager.nodeConnected(seedNode)); assertTrue(connection.assertNoRunningConnections()); } @@ -481,7 +491,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { connection.addConnectedNode(seedNode); for (DiscoveryNode node : knownNodes) { final Transport.Connection transportConnection = connection.getConnection(node); @@ -524,7 +534,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { CountDownLatch listenerCalled = new CountDownLatch(1); AtomicReference exceptionReference = new AtomicReference<>(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ActionListener listener = ActionListener.wrap(x -> { listenerCalled.countDown(); fail("expected exception"); @@ -532,7 +542,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { exceptionReference.set(x); listenerCalled.countDown(); }); - connection.updateSeedNodes(null, Arrays.asList(() -> seedNode), listener); + connection.updateSeedNodes(null, seedNodes(seedNode), listener); acceptedLatch.await(); connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on assertTrue(connection.assertNoRunningConnections()); @@ -548,6 +558,18 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } + private 
List>> seedNodes(final DiscoveryNode... seedNodes) { + if (seedNodes.length == 0) { + return Collections.emptyList(); + } else if (seedNodes.length == 1) { + return Collections.singletonList(Tuple.tuple(seedNodes[0].toString(), () -> seedNodes[0])); + } else { + return Arrays.stream(seedNodes) + .map(s -> Tuple.tuple(s.toString(), (Supplier)() -> s)) + .collect(Collectors.toList()); + } + } + public void testFetchShards() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); @@ -559,11 +581,11 @@ public class RemoteClusterConnectionTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); - List> nodes = Collections.singletonList(() -> seedNode); + final List>> seedNodes = seedNodes(seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - nodes, service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes, service, Integer.MAX_VALUE, n -> true, null)) { if (randomBoolean()) { - updateSeedNodes(connection, nodes); + updateSeedNodes(connection, seedNodes); } if (randomBoolean()) { connection.updateSkipUnavailable(randomBoolean()); @@ -599,9 +621,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); - List> nodes = Collections.singletonList(() -> seedNode); + final List>> seedNodes = seedNodes(seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - nodes, service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes, service, Integer.MAX_VALUE, n -> true, null)) { SearchRequest request = new 
SearchRequest("test-index"); Thread[] threads = new Thread[10]; for (int i = 0; i < threads.length; i++) { @@ -655,7 +677,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); SearchRequest request = new SearchRequest("test-index"); @@ -759,7 +781,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(seedTransport1.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode); + List>> seedNodes = seedNodes(seedNode1, seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -839,7 +861,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(seedTransport1.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode); + List>> seedNodes = seedNodes(seedNode1, seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -926,7 +948,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(transport3.getLocalDiscoNode()); knownNodes.add(transport2.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> node3, () -> node1, () -> node2); + List>> 
seedNodes = seedNodes(node3, node1, node2); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -958,44 +980,32 @@ public class RemoteClusterConnectionTests extends ESTestCase { } public void testRemoteConnectionInfo() throws IOException { - RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), false); + RemoteConnectionInfo stats = + new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats); - RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 4, TimeValue.timeValueMinutes(30), true); + RemoteConnectionInfo stats1 = + new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 4, TimeValue.timeValueMinutes(30), true); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster_1", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), false); + stats1 = new RemoteConnectionInfo("test_cluster_1", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 15)), - 4, 3, TimeValue.timeValueMinutes(30), false); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:15"), 4, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, 
TimeValue.timeValueMinutes(30), true); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(325), true); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(325), true); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 5, 3, TimeValue.timeValueMinutes(30), false); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 5, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats1); assertNotEquals(stats, stats1); } @@ -1016,9 +1026,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { public void testRemoteConnectionInfoBwComp() throws IOException { final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_1_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)); - RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster", - Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 4, new TimeValue(30, TimeUnit.MINUTES), false); + RemoteConnectionInfo expected = + new RemoteConnectionInfo("test_cluster", Arrays.asList("0.0.0.0:1"), 4, 4, new TimeValue(30, TimeUnit.MINUTES), false); // This version was created using the serialization code in use from 6.1 but before 7.0 String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIA"; @@ -1042,27 +1051,25 @@ public class RemoteClusterConnectionTests extends ESTestCase { } public void testRenderConnectionInfoXContent() throws IOException { - RemoteConnectionInfo stats = new 
RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), true); + RemoteConnectionInfo stats = + new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true); stats = assertSerialization(stats); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); stats.toXContent(builder, null); builder.endObject(); - assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"connected\":true," + + assertEquals("{\"test_cluster\":{\"seeds\":[\"seed:1\"],\"connected\":true," + "\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," + "\"skip_unavailable\":true}}", Strings.toString(builder)); - stats = new RemoteConnectionInfo("some_other_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1), new TransportAddress(TransportAddress.META_ADDRESS, 2)), - 2, 0, TimeValue.timeValueSeconds(30), false); + stats = new RemoteConnectionInfo( + "some_other_cluster", Arrays.asList("seed:1", "seed:2"), 2, 0, TimeValue.timeValueSeconds(30), false); stats = assertSerialization(stats); builder = XContentFactory.jsonBuilder(); builder.startObject(); stats.toXContent(builder, null); builder.endObject(); - assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"]," + assertEquals("{\"some_other_cluster\":{\"seeds\":[\"seed:1\",\"seed:2\"]," + "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," + "\"skip_unavailable\":false}}", Strings.toString(builder)); } @@ -1081,7 +1088,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + 
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); assertFalse(connectionManager.nodeConnected(seedNode)); assertFalse(connectionManager.nodeConnected(discoverableNode)); @@ -1131,9 +1138,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { if (randomBoolean()) { - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); } CountDownLatch responseLatch = new CountDownLatch(1); AtomicReference> reference = new AtomicReference<>(); @@ -1165,14 +1172,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { List discoverableTransports = new CopyOnWriteArrayList<>(); try { final int numDiscoverableNodes = randomIntBetween(5, 20); - List> discoverableNodes = new ArrayList<>(numDiscoverableNodes); + List>> discoverableNodes = new ArrayList<>(numDiscoverableNodes); for (int i = 0; i < numDiscoverableNodes; i++ ) { MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT); - discoverableNodes.add(transportService::getLocalDiscoNode); + discoverableNodes.add(Tuple.tuple("discoverable_node" + i, transportService::getLocalDiscoNode)); discoverableTransports.add(transportService); } - List> seedNodes = randomSubsetOf(discoverableNodes); + List>> seedNodes = randomSubsetOf(discoverableNodes); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -1221,7 +1228,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { 
discoverableTransports.add(transportService); connection.addConnectedNode(transportService.getLocalDiscoNode()); } else { - DiscoveryNode node = randomFrom(discoverableNodes).get(); + DiscoveryNode node = randomFrom(discoverableNodes).v2().get(); connection.onNodeDisconnected(node); } } @@ -1269,14 +1276,16 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList( () -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Collections.singletonList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); - List> discoveryNodes = - Arrays.asList(otherClusterTransport::getLocalDiscoNode, () -> seedNode); + List>> discoveryNodes = + Arrays.asList( + Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode), + Tuple.tuple(seedNode.toString(), () -> seedNode)); Collections.shuffle(discoveryNodes, random()); updateSeedNodes(connection, discoveryNodes); assertTrue(connectionManager.nodeConnected(seedNode)); @@ -1287,7 +1296,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> - updateSeedNodes(connection, Arrays.asList(() -> otherClusterTransport.getLocalDiscoNode()))); + updateSeedNodes(connection, Arrays.asList(Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode)))); 
assertThat(illegalStateException.getMessage(), startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" + " - {other_cluster_discoverable_node}")); @@ -1339,7 +1348,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(() -> connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { + seedNodes(connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { connection.addConnectedNode(connectedNode); for (int i = 0; i < 10; i++) { //always a direct connection as the remote node is already connected @@ -1376,10 +1385,10 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); CountDownLatch multipleResolveLatch = new CountDownLatch(2); - Supplier seedSupplier = () -> { + Tuple> seedSupplier = Tuple.tuple(seedNode.toString(), () -> { multipleResolveLatch.countDown(); return seedNode; - }; + }); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, null)) { updateSeedNodes(connection, Arrays.asList(seedSupplier)); @@ -1409,9 +1418,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { threadPool, null, Collections.emptySet())) { service.start(); service.acceptIncomingRequests(); - Supplier seedSupplier = () -> - RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true); - assertEquals("node_0", seedSupplier.get().getAttributes().get("server_name")); + Tuple> seedSupplier = Tuple.tuple("node_0", () -> + RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true)); + assertEquals("node_0", 
seedSupplier.v2().get().getAttributes().get("server_name")); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, proxyAddress)) { updateSeedNodes(connection, Arrays.asList(seedSupplier), proxyAddress); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index dfc5d4367b4..34dfc420133 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -125,41 +125,42 @@ public class RemoteClusterServiceTests extends ESTestCase { } public void testBuildRemoteClustersDynamicConfig() throws Exception { - Map>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig( - Settings.builder() - .put("cluster.remote.foo.seeds", "192.168.0.1:8080") - .put("cluster.remote.bar.seeds", "[::1]:9090") - .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") - .put("cluster.remote.boom.proxy", "foo.bar.com:1234") - .put("search.remote.quux.seeds", "quux:9300") - .put("search.remote.quux.proxy", "quux-proxy:19300") - .build()); + Map>>>> map = + RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "[::1]:9090") + .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") + .put("cluster.remote.boom.proxy", "foo.bar.com:1234") + .put("search.remote.quux.seeds", "quux:9300") + .put("search.remote.quux.proxy", "quux-proxy:19300") + .build()); assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux"))); assertThat(map.get("foo").v2(), hasSize(1)); assertThat(map.get("bar").v2(), hasSize(1)); assertThat(map.get("boom").v2(), hasSize(1)); 
assertThat(map.get("quux").v2(), hasSize(1)); - DiscoveryNode foo = map.get("foo").v2().get(0).get(); + DiscoveryNode foo = map.get("foo").v2().get(0).v2().get(); assertEquals("", map.get("foo").v1()); assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080))); assertEquals(foo.getId(), "foo#192.168.0.1:8080"); assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode bar = map.get("bar").v2().get(0).get(); + DiscoveryNode bar = map.get("bar").v2().get(0).v2().get(); assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090))); assertEquals(bar.getId(), "bar#[::1]:9090"); assertEquals("", map.get("bar").v1()); assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode boom = map.get("boom").v2().get(0).get(); + DiscoveryNode boom = map.get("boom").v2().get(0).v2().get(); assertEquals(boom.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); assertEquals("boom-node1.internal", boom.getHostName()); assertEquals(boom.getId(), "boom#boom-node1.internal:1000"); assertEquals("foo.bar.com:1234", map.get("boom").v1()); assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode quux = map.get("quux").v2().get(0).get(); + DiscoveryNode quux = map.get("quux").v2().get(0).v2().get(); assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); assertEquals("quux", quux.getHostName()); assertEquals(quux.getId(), "quux#quux:9300"); diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index 9de70f4339f..0c253f1446c 100644 --- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -24,6 +24,7 @@ import 
org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -311,12 +312,11 @@ public class SimpleVersioningIT extends ESIntegTestCase { assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class); client().admin().indices().prepareRefresh().execute().actionGet(); - // TODO: Enable once get response returns seqNo -// for (int i = 0; i < 10; i++) { -// final GetResponse response = client().prepareGet("test", "type", "1").get(); -// assertThat(response.getSeqNo(), equalTo(1L)); -// assertThat(response.getPrimaryTerm(), equalTo(1L)); -// } + for (int i = 0; i < 10; i++) { + final GetResponse response = client().prepareGet("test", "type", "1").get(); + assertThat(response.getSeqNo(), equalTo(1L)); + assertThat(response.getPrimaryTerm(), equalTo(1L)); + } // search with versioning for (int i = 0; i < 10; i++) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java index 3478c14cfda..cb7f5ff4a22 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java @@ -27,6 +27,6 @@ public abstract class AbstractWireSerializingTestCase exten @Override protected T copyInstance(T instance, Version version) throws IOException { - return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader()); + return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), version); } } 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index c81d0810f08..4669284685c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -135,13 +135,16 @@ public final class RandomObjects { } } if (value instanceof Float) { + if (xContentType == XContentType.CBOR) { + //with CBOR we get back a float + return value; + } if (xContentType == XContentType.SMILE) { //with SMILE we get back a double (this will change in Jackson 2.9 where it will return a Float) return ((Float)value).doubleValue(); - } else { - //with JSON AND YAML we get back a double, but with float precision. - return Double.parseDouble(value.toString()); } + //with JSON AND YAML we get back a double, but with float precision. + return Double.parseDouble(value.toString()); } if (value instanceof Byte) { return ((Byte)value).intValue(); diff --git a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc b/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc index 540f5866825..18c88f7addd 100644 --- a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc +++ b/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc @@ -2,7 +2,7 @@ [[security-api-invalidate-token]] === Invalidate token API -Invalidates an access token or a refresh token. +Invalidates one or more access tokens or refresh tokens. ==== Request @@ -19,21 +19,31 @@ can no longer be used. That time period is defined by the The refresh tokens returned by the <> are only valid for 24 hours. They can also be used exactly once. -If you want to invalidate an access or refresh token immediately, use this invalidate token API. +If you want to invalidate one or more access or refresh tokens immediately, use this invalidate token API. 
==== Request Body The following parameters can be specified in the body of a DELETE request and -pertain to invalidating a token: +pertain to invalidating tokens: `token` (optional):: -(string) An access token. This parameter cannot be used when `refresh_token` is used. +(string) An access token. This parameter cannot be used when any of `refresh_token`, `realm_name` or + `username` are used. `refresh_token` (optional):: -(string) A refresh token. This parameter cannot be used when `token` is used. +(string) A refresh token. This parameter cannot be used when any of `token`, `realm_name` or + `username` are used. -NOTE: One of `token` or `refresh_token` parameters is required. +`realm_name` (optional):: +(string) The name of an authentication realm. This parameter cannot be used with either `refresh_token` or `token`. + +`username` (optional):: +(string) The username of a user. This parameter cannot be used with either `refresh_token` or `token`. + +NOTE: While all parameters are optional, at least one of them is required. More specifically, either one of `token` +or `refresh_token` parameters is required. If neither of these is specified, then `realm_name` and/or `username` +need to be specified. ==== Examples @@ -59,15 +69,75 @@ DELETE /_security/oauth2/token -------------------------------------------------- // NOTCONSOLE -A successful call returns a JSON structure that indicates whether the token -has already been invalidated. +The following example invalidates all access tokens and refresh tokens for the `saml1` realm immediately: [source,js] -------------------------------------------------- +DELETE /_security/oauth2/token { - "created" : true <1> + "realm_name" : "saml1" } -------------------------------------------------- // NOTCONSOLE -<1> When a token has already been invalidated, `created` is set to false. 
+The following example invalidates all access tokens and refresh tokens for the user `myuser` in all realms immediately: + +[source,js] +-------------------------------------------------- +DELETE /_security/oauth2/token +{ + "username" : "myuser" +} +-------------------------------------------------- +// NOTCONSOLE + +Finally, the following example invalidates all access tokens and refresh tokens for the user `myuser` in + the `saml1` realm immediately: + +[source,js] +-------------------------------------------------- +DELETE /_security/oauth2/token +{ + "username" : "myuser", + "realm_name" : "saml1" +} +-------------------------------------------------- +// NOTCONSOLE + +A successful call returns a JSON structure that contains the number of tokens that were invalidated, the number +of tokens that had already been invalidated, and potentially a list of errors encountered while invalidating +specific tokens. + +[source,js] +-------------------------------------------------- +{ + "invalidated_tokens":9, <1> + "previously_invalidated_tokens":15, <2> + "error_count":2, <3> + "error_details":[ <4> + { + "type":"exception", + "reason":"Elasticsearch exception [type=exception, reason=foo]", + "caused_by":{ + "type":"exception", + "reason":"Elasticsearch exception [type=illegal_argument_exception, reason=bar]" + } + }, + { + "type":"exception", + "reason":"Elasticsearch exception [type=exception, reason=boo]", + "caused_by":{ + "type":"exception", + "reason":"Elasticsearch exception [type=illegal_argument_exception, reason=far]" + } + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +<1> The number of tokens that were invalidated as part of this request. +<2> The number of tokens that were already invalidated. +<3> The number of errors that were encountered when invalidating the tokens. +<4> Details about these errors. This field is not present in the response when + `error_count` is 0. 
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index b25bd71c67f..58ba11e4d04 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ccr; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; @@ -111,7 +110,6 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E private final boolean enabled; private final Settings settings; private final CcrLicenseChecker ccrLicenseChecker; - private final SetOnce repositoryManager = new SetOnce<>(); private Client client; /** @@ -152,11 +150,10 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E return emptyList(); } - this.repositoryManager.set(new CcrRepositoryManager(settings, clusterService, client)); - return Arrays.asList( ccrLicenseChecker, - new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker) + new CcrRepositoryManager(settings, clusterService, client), + new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker, threadPool::relativeTimeInMillis) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java index a1504ff2f8a..54403df3678 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import 
org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.ccr.action.repositories.DeleteInternalCcrRepositoryAction; @@ -18,31 +19,70 @@ import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryA import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryRequest; import org.elasticsearch.xpack.ccr.repository.CcrRepository; +import java.io.IOException; import java.util.List; +import java.util.Set; -class CcrRepositoryManager extends RemoteClusterAware { +class CcrRepositoryManager extends AbstractLifecycleComponent { private final Client client; + private final RemoteSettingsUpdateListener updateListener; CcrRepositoryManager(Settings settings, ClusterService clusterService, Client client) { super(settings); this.client = client; - listenForUpdates(clusterService.getClusterSettings()); + updateListener = new RemoteSettingsUpdateListener(settings); + updateListener.listenForUpdates(clusterService.getClusterSettings()); } @Override - protected void updateRemoteCluster(String clusterAlias, List addresses, String proxyAddress) { - String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias; - if (addresses.isEmpty()) { - DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName); - PlainActionFuture f = PlainActionFuture.newFuture(); - client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f); - assert f.isDone() : "Should be completed as it is executed synchronously"; - } else { - ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE); - PlainActionFuture f = PlainActionFuture.newFuture(); - client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f); - assert f.isDone() : "Should be completed as it is executed 
synchronously"; + protected void doStart() { + updateListener.init(); + } + + @Override + protected void doStop() { + } + + @Override + protected void doClose() throws IOException { + } + + private void putRepository(String repositoryName) { + ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE); + PlainActionFuture f = PlainActionFuture.newFuture(); + client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f); + assert f.isDone() : "Should be completed as it is executed synchronously"; + } + + private void deleteRepository(String repositoryName) { + DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName); + PlainActionFuture f = PlainActionFuture.newFuture(); + client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f); + assert f.isDone() : "Should be completed as it is executed synchronously"; + } + + private class RemoteSettingsUpdateListener extends RemoteClusterAware { + + private RemoteSettingsUpdateListener(Settings settings) { + super(settings); + } + + void init() { + Set clusterAliases = buildRemoteClustersDynamicConfig(settings).keySet(); + for (String clusterAlias : clusterAliases) { + putRepository(CcrRepository.NAME_PREFIX + clusterAlias); + } + } + + @Override + protected void updateRemoteCluster(String clusterAlias, List addresses, String proxy) { + String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias; + if (addresses.isEmpty()) { + deleteRepository(repositoryName); + } else { + putRepository(repositoryName); + } } } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 7900351105c..4888b0367fd 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -52,9 +52,12 @@ import java.util.TreeMap; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; +import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster; + /** * A component that runs only on the elected master node and follows leader indices automatically * if they match with a auto follow pattern that is defined in {@link AutoFollowMetadata}. @@ -67,6 +70,7 @@ public class AutoFollowCoordinator implements ClusterStateListener { private final Client client; private final ClusterService clusterService; private final CcrLicenseChecker ccrLicenseChecker; + private final LongSupplier relativeMillisTimeProvider; private volatile Map autoFollowers = Collections.emptyMap(); @@ -79,10 +83,13 @@ public class AutoFollowCoordinator implements ClusterStateListener { public AutoFollowCoordinator( Client client, ClusterService clusterService, - CcrLicenseChecker ccrLicenseChecker) { + CcrLicenseChecker ccrLicenseChecker, + LongSupplier relativeMillisTimeProvider) { + this.client = client; this.clusterService = clusterService; this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker, "ccrLicenseChecker"); + this.relativeMillisTimeProvider = relativeMillisTimeProvider; clusterService.addListener(this); this.recentAutoFollowErrors = new LinkedHashMap() { @Override @@ -93,11 +100,26 @@ public class AutoFollowCoordinator implements ClusterStateListener { } public synchronized AutoFollowStats getStats() { + final Map autoFollowers = this.autoFollowers; + final TreeMap timesSinceLastAutoFollowPerRemoteCluster = new TreeMap<>(); + for (Map.Entry entry : autoFollowers.entrySet()) { + long lastAutoFollowTimeInMillis = entry.getValue().lastAutoFollowTimeInMillis; + long 
lastSeenMetadataVersion = entry.getValue().metadataVersion; + if (lastAutoFollowTimeInMillis != -1) { + long timeSinceLastCheckInMillis = relativeMillisTimeProvider.getAsLong() - lastAutoFollowTimeInMillis; + timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(), + new AutoFollowedCluster(timeSinceLastCheckInMillis, lastSeenMetadataVersion)); + } else { + timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(), new AutoFollowedCluster(-1L, lastSeenMetadataVersion)); + } + } + return new AutoFollowStats( numberOfFailedIndicesAutoFollowed, numberOfFailedRemoteClusterStateRequests, numberOfSuccessfulIndicesAutoFollowed, - new TreeMap<>(recentAutoFollowErrors) + new TreeMap<>(recentAutoFollowErrors), + timesSinceLastAutoFollowPerRemoteCluster ); } @@ -146,7 +168,8 @@ public class AutoFollowCoordinator implements ClusterStateListener { Map newAutoFollowers = new HashMap<>(newRemoteClusters.size()); for (String remoteCluster : newRemoteClusters) { - AutoFollower autoFollower = new AutoFollower(remoteCluster, this::updateStats, clusterService::state) { + AutoFollower autoFollower = + new AutoFollower(remoteCluster, this::updateStats, clusterService::state, relativeMillisTimeProvider) { @Override void getRemoteClusterState(final String remoteCluster, @@ -239,20 +262,25 @@ public class AutoFollowCoordinator implements ClusterStateListener { private final String remoteCluster; private final Consumer> statsUpdater; private final Supplier followerClusterStateSupplier; + private final LongSupplier relativeTimeProvider; + private volatile long lastAutoFollowTimeInMillis = -1; private volatile long metadataVersion = 0; private volatile CountDown autoFollowPatternsCountDown; private volatile AtomicArray autoFollowResults; AutoFollower(final String remoteCluster, final Consumer> statsUpdater, - final Supplier followerClusterStateSupplier) { + final Supplier followerClusterStateSupplier, + LongSupplier relativeTimeProvider) { this.remoteCluster = remoteCluster; 
this.statsUpdater = statsUpdater; this.followerClusterStateSupplier = followerClusterStateSupplier; + this.relativeTimeProvider = relativeTimeProvider; } void start() { + lastAutoFollowTimeInMillis = relativeTimeProvider.getAsLong(); final ClusterState clusterState = followerClusterStateSupplier.get(); final AutoFollowMetadata autoFollowMetadata = clusterState.metaData().custom(AutoFollowMetadata.TYPE); if (autoFollowMetadata == null) { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 5abe852ca5f..8865c536917 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; @@ -117,27 +116,23 @@ public abstract class CcrIntegTestCase extends ESTestCase { } stopClusters(); - NodeConfigurationSource nodeConfigurationSource = createNodeConfigurationSource(); Collection> mockPlugins = Arrays.asList(ESIntegTestCase.TestSeedPlugin.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class, getTestTransportPlugin()); InternalTestCluster leaderCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(), - numberOfNodesPerCluster(), 
UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "leader", mockPlugins, + numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(null), 0, "leader", mockPlugins, Function.identity()); - InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(), - numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "follower", mockPlugins, - Function.identity()); - clusterGroup = new ClusterGroup(leaderCluster, followerCluster); - leaderCluster.beforeTest(random(), 0.0D); leaderCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster()); + + String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString(); + InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(), + numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(address), 0, "follower", + mockPlugins, Function.identity()); + clusterGroup = new ClusterGroup(leaderCluster, followerCluster); + followerCluster.beforeTest(random(), 0.0D); followerCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster()); - - ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); - String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString(); - updateSettingsRequest.persistentSettings(Settings.builder().put("cluster.remote.leader_cluster.seeds", address)); - assertAcked(followerClient().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } /** @@ -175,7 +170,7 @@ public abstract class CcrIntegTestCase extends ESTestCase { } } - private NodeConfigurationSource createNodeConfigurationSource() { + private NodeConfigurationSource createNodeConfigurationSource(String leaderSeedAddress) { Settings.Builder builder = 
Settings.builder(); builder.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE); // Default the watermarks to absurdly low to prevent the tests @@ -195,6 +190,9 @@ public abstract class CcrIntegTestCase extends ESTestCase { builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); builder.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false); builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial"); + if (leaderSeedAddress != null) { + builder.put("cluster.remote.leader_cluster.seeds", leaderSeedAddress); + } return new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 534397a0a9a..7228acaacf1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -89,7 +89,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); assertThat(entries.get(0).getValue(), nullValue()); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -154,7 +154,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(results.get(0).clusterStateFetchException, sameInstance(failure)); assertThat(results.get(0).autoFollowExecutionResults.entrySet().size(), equalTo(0)); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) { 
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -209,7 +209,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); assertThat(entries.get(0).getValue(), sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -266,7 +266,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); assertThat(entries.get(0).getValue(), sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -532,8 +532,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( null, mock(ClusterService.class), - new CcrLicenseChecker(() -> true, () -> false) - ); + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); autoFollowCoordinator.updateStats(Collections.singletonList( new AutoFollowCoordinator.AutoFollowResult("_alias1")) @@ -585,6 +585,92 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias2:index2").getCause().getMessage(), equalTo("error")); } + public void testUpdateAutoFollowers() { + ClusterService clusterService = mock(ClusterService.class); + // Return a 
cluster state with no patterns so that the auto followers never really execute: + ClusterState followerState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))) + .build(); + when(clusterService.state()).thenReturn(followerState); + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + null, + clusterService, + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); + // Add 3 patterns: + Map patterns = new HashMap<>(); + patterns.put("pattern1", new AutoFollowPattern("remote1", Collections.singletonList("logs-*"), null, null, null, + null, null, null, null, null, null, null, null)); + patterns.put("pattern2", new AutoFollowPattern("remote2", Collections.singletonList("logs-*"), null, null, null, + null, null, null, null, null, null, null, null)); + patterns.put("pattern3", new AutoFollowPattern("remote2", Collections.singletonList("metrics-*"), null, null, null, + null, null, null, null, null, null, null, null)); + ClusterState clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2)); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue()); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue()); + // Remove patterns 1 and 3: + patterns.remove("pattern1"); + patterns.remove("pattern3"); + clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, 
Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(1)); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue()); + // Add pattern 4: + patterns.put("pattern4", new AutoFollowPattern("remote1", Collections.singletonList("metrics-*"), null, null, null, + null, null, null, null, null, null, null, null)); + clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2)); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue()); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue()); + // Remove patterns 2 and 4: + patterns.remove("pattern2"); + patterns.remove("pattern4"); + clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0)); + } + + public void testUpdateAutoFollowersNoPatterns() { + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + null, + mock(ClusterService.class), + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); + ClusterState clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(Collections.emptyMap(), 
Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0)); + } + + public void testUpdateAutoFollowersNoAutoFollowMetadata() { + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + null, + mock(ClusterService.class), + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); + ClusterState clusterState = ClusterState.builder(new ClusterName("remote")).build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0)); + } + public void testWaitForMetadataVersion() { Client client = mock(Client.class); when(client.getRemoteClusterClient(anyString())).thenReturn(client); @@ -611,7 +697,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { List allResults = new ArrayList<>(); Consumer> handler = allResults::addAll; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) { long previousRequestedMetadataVersion = 0; @@ -669,7 +755,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { fail("should not be invoked"); }; AtomicInteger counter = new AtomicInteger(); - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) { long previousRequestedMetadataVersion = 0; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java index c651cca5b6a..41e771ac97e 100644 --- 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomReadExceptions; +import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomTrackingClusters; import static org.elasticsearch.xpack.ccr.action.StatsResponsesTests.createStatsResponse; public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCase { @@ -27,7 +28,8 @@ public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCas randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - randomReadExceptions() + randomReadExceptions(), + randomTrackingClusters() ); FollowStatsAction.StatsResponses statsResponse = createStatsResponse(); return new CcrStatsAction.Response(autoFollowStats, statsResponse); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java index c4a61529f49..61b92b485c1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster; import java.io.IOException; import java.util.Map; @@ -34,7 +35,8 @@ public class AutoFollowStatsTests extends 
AbstractSerializingTestCase randomTrackingClusters() { + final int count = randomIntBetween(0, 16); + final NavigableMap readExceptions = new TreeMap<>(); + for (int i = 0; i < count; i++) { + readExceptions.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong())); + } + return readExceptions; + } + @Override protected Writeable.Reader instanceReader() { return AutoFollowStats::new; @@ -56,6 +67,11 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase entry : newInstance.getRecentAutoFollowErrors().entrySet()) { @@ -68,6 +84,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase(Collections.singletonMap( randomAlphaOfLength(4), new ElasticsearchException("cannot follow index"))); + + final NavigableMap trackingClusters = + new TreeMap<>(Collections.singletonMap( + randomAlphaOfLength(4), + new AutoFollowedCluster(1L, 1L))); final AutoFollowStats autoFollowStats = - new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions); + new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions, + trackingClusters); final AutoFollowStatsMonitoringDoc document = new AutoFollowStatsMonitoringDoc("_cluster", timestamp, intervalMillis, node, autoFollowStats); @@ -99,7 +107,7 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase + "\"ccr_auto_follow_stats\":{" + "\"number_of_failed_follow_indices\":" + autoFollowStats.getNumberOfFailedFollowIndices() + "," + "\"number_of_failed_remote_cluster_state_requests\":" + - autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + "," + autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + "," + "\"number_of_successful_follow_indices\":" + autoFollowStats.getNumberOfSuccessfulFollowIndices() + "," + "\"recent_auto_follow_errors\":[" + "{" @@ -109,6 +117,15 @@ public class AutoFollowStatsMonitoringDocTests extends 
BaseMonitoringDocTestCase + "\"reason\":\"cannot follow index\"" + "}" + "}" + + "]," + + "\"auto_followed_clusters\":[" + + "{" + + "\"cluster_name\":\"" + trackingClusters.keySet().iterator().next() + "\"," + + "\"time_since_last_check_millis\":" + + trackingClusters.values().iterator().next().getTimeSinceLastCheckMillis() + "," + + "\"last_seen_metadata_version\":" + + trackingClusters.values().iterator().next().getLastSeenMetadataVersion() + + "}" + "]" + "}" + "}")); @@ -117,7 +134,11 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { final NavigableMap fetchExceptions = new TreeMap<>(Collections.singletonMap("leader_index", new ElasticsearchException("cannot follow index"))); - final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions); + final NavigableMap trackingClusters = + new TreeMap<>(Collections.singletonMap( + randomAlphaOfLength(4), + new AutoFollowedCluster(1L, 1L))); + final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions, trackingClusters); XContentBuilder builder = jsonBuilder(); builder.value(status); Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); @@ -142,18 +163,28 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, anyOf(equalTo("keyword"), equalTo("text"))); } else { + Map innerFieldValue = (Map) ((List) fieldValue).get(0); // Manual test specific object fields and if not just fail: if (fieldName.equals("recent_auto_follow_errors")) { assertThat(fieldType, equalTo("nested")); - assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(2)); + assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(innerFieldValue.size())); assertThat(XContentMapValues.extractValue("properties.leader_index.type", 
fieldMapping), equalTo("keyword")); assertThat(XContentMapValues.extractValue("properties.auto_follow_exception.type", fieldMapping), equalTo("object")); + innerFieldValue = (Map) innerFieldValue.get("auto_follow_exception"); Map exceptionFieldMapping = (Map) XContentMapValues.extractValue("properties.auto_follow_exception.properties", fieldMapping); - assertThat(exceptionFieldMapping.size(), equalTo(2)); + assertThat(exceptionFieldMapping.size(), equalTo(innerFieldValue.size())); assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); + } else if (fieldName.equals("auto_followed_clusters")) { + assertThat(fieldType, equalTo("nested")); + Map innerFieldMapping = ((Map) fieldMapping.get("properties")); + assertThat(innerFieldMapping.size(), equalTo(innerFieldValue.size())); + + assertThat(XContentMapValues.extractValue("cluster_name.type", innerFieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("time_since_last_check_millis.type", innerFieldMapping), equalTo("long")); + assertThat(XContentMapValues.extractValue("last_seen_metadata_version.type", innerFieldMapping), equalTo("long")); } else { fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java index 6f28c450f04..032cedbdcdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.ccr; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.AbstractMap; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; @@ -33,6 +35,10 @@ public class AutoFollowStats implements Writeable, ToXContentObject { private static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors"); private static final ParseField LEADER_INDEX = new ParseField("leader_index"); private static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception"); + private static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters"); + private static final ParseField CLUSTER_NAME = new ParseField("cluster_name"); + private static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis"); + private static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats", @@ -43,26 +49,39 @@ public class AutoFollowStats implements Writeable, ToXContentObject { new TreeMap<>( ((List>) args[3]) .stream() - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) - )); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), + new TreeMap<>( + ((List>) args[4]) + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))))); private static final ConstructingObjectParser, Void> AUTO_FOLLOW_EXCEPTIONS_PARSER = new ConstructingObjectParser<>( "auto_follow_stats_errors", args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1])); + private static final ConstructingObjectParser, Void> AUTO_FOLLOWED_CLUSTERS_PARSER = + new 
ConstructingObjectParser<>( + "auto_followed_clusters", + args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2]))); + static { AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX); AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject( ConstructingObjectParser.constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), AUTO_FOLLOW_EXCEPTION); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER, RECENT_AUTO_FOLLOW_ERRORS); + STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER, + AUTO_FOLLOWED_CLUSTERS); } public static AutoFollowStats fromXContent(final XContentParser parser) { @@ -73,24 +92,32 @@ public class AutoFollowStats implements Writeable, ToXContentObject { private final long numberOfFailedRemoteClusterStateRequests; private final long numberOfSuccessfulFollowIndices; private final NavigableMap recentAutoFollowErrors; + private final NavigableMap autoFollowedClusters; public AutoFollowStats( - long numberOfFailedFollowIndices, - long numberOfFailedRemoteClusterStateRequests, - long numberOfSuccessfulFollowIndices, - NavigableMap recentAutoFollowErrors + long numberOfFailedFollowIndices, + 
long numberOfFailedRemoteClusterStateRequests, + long numberOfSuccessfulFollowIndices, + NavigableMap recentAutoFollowErrors, + NavigableMap autoFollowedClusters ) { this.numberOfFailedFollowIndices = numberOfFailedFollowIndices; this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests; this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices; this.recentAutoFollowErrors = recentAutoFollowErrors; + this.autoFollowedClusters = autoFollowedClusters; } public AutoFollowStats(StreamInput in) throws IOException { numberOfFailedFollowIndices = in.readVLong(); numberOfFailedRemoteClusterStateRequests = in.readVLong(); numberOfSuccessfulFollowIndices = in.readVLong(); - recentAutoFollowErrors= new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException)); + recentAutoFollowErrors = new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException)); + if (in.getVersion().onOrAfter(Version.V_6_6_0)) { + autoFollowedClusters = new TreeMap<>(in.readMap(StreamInput::readString, AutoFollowedCluster::new)); + } else { + autoFollowedClusters = Collections.emptyNavigableMap(); + } } @Override @@ -99,6 +126,9 @@ public class AutoFollowStats implements Writeable, ToXContentObject { out.writeVLong(numberOfFailedRemoteClusterStateRequests); out.writeVLong(numberOfSuccessfulFollowIndices); out.writeMap(recentAutoFollowErrors, StreamOutput::writeString, StreamOutput::writeException); + if (out.getVersion().onOrAfter(Version.V_6_6_0)) { + out.writeMap(autoFollowedClusters, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } } public long getNumberOfFailedFollowIndices() { @@ -117,6 +147,10 @@ public class AutoFollowStats implements Writeable, ToXContentObject { return recentAutoFollowErrors; } + public NavigableMap getAutoFollowedClusters() { + return autoFollowedClusters; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 
builder.startObject(); @@ -148,6 +182,19 @@ public class AutoFollowStats implements Writeable, ToXContentObject { } } builder.endArray(); + builder.startArray(AUTO_FOLLOWED_CLUSTERS.getPreferredName()); + { + for (final Map.Entry entry : autoFollowedClusters.entrySet()) { + builder.startObject(); + { + builder.field(CLUSTER_NAME.getPreferredName(), entry.getKey()); + builder.field(TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(), entry.getValue().getTimeSinceLastCheckMillis()); + builder.field(LAST_SEEN_METADATA_VERSION.getPreferredName(), entry.getValue().getLastSeenMetadataVersion()); + } + builder.endObject(); + } + } + builder.endArray(); return builder; } @@ -165,7 +212,8 @@ public class AutoFollowStats implements Writeable, ToXContentObject { * keys. */ recentAutoFollowErrors.keySet().equals(that.recentAutoFollowErrors.keySet()) && - getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that)); + getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that)) && + Objects.equals(autoFollowedClusters, that.autoFollowedClusters); } @Override @@ -179,7 +227,8 @@ public class AutoFollowStats implements Writeable, ToXContentObject { * messages. Note that we are relying on the fact that the auto follow exceptions are ordered by keys. 
*/ recentAutoFollowErrors.keySet(), - getFetchExceptionMessages(this) + getFetchExceptionMessages(this), + autoFollowedClusters ); } @@ -194,6 +243,58 @@ public class AutoFollowStats implements Writeable, ToXContentObject { ", numberOfFailedRemoteClusterStateRequests=" + numberOfFailedRemoteClusterStateRequests + ", numberOfSuccessfulFollowIndices=" + numberOfSuccessfulFollowIndices + ", recentAutoFollowErrors=" + recentAutoFollowErrors + + ", autoFollowedClusters=" + autoFollowedClusters + '}'; } + + public static class AutoFollowedCluster implements Writeable { + + private final long timeSinceLastCheckMillis; + private final long lastSeenMetadataVersion; + + public AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) { + this.timeSinceLastCheckMillis = timeSinceLastCheckMillis; + this.lastSeenMetadataVersion = lastSeenMetadataVersion; + } + + public AutoFollowedCluster(StreamInput in) throws IOException { + this(in.readZLong(), in.readVLong()); + } + + public long getTimeSinceLastCheckMillis() { + return timeSinceLastCheckMillis; + } + + public long getLastSeenMetadataVersion() { + return lastSeenMetadataVersion; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeZLong(timeSinceLastCheckMillis); + out.writeVLong(lastSeenMetadataVersion); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoFollowedCluster that = (AutoFollowedCluster) o; + return timeSinceLastCheckMillis == that.timeSinceLastCheckMillis && + lastSeenMetadataVersion == that.lastSeenMetadataVersion; + } + + @Override + public int hashCode() { + return Objects.hash(timeSinceLastCheckMillis, lastSeenMetadataVersion); + } + + @Override + public String toString() { + return "AutoFollowedCluster{" + + "timeSinceLastCheckMillis=" + timeSinceLastCheckMillis + + ", lastSeenMetadataVersion=" + lastSeenMetadataVersion + + '}'; + } + } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java index 679ee0756f6..57bd5bd35dd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.core.security.action.token; import org.elasticsearch.action.Action; /** - * Action for invalidating a given token + * Action for invalidating one or more tokens */ public final class InvalidateTokenAction extends Action { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java index 7a8372fe456..de3b73ec4af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.security.action.token; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -22,31 +23,81 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; public final class InvalidateTokenRequest extends ActionRequest { public enum Type { - ACCESS_TOKEN, - REFRESH_TOKEN + ACCESS_TOKEN("token"), + REFRESH_TOKEN("refresh_token"); + + private final String 
value; + + Type(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + public static Type fromString(String tokenType) { + if (tokenType != null) { + for (Type type : values()) { + if (type.getValue().equals(tokenType)) { + return type; + } + } + } + return null; + } } private String tokenString; private Type tokenType; + private String realmName; + private String userName; public InvalidateTokenRequest() {} /** - * @param tokenString the string representation of the token + * @param tokenString the string representation of the token to be invalidated + * @param tokenType the type of the token to be invalidated + * @param realmName the name of the realm for which all tokens will be invalidated + * @param userName the principal of the user for which all tokens will be invalidated */ - public InvalidateTokenRequest(String tokenString, Type type) { + public InvalidateTokenRequest(@Nullable String tokenString, @Nullable String tokenType, + @Nullable String realmName, @Nullable String userName) { this.tokenString = tokenString; - this.tokenType = type; + this.tokenType = Type.fromString(tokenType); + this.realmName = realmName; + this.userName = userName; + } + + /** + * @param tokenString the string representation of the token to be invalidated + * @param tokenType the type of the token to be invalidated + */ + public InvalidateTokenRequest(String tokenString, String tokenType) { + this.tokenString = tokenString; + this.tokenType = Type.fromString(tokenType); + this.realmName = null; + this.userName = null; } @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - if (Strings.isNullOrEmpty(tokenString)) { - validationException = addValidationError("token string must be provided", null); - } - if (tokenType == null) { - validationException = addValidationError("token type must be provided", validationException); + if (Strings.hasText(realmName) || 
Strings.hasText(userName)) { + if (Strings.hasText(tokenString)) { + validationException = + addValidationError("token string must not be provided when realm name or username is specified", null); + } + if (tokenType != null) { + validationException = + addValidationError("token type must not be provided when realm name or username is specified", validationException); + } + } else if (Strings.isNullOrEmpty(tokenString)) { + validationException = + addValidationError("token string must be provided when not specifying a realm name or a username", null); + } else if (tokenType == null) { + validationException = + addValidationError("token type must be provided when a token string is specified", null); } return validationException; } @@ -67,26 +118,76 @@ public final class InvalidateTokenRequest extends ActionRequest { this.tokenType = tokenType; } + public String getRealmName() { + return realmName; + } + + public void setRealmName(String realmName) { + this.realmName = realmName; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(tokenString); + if (out.getVersion().before(Version.V_7_0_0)) { + if (Strings.isNullOrEmpty(tokenString)) { + throw new IllegalArgumentException("token is required for versions < v6.6.0"); + } + out.writeString(tokenString); + } else { + out.writeOptionalString(tokenString); + } if (out.getVersion().onOrAfter(Version.V_6_2_0)) { - out.writeVInt(tokenType.ordinal()); + if (out.getVersion().before(Version.V_7_0_0)) { + if (tokenType == null) { + throw new IllegalArgumentException("token type is not optional for versions > v6.2.0 and < v6.6.0"); + } + out.writeVInt(tokenType.ordinal()); + } else { + out.writeOptionalVInt(tokenType == null ? 
null : tokenType.ordinal()); + } } else if (tokenType == Type.REFRESH_TOKEN) { - throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + - "]"); + throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + "]"); + } + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeOptionalString(realmName); + out.writeOptionalString(userName); + } else if (realmName != null || userName != null) { + throw new IllegalArgumentException( + "realm or user token invalidation cannot be serialized with version [" + out.getVersion() + "]"); } } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - tokenString = in.readString(); + if (in.getVersion().before(Version.V_7_0_0)) { + tokenString = in.readString(); + } else { + tokenString = in.readOptionalString(); + } if (in.getVersion().onOrAfter(Version.V_6_2_0)) { - tokenType = Type.values()[in.readVInt()]; + if (in.getVersion().before(Version.V_7_0_0)) { + int type = in.readVInt(); + tokenType = Type.values()[type]; + } else { + Integer type = in.readOptionalVInt(); + tokenType = type == null ? 
null : Type.values()[type]; + } } else { tokenType = Type.ACCESS_TOKEN; } + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + realmName = in.readOptionalString(); + userName = in.readOptionalString(); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java index f77f6c65332..0b454905cfa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java @@ -34,4 +34,20 @@ public final class InvalidateTokenRequestBuilder request.setTokenType(type); return this; } + + /** + * Sets the name of the realm for which all tokens should be invalidated + */ + public InvalidateTokenRequestBuilder setRealmName(String realmName) { + request.setRealmName(realmName); + return this; + } + + /** + * Sets the username for which all tokens should be invalidated + */ + public InvalidateTokenRequestBuilder setUserName(String username) { + request.setUserName(username); + return this; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java index cebb005b272..886caeac370 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java @@ -5,41 +5,83 @@ */ package org.elasticsearch.xpack.core.security.action.token; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Objects; /** - * Response for a invalidation of a token. + * Response for a invalidation of one or multiple tokens. */ -public final class InvalidateTokenResponse extends ActionResponse { +public final class InvalidateTokenResponse extends ActionResponse implements ToXContent { - private boolean created; + private TokensInvalidationResult result; public InvalidateTokenResponse() {} - public InvalidateTokenResponse(boolean created) { - this.created = created; + public InvalidateTokenResponse(TokensInvalidationResult result) { + this.result = result; } - /** - * If the token is already invalidated then created will be false - */ - public boolean isCreated() { - return created; + public TokensInvalidationResult getResult() { + return result; + } + + private boolean isCreated() { + return result.getInvalidatedTokens().size() > 0 + && result.getPreviouslyInvalidatedTokens().isEmpty() + && result.getErrors().isEmpty(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(created); + if (out.getVersion().before(Version.V_7_0_0)) { + out.writeBoolean(isCreated()); + } else { + result.writeTo(out); + } } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - created = in.readBoolean(); + if (in.getVersion().before(Version.V_7_0_0)) { + final boolean created = in.readBoolean(); + if (created) { + result = new TokensInvalidationResult(Arrays.asList(""), Collections.emptyList(), Collections.emptyList(), 0); + } else { + result = new TokensInvalidationResult(Collections.emptyList(), 
Arrays.asList(""), Collections.emptyList(), 0); + } + } else { + result = new TokensInvalidationResult(in); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + result.toXContent(builder, params); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InvalidateTokenResponse that = (InvalidateTokenResponse) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java new file mode 100644 index 00000000000..cfa83b63ed5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authc.support; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * The result of attempting to invalidate one or multiple tokens. The result contains information about: + *
    + *
  • how many of the tokens were actually invalidated
  • + *
  • how many tokens are not invalidated in this request because they were already invalidated
  • + *
  • how many errors were encountered while invalidating tokens and the error details
  • + *
+ */ +public class TokensInvalidationResult implements ToXContentObject, Writeable { + + private final List invalidatedTokens; + private final List previouslyInvalidatedTokens; + private final List errors; + private final int attemptCount; + + public TokensInvalidationResult(List invalidatedTokens, List previouslyInvalidatedTokens, + @Nullable List errors, int attemptCount) { + Objects.requireNonNull(invalidatedTokens, "invalidated_tokens must be provided"); + this.invalidatedTokens = invalidatedTokens; + Objects.requireNonNull(previouslyInvalidatedTokens, "previously_invalidated_tokens must be provided"); + this.previouslyInvalidatedTokens = previouslyInvalidatedTokens; + if (null != errors) { + this.errors = errors; + } else { + this.errors = Collections.emptyList(); + } + this.attemptCount = attemptCount; + } + + public TokensInvalidationResult(StreamInput in) throws IOException { + this.invalidatedTokens = in.readList(StreamInput::readString); + this.previouslyInvalidatedTokens = in.readList(StreamInput::readString); + this.errors = in.readList(StreamInput::readException); + this.attemptCount = in.readVInt(); + } + + public static TokensInvalidationResult emptyResult() { + return new TokensInvalidationResult(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); + } + + + public List getInvalidatedTokens() { + return invalidatedTokens; + } + + public List getPreviouslyInvalidatedTokens() { + return previouslyInvalidatedTokens; + } + + public List getErrors() { + return errors; + } + + public int getAttemptCount() { + return attemptCount; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + //Remove created after PR is backported to 6.x + .field("created", isCreated()) + .field("invalidated_tokens", invalidatedTokens.size()) + .field("previously_invalidated_tokens", previouslyInvalidatedTokens.size()) + .field("error_count", errors.size()); + if 
(errors.isEmpty() == false) { + builder.field("error_details"); + builder.startArray(); + for (ElasticsearchException e : errors) { + builder.startObject(); + ElasticsearchException.generateThrowableXContent(builder, params, e); + builder.endObject(); + } + builder.endArray(); + } + return builder.endObject(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringList(invalidatedTokens); + out.writeStringList(previouslyInvalidatedTokens); + out.writeCollection(errors, StreamOutput::writeException); + out.writeVInt(attemptCount); + } + + private boolean isCreated() { + return this.getInvalidatedTokens().size() > 0 + && this.getPreviouslyInvalidatedTokens().isEmpty() + && this.getErrors().isEmpty(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java index ef59f870c68..a7faf4d2231 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java @@ -326,6 +326,10 @@ public class SecurityClient { return new InvalidateTokenRequestBuilder(client).setTokenString(token); } + public InvalidateTokenRequestBuilder prepareInvalidateToken() { + return new InvalidateTokenRequestBuilder(client); + } + public void invalidateToken(InvalidateTokenRequest request, ActionListener listener) { client.execute(InvalidateTokenAction.INSTANCE, request, listener); } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 1e6d3ec892a..c34fed37516 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -1060,6 +1060,20 @@ } } } + }, + "auto_followed_clusters": { + "type": "nested", + 
"properties": { + "cluster_name": { + "type": "keyword" + }, + "time_since_last_check_millis": { + "type": "long" + }, + "last_seen_metadata_version": { + "type": "long" + } + } } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java index bd23198e8ea..2d8782f0111 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.token; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java new file mode 100644 index 00000000000..3fd7eb7da46 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.action.token; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; + +public class InvalidateTokenRequestTests extends ESTestCase { + + public void testValidation() { + InvalidateTokenRequest request = new InvalidateTokenRequest(); + ActionRequestValidationException ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), containsString("token string must be provided when not specifying a realm")); + + request = new InvalidateTokenRequest(randomAlphaOfLength(12), randomFrom("", null)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), containsString("token type must be provided when a token string is specified")); + + request = new InvalidateTokenRequest(randomFrom("", null), "access_token"); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), containsString("token string must be provided when not specifying a realm")); + + request = new InvalidateTokenRequest(randomFrom("", null), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNull(ve); + + request = + new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + + request = new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("token", "refresh_token"), + randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + 
assertNotNull(ve); + assertEquals(2, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + assertThat(ve.validationErrors().get(1), + containsString("token type must not be provided when realm name or username is specified")); + + request = + new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + + request = + new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("token", "refresh_token"), randomFrom("", null), + randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(2, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + assertThat(ve.validationErrors().get(1), + containsString("token type must not be provided when realm name or username is specified")); + + request = new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomFrom("", null), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java new file mode 100644 index 00000000000..1a59971ff9c --- /dev/null +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.token; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class InvalidateTokenResponseTests extends ESTestCase { + + public void testSerialization() throws IOException { + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false)), + Arrays.asList(generateRandomStringArray(20, 15, false)), + Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), + new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), + randomIntBetween(0, 5)); + InvalidateTokenResponse response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + response.writeTo(output); + try (StreamInput input = 
output.bytes().streamInput()) { + InvalidateTokenResponse serialized = new InvalidateTokenResponse(); + serialized.readFrom(input); + assertThat(serialized.getResult().getInvalidatedTokens(), equalTo(response.getResult().getInvalidatedTokens())); + assertThat(serialized.getResult().getPreviouslyInvalidatedTokens(), + equalTo(response.getResult().getPreviouslyInvalidatedTokens())); + assertThat(serialized.getResult().getErrors().size(), equalTo(response.getResult().getErrors().size())); + assertThat(serialized.getResult().getErrors().get(0).toString(), containsString("this is an error message")); + assertThat(serialized.getResult().getErrors().get(1).toString(), containsString("this is an error message2")); + } + } + + result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false)), + Arrays.asList(generateRandomStringArray(20, 15, false)), + Collections.emptyList(), randomIntBetween(0, 5)); + response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + InvalidateTokenResponse serialized = new InvalidateTokenResponse(); + serialized.readFrom(input); + assertThat(serialized.getResult().getInvalidatedTokens(), equalTo(response.getResult().getInvalidatedTokens())); + assertThat(serialized.getResult().getPreviouslyInvalidatedTokens(), + equalTo(response.getResult().getPreviouslyInvalidatedTokens())); + assertThat(serialized.getResult().getErrors().size(), equalTo(response.getResult().getErrors().size())); + } + } + } + + public void testSerializationToPre66Version() throws IOException{ + final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_2_0, Version.V_6_5_1); + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Arrays.asList(new 
ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), + new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), + randomIntBetween(0, 5)); + InvalidateTokenResponse response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(version); + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + // False as we have errors and previously invalidated tokens + assertThat(input.readBoolean(), equalTo(false)); + } + } + + result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Collections.emptyList(), randomIntBetween(0, 5)); + response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(version); + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + // False as we have previously invalidated tokens + assertThat(input.readBoolean(), equalTo(false)); + } + } + + result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Collections.emptyList(), Collections.emptyList(), randomIntBetween(0, 5)); + response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(version); + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + assertThat(input.readBoolean(), equalTo(true)); + } + } + } + + public void testToXContent() throws IOException { + List invalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false)); + List previouslyInvalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false)); + TokensInvalidationResult result = new TokensInvalidationResult(invalidatedTokens, previouslyInvalidatedTokens, + Arrays.asList(new 
ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), + new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), + randomIntBetween(0, 5)); + InvalidateTokenResponse response = new InvalidateTokenResponse(result); + XContentBuilder builder = XContentFactory.jsonBuilder(); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertThat(Strings.toString(builder), + equalTo("{\"created\":false," + + "\"invalidated_tokens\":" + invalidatedTokens.size() + "," + + "\"previously_invalidated_tokens\":" + previouslyInvalidatedTokens.size() + "," + + "\"error_count\":2," + + "\"error_details\":[" + + "{\"type\":\"exception\"," + + "\"reason\":\"foo\"," + + "\"caused_by\":{" + + "\"type\":\"illegal_argument_exception\"," + + "\"reason\":\"this is an error message\"}" + + "}," + + "{\"type\":\"exception\"," + + "\"reason\":\"bar\"," + + "\"caused_by\":" + + "{\"type\":\"illegal_argument_exception\"," + + "\"reason\":\"this is an error message2\"}" + + "}" + + "]" + + "}")); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index f0e6bf2c990..8c35df01ed9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionResponse; +import 
org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.UserToken; @@ -27,12 +28,11 @@ import org.elasticsearch.xpack.security.authc.saml.SamlRedirect; import org.elasticsearch.xpack.security.authc.saml.SamlUtils; import org.opensaml.saml.saml2.core.LogoutResponse; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; +import java.util.function.Predicate; import static org.elasticsearch.xpack.security.authc.saml.SamlRealm.findSamlRealms; @@ -85,7 +85,7 @@ public final class TransportSamlInvalidateSessionAction private void findAndInvalidateTokens(SamlRealm realm, SamlLogoutRequestHandler.Result result, ActionListener listener) { final Map tokenMetadata = realm.createTokenMetadata(result.getNameId(), result.getSession()); - if (Strings.hasText((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE)) == false) { + if (Strings.isNullOrEmpty((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE))) { // If we don't have a valid name-id to match against, don't do anything logger.debug("Logout request [{}] has no NameID value, so cannot invalidate any sessions", result); listener.onResponse(0); @@ -93,22 +93,21 @@ public final class TransportSamlInvalidateSessionAction } tokenService.findActiveTokensForRealm(realm.name(), ActionListener.wrap(tokens -> { - List> sessionTokens = filterTokens(tokens, tokenMetadata); - logger.debug("Found [{}] token pairs to invalidate for SAML metadata [{}]", sessionTokens.size(), tokenMetadata); - if (sessionTokens.isEmpty()) { - listener.onResponse(0); - } else { - GroupedActionListener groupedListener = new GroupedActionListener<>( - ActionListener.wrap(collection -> listener.onResponse(collection.size()), listener::onFailure), 
- sessionTokens.size(), Collections.emptyList() - ); - sessionTokens.forEach(tuple -> invalidateTokenPair(tuple, groupedListener)); - } - }, e -> listener.onFailure(e) - )); + logger.debug("Found [{}] token pairs to invalidate for SAML metadata [{}]", tokens.size(), tokenMetadata); + if (tokens.isEmpty()) { + listener.onResponse(0); + } else { + GroupedActionListener groupedListener = new GroupedActionListener<>( + ActionListener.wrap(collection -> listener.onResponse(collection.size()), listener::onFailure), + tokens.size(), Collections.emptyList() + ); + tokens.forEach(tuple -> invalidateTokenPair(tuple, groupedListener)); + } + }, listener::onFailure + ), containsMetadata(tokenMetadata)); } - private void invalidateTokenPair(Tuple tokenPair, ActionListener listener) { + private void invalidateTokenPair(Tuple tokenPair, ActionListener listener) { // Invalidate the refresh token first, so the client doesn't trigger a refresh once the access token is invalidated tokenService.invalidateRefreshToken(tokenPair.v2(), ActionListener.wrap(ignore -> tokenService.invalidateAccessToken( tokenPair.v1(), @@ -118,13 +117,12 @@ public final class TransportSamlInvalidateSessionAction })), listener::onFailure)); } - private List> filterTokens(Collection> tokens, Map requiredMetadata) { - return tokens.stream() - .filter(tup -> { - Map actualMetadata = tup.v1().getMetadata(); - return requiredMetadata.entrySet().stream().allMatch(e -> Objects.equals(actualMetadata.get(e.getKey()), e.getValue())); - }) - .collect(Collectors.toList()); + + private Predicate> containsMetadata(Map requiredMetadata) { + return source -> { + Map actualMetadata = (Map) source.get("metadata"); + return requiredMetadata.entrySet().stream().allMatch(e -> Objects.equals(actualMetadata.get(e.getKey()), e.getValue())); + }; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java index b62702ead78..28e9f911cd5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Realm; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; @@ -79,7 +80,7 @@ public final class TransportSamlLogoutAction }, listener::onFailure)); } - private void invalidateRefreshToken(String refreshToken, ActionListener listener) { + private void invalidateRefreshToken(String refreshToken, ActionListener listener) { if (refreshToken == null) { listener.onResponse(null); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java index 70f614435fc..9f0443a86f7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.security.action.token; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenResponse; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.authc.TokenService; /** @@ -31,9 +33,12 @@ public final class TransportInvalidateTokenAction extends HandledTransportAction @Override protected void doExecute(Task task, InvalidateTokenRequest request, ActionListener listener) { - final ActionListener invalidateListener = - ActionListener.wrap(created -> listener.onResponse(new InvalidateTokenResponse(created)), listener::onFailure); - if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) { + final ActionListener invalidateListener = + ActionListener.wrap(tokensInvalidationResult -> + listener.onResponse(new InvalidateTokenResponse(tokensInvalidationResult)), listener::onFailure); + if (Strings.hasText(request.getUserName()) || Strings.hasText(request.getRealmName())) { + tokenService.invalidateActiveTokensForRealmAndUser(request.getRealmName(), request.getUserName(), invalidateListener); + } else if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) { tokenService.invalidateAccessToken(request.getTokenString(), invalidateListener); } else { assert request.getTokenType() == InvalidateTokenRequest.Type.REFRESH_TOKEN; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index be5b11aa666..15d3e758426 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -17,6 +17,11 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest.OpType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetItemResponse; @@ -24,7 +29,6 @@ import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -39,6 +43,7 @@ import org.elasticsearch.cluster.ack.AckedRequest; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -61,7 +66,6 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.index.engine.DocumentMissingException; import 
org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -74,6 +78,7 @@ import org.elasticsearch.xpack.core.security.ScrollHelper; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import javax.crypto.Cipher; @@ -90,6 +95,7 @@ import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; @@ -116,6 +122,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; +import java.util.function.Predicate; +import java.util.stream.Collectors; import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; @@ -221,9 +229,9 @@ public final class TokenService { boolean includeRefreshToken) throws IOException { ensureEnabled(); if (authentication == null) { - listener.onFailure(traceLog("create token", null, new IllegalArgumentException("authentication must be provided"))); + listener.onFailure(traceLog("create token", new IllegalArgumentException("authentication must be provided"))); } else if (originatingClientAuth == null) { - listener.onFailure(traceLog("create token", null, + listener.onFailure(traceLog("create token", new IllegalArgumentException("originating client authentication must be provided"))); } else { 
final Instant created = clock.instant(); @@ -471,7 +479,7 @@ public final class TokenService { * have been created on versions on or after 6.2; this step involves performing an update to * the token document and setting the invalidated field to true */ - public void invalidateAccessToken(String tokenString, ActionListener listener) { + public void invalidateAccessToken(String tokenString, ActionListener listener) { ensureEnabled(); if (Strings.isNullOrEmpty(tokenString)) { logger.trace("No token-string provided"); @@ -484,7 +492,8 @@ public final class TokenService { listener.onFailure(traceLog("invalidate token", tokenString, malformedTokenException())); } else { final long expirationEpochMilli = getExpirationTime().toEpochMilli(); - indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli); + indexBwcInvalidation(Collections.singleton(userToken.getId()), listener, new AtomicInteger(0), + expirationEpochMilli, null); } }, listener::onFailure)); } catch (IOException e) { @@ -499,7 +508,7 @@ public final class TokenService { * * @see #invalidateAccessToken(String, ActionListener) */ - public void invalidateAccessToken(UserToken userToken, ActionListener listener) { + public void invalidateAccessToken(UserToken userToken, ActionListener listener) { ensureEnabled(); if (userToken == null) { logger.trace("No access token provided"); @@ -507,11 +516,17 @@ public final class TokenService { } else { maybeStartTokenRemover(); final long expirationEpochMilli = getExpirationTime().toEpochMilli(); - indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli); + indexBwcInvalidation(Collections.singleton(userToken.getId()), listener, new AtomicInteger(0), expirationEpochMilli, null); } } - public void invalidateRefreshToken(String refreshToken, ActionListener listener) { + /** + * This method performs the steps necessary to invalidate a refresh token so that it may no longer be used. 
+ * + * @param refreshToken The string representation of the refresh token + * @param listener the listener to notify upon completion + */ + public void invalidateRefreshToken(String refreshToken, ActionListener listener) { ensureEnabled(); if (Strings.isNullOrEmpty(refreshToken)) { logger.trace("No refresh token provided"); @@ -520,152 +535,222 @@ public final class TokenService { maybeStartTokenRemover(); findTokenFromRefreshToken(refreshToken, ActionListener.wrap(tuple -> { - final String docId = tuple.v1().getHits().getAt(0).getId(); - final long docVersion = tuple.v1().getHits().getAt(0).getVersion(); - indexInvalidation(docId, Version.CURRENT, listener, tuple.v2(), "refresh_token", docVersion); + final String docId = getTokenIdFromDocumentId(tuple.v1().getHits().getAt(0).getId()); + indexInvalidation(Collections.singletonList(docId), listener, tuple.v2(), "refresh_token", null); }, listener::onFailure), new AtomicInteger(0)); } } /** - * Performs the actual bwc invalidation of a token and then kicks off the new invalidation method + * Invalidate all access tokens and all refresh tokens of a given {@code realmName} and/or of a given + * {@code username} so that they may no longer be used * - * @param userToken the token to invalidate - * @param listener the listener to notify upon completion - * @param attemptCount the number of attempts to invalidate that have already been tried - * @param expirationEpochMilli the expiration time as milliseconds since the epoch + * @param realmName the realm of which the tokens should be invalidated + * @param username the username for which the tokens should be invalidated + * @param listener the listener to notify upon completion */ - private void indexBwcInvalidation(UserToken userToken, ActionListener listener, AtomicInteger attemptCount, - long expirationEpochMilli) { - if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { - logger.warn("Failed to invalidate token [{}] after [{}] attempts", userToken.getId(), 
attemptCount.get()); - listener.onFailure(invalidGrantException("failed to invalidate token")); + public void invalidateActiveTokensForRealmAndUser(@Nullable String realmName, @Nullable String username, + ActionListener listener) { + ensureEnabled(); + if (Strings.isNullOrEmpty(realmName) && Strings.isNullOrEmpty(username)) { + logger.trace("No realm name or username provided"); + listener.onFailure(new IllegalArgumentException("realm name or username must be provided")); } else { - final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken); - IndexRequest indexRequest = client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) - .setOpType(OpType.CREATE) - .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) - .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) - .request(); - final String tokenDocId = getTokenDocumentId(userToken); - final Version version = userToken.getVersion(); - securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)), - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest, - ActionListener.wrap(indexResponse -> { - ActionListener wrappedListener = - ActionListener.wrap(ignore -> listener.onResponse(true), listener::onFailure); - indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L); - }, e -> { - Throwable cause = ExceptionsHelper.unwrapCause(e); - traceLog("(bwc) invalidate token", tokenDocId, cause); - if (cause instanceof VersionConflictEngineException) { - // expected since something else could have invalidated - ActionListener wrappedListener = - ActionListener.wrap(ignore -> listener.onResponse(false), listener::onFailure); - indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L); - } else if (isShardNotAvailableException(e)) { - attemptCount.incrementAndGet(); - 
indexBwcInvalidation(userToken, listener, attemptCount, expirationEpochMilli); - } else { - listener.onFailure(e); - } - }), client::index)); + if (Strings.isNullOrEmpty(realmName)) { + findActiveTokensForUser(username, ActionListener.wrap(tokenTuples -> { + if (tokenTuples.isEmpty()) { + logger.warn("No tokens to invalidate for realm [{}] and username [{}]", realmName, username); + listener.onResponse(TokensInvalidationResult.emptyResult()); + } else { + invalidateAllTokens(tokenTuples.stream().map(t -> t.v1().getId()).collect(Collectors.toList()), listener); + } + }, listener::onFailure)); + } else { + Predicate filter = null; + if (Strings.hasText(username)) { + filter = isOfUser(username); + } + findActiveTokensForRealm(realmName, ActionListener.wrap(tokenTuples -> { + if (tokenTuples.isEmpty()) { + logger.warn("No tokens to invalidate for realm [{}] and username [{}]", realmName, username); + listener.onResponse(TokensInvalidationResult.emptyResult()); + } else { + invalidateAllTokens(tokenTuples.stream().map(t -> t.v1().getId()).collect(Collectors.toList()), listener); + } + }, listener::onFailure), filter); + } } } /** - * Performs the actual invalidation of a token + * Invalidates a collection of access_token and refresh_token that were retrieved by + * {@link TokenService#invalidateActiveTokensForRealmAndUser} * - * @param tokenDocId the id of the token doc to invalidate + * @param accessTokenIds The ids of the access tokens which should be invalidated (along with the respective refresh_token) + * @param listener the listener to notify upon completion + */ + private void invalidateAllTokens(Collection accessTokenIds, ActionListener listener) { + maybeStartTokenRemover(); + final long expirationEpochMilli = getExpirationTime().toEpochMilli(); + // Invalidate the refresh tokens first so that they cannot be used to get new + // access tokens while we invalidate the access tokens we currently know about + indexInvalidation(accessTokenIds, 
ActionListener.wrap(result -> + indexBwcInvalidation(accessTokenIds, listener, new AtomicInteger(result.getAttemptCount()), + expirationEpochMilli, result), + listener::onFailure), new AtomicInteger(0), "refresh_token", null); + } + + /** + * Performs the actual bwc invalidation of a collection of tokens and then kicks off the new invalidation method. + * + * @param tokenIds the collection of token ids or token document ids that should be invalidated + * @param listener the listener to notify upon completion + * @param attemptCount the number of attempts to invalidate that have already been tried + * @param expirationEpochMilli the expiration time as milliseconds since the epoch + * @param previousResult if this not the initial attempt for invalidation, it contains the result of invalidating + * tokens up to the point of the retry. This result is added to the result of the current attempt + */ + private void indexBwcInvalidation(Collection tokenIds, ActionListener listener, + AtomicInteger attemptCount, long expirationEpochMilli, + @Nullable TokensInvalidationResult previousResult) { + + if (tokenIds.isEmpty()) { + logger.warn("No tokens provided for invalidation"); + listener.onFailure(invalidGrantException("No tokens provided for invalidation")); + } else if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to invalidate [{}] tokens after [{}] attempts", tokenIds.size(), + attemptCount.get()); + listener.onFailure(invalidGrantException("failed to invalidate tokens")); + } else { + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + for (String tokenId : tokenIds) { + final String invalidatedTokenId = getInvalidatedTokenDocumentId(tokenId); + IndexRequest indexRequest = client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) + .setOpType(OpType.CREATE) + .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) + .request(); + bulkRequestBuilder.add(indexRequest); + } + 
bulkRequestBuilder.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); + final BulkRequest bulkRequest = bulkRequestBuilder.request(); + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", ex)), + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequest, + ActionListener.wrap(bulkResponse -> { + List retryTokenIds = new ArrayList<>(); + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + if (bulkItemResponse.isFailed()) { + Throwable cause = bulkItemResponse.getFailure().getCause(); + logger.error(cause.getMessage()); + traceLog("(bwc) invalidate tokens", cause); + if (isShardNotAvailableException(cause)) { + retryTokenIds.add(getTokenIdFromInvalidatedTokenDocumentId(bulkItemResponse.getFailure().getId())); + } else if ((cause instanceof VersionConflictEngineException) == false){ + // We don't handle VersionConflictEngineException, the ticket has been invalidated + listener.onFailure(bulkItemResponse.getFailure().getCause()); + } + } + } + if (retryTokenIds.isEmpty() == false) { + attemptCount.incrementAndGet(); + indexBwcInvalidation(retryTokenIds, listener, attemptCount, expirationEpochMilli, previousResult); + } + indexInvalidation(tokenIds, listener, attemptCount, "access_token", previousResult); + }, e -> { + Throwable cause = ExceptionsHelper.unwrapCause(e); + traceLog("(bwc) invalidate tokens", cause); + if (isShardNotAvailableException(cause)) { + attemptCount.incrementAndGet(); + indexBwcInvalidation(tokenIds, listener, attemptCount, expirationEpochMilli, previousResult); + } else { + listener.onFailure(e); + } + }), + client::bulk)); + } + } + + /** + * Performs the actual invalidation of a collection of tokens + * + * @param tokenIds the tokens to invalidate * @param listener the listener to notify upon completion * @param attemptCount the number of attempts to invalidate that have already been tried - * @param srcPrefix the prefix to use when 
constructing the doc to update - * @param documentVersion the expected version of the document we will update + * @param srcPrefix the prefix to use when constructing the doc to update, either refresh_token or access_token depending on + * what type of tokens should be invalidated + * @param previousResult if this not the initial attempt for invalidation, it contains the result of invalidating + * tokens up to the point of the retry. This result is added to the result of the current attempt */ - private void indexInvalidation(String tokenDocId, Version version, ActionListener listener, AtomicInteger attemptCount, - String srcPrefix, long documentVersion) { - if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { - logger.warn("Failed to invalidate token [{}] after [{}] attempts", tokenDocId, attemptCount.get()); - listener.onFailure(invalidGrantException("failed to invalidate token")); + private void indexInvalidation(Collection tokenIds, ActionListener listener, + AtomicInteger attemptCount, String srcPrefix, @Nullable TokensInvalidationResult previousResult) { + if (tokenIds.isEmpty()) { + logger.warn("No [{}] tokens provided for invalidation", srcPrefix); + listener.onFailure(invalidGrantException("No tokens provided for invalidation")); + } else if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to invalidate [{}] tokens after [{}] attempts", tokenIds.size(), + attemptCount.get()); + listener.onFailure(invalidGrantException("failed to invalidate tokens")); } else { - UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + for (String tokenId : tokenIds) { + UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(tokenId)) .setDoc(srcPrefix, Collections.singletonMap("invalidated", true)) - .setVersion(documentVersion) - .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) + 
.setFetchSource(srcPrefix, null) .request(); - securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)), - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, - ActionListener.wrap(updateResponse -> { - logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, tokenDocId); - if (updateResponse.getGetResult() != null - && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix) - && ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) - .containsKey("invalidated")) { - final boolean prevInvalidated = (boolean) - ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) - .get("invalidated"); - listener.onResponse(prevInvalidated == false); - } else { - listener.onResponse(true); + bulkRequestBuilder.add(request); + } + bulkRequestBuilder.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", ex)), + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequestBuilder.request(), + ActionListener.wrap(bulkResponse -> { + ArrayList retryTokenDocIds = new ArrayList<>(); + ArrayList failedRequestResponses = new ArrayList<>(); + ArrayList previouslyInvalidated = new ArrayList<>(); + ArrayList invalidated = new ArrayList<>(); + if (null != previousResult) { + failedRequestResponses.addAll((previousResult.getErrors())); + previouslyInvalidated.addAll(previousResult.getPreviouslyInvalidatedTokens()); + invalidated.addAll(previousResult.getInvalidatedTokens()); } + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + if (bulkItemResponse.isFailed()) { + Throwable cause = bulkItemResponse.getFailure().getCause(); + final String failedTokenDocId = getTokenIdFromDocumentId(bulkItemResponse.getFailure().getId()); + if (isShardNotAvailableException(cause)) { + 
retryTokenDocIds.add(failedTokenDocId); + } + else { + traceLog("invalidate access token", failedTokenDocId, cause); + failedRequestResponses.add(new ElasticsearchException("Error invalidating " + srcPrefix + ": ", cause)); + } + } else { + UpdateResponse updateResponse = bulkItemResponse.getResponse(); + if (updateResponse.getResult() == DocWriteResponse.Result.UPDATED) { + logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, updateResponse.getGetResult().getId()); + invalidated.add(updateResponse.getGetResult().getId()); + } else if (updateResponse.getResult() == DocWriteResponse.Result.NOOP) { + previouslyInvalidated.add(updateResponse.getGetResult().getId()); + } + } + } + if (retryTokenDocIds.isEmpty() == false) { + TokensInvalidationResult incompleteResult = new TokensInvalidationResult(invalidated, previouslyInvalidated, + failedRequestResponses, attemptCount.get()); + attemptCount.incrementAndGet(); + indexInvalidation(retryTokenDocIds, listener, attemptCount, srcPrefix, incompleteResult); + } + TokensInvalidationResult result = new TokensInvalidationResult(invalidated, previouslyInvalidated, + failedRequestResponses, attemptCount.get()); + listener.onResponse(result); }, e -> { Throwable cause = ExceptionsHelper.unwrapCause(e); - traceLog("invalidate token", tokenDocId, cause); - if (cause instanceof DocumentMissingException) { - if (version.onOrAfter(Version.V_6_2_0)) { - // the document should always be there! 
- listener.onFailure(e); - } else { - listener.onResponse(false); - } - } else if (cause instanceof VersionConflictEngineException - || isShardNotAvailableException(cause)) { + traceLog("invalidate tokens", cause); + if (isShardNotAvailableException(cause)) { attemptCount.incrementAndGet(); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), - ActionListener.wrap(getResult -> { - if (getResult.isExists()) { - Map source = getResult.getSource(); - Map accessTokenSource = (Map) source.get("access_token"); - Consumer onFailure = ex -> listener.onFailure(traceLog("get token", tokenDocId, ex)); - if (accessTokenSource == null) { - onFailure.accept(new IllegalArgumentException( - "token document is missing access_token field")); - } else { - Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); - if (invalidated == null) { - onFailure.accept(new IllegalStateException( - "token document missing invalidated value")); - } else if (invalidated) { - logger.trace("Token [{}] is already invalidated", tokenDocId); - listener.onResponse(false); - } else { - indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, - getResult.getVersion()); - } - } - } else if (version.onOrAfter(Version.V_6_2_0)) { - logger.warn("could not find token document [{}] but there should " + - "be one as token has version [{}]", tokenDocId, version); - listener.onFailure(invalidGrantException("could not invalidate the token")); - } else { - listener.onResponse(false); - } - }, - e1 -> { - traceLog("get token", tokenDocId, e1); - if (isShardNotAvailableException(e1)) { - // don't increment count; call again - indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, - documentVersion); - } else { - listener.onFailure(e1); - } - }), client::get); + indexInvalidation(tokenIds, listener, attemptCount, srcPrefix, previousResult); } else { 
listener.onFailure(e); } - }), client::update)); + }), client::bulk)); } } @@ -676,12 +761,12 @@ public final class TokenService { public void refreshToken(String refreshToken, ActionListener> listener) { ensureEnabled(); findTokenFromRefreshToken(refreshToken, - ActionListener.wrap(tuple -> { - final Authentication userAuth = Authentication.readFromContext(client.threadPool().getThreadContext()); - final String tokenDocId = tuple.v1().getHits().getHits()[0].getId(); - innerRefresh(tokenDocId, userAuth, listener, tuple.v2()); - }, listener::onFailure), - new AtomicInteger(0)); + ActionListener.wrap(tuple -> { + final Authentication userAuth = Authentication.readFromContext(client.threadPool().getThreadContext()); + final String tokenDocId = tuple.v1().getHits().getHits()[0].getId(); + innerRefresh(tokenDocId, userAuth, listener, tuple.v2()); + }, listener::onFailure), + new AtomicInteger(0)); } private void findTokenFromRefreshToken(String refreshToken, ActionListener> listener, @@ -691,11 +776,11 @@ public final class TokenService { listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) - .setQuery(QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery("doc_type", "token")) - .filter(QueryBuilders.termQuery("refresh_token.token", refreshToken))) - .setVersion(true) - .request(); + .setQuery(QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("doc_type", "token")) + .filter(QueryBuilders.termQuery("refresh_token.token", refreshToken))) + .setVersion(true) + .request(); final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze(); if (frozenSecurityIndex.indexExists() == false) { @@ -860,12 +945,16 @@ public final class TokenService { } /** - * Find all stored refresh and access tokens that have not been invalidated or expired, and were issued against + * Find stored refresh and access tokens that have not been 
invalidated or expired, and were issued against * the specified realm. + * + * @param realmName The name of the realm for which to get the tokens + * @param listener The listener to notify upon completion + * @param filter an optional Predicate to test the source of the found documents against */ - public void findActiveTokensForRealm(String realmName, ActionListener>> listener) { + public void findActiveTokensForRealm(String realmName, ActionListener>> listener, + @Nullable Predicate> filter) { ensureEnabled(); - final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze(); if (Strings.isNullOrEmpty(realmName)) { listener.onFailure(new IllegalArgumentException("Realm name is required")); @@ -883,7 +972,10 @@ public final class TokenService { .must(QueryBuilders.termQuery("access_token.invalidated", false)) .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli())) ) - .should(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("creation_time").gte(now.toEpochMilli() - TimeValue.timeValueHours(24).millis())) + ) ); final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) @@ -893,33 +985,102 @@ public final class TokenService { .setSize(1000) .setFetchSource(true) .request(); - securityIndex.checkIndexVersionThenExecute(listener::onFailure, - () -> ScrollHelper.fetchAllByEntity(client, request, listener, this::parseHit)); + () -> ScrollHelper.fetchAllByEntity(client, request, listener, (SearchHit hit) -> filterAndParseHit(hit, filter))); } } - private Tuple parseHit(SearchHit hit) { + /** + * Find stored refresh and access tokens that have not been invalidated or expired, and were issued for + * the specified user. 
+ * + * @param username The user for which to get the tokens + * @param listener The listener to notify upon completion + */ + public void findActiveTokensForUser(String username, ActionListener>> listener) { + ensureEnabled(); + + final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze(); + if (Strings.isNullOrEmpty(username)) { + listener.onFailure(new IllegalArgumentException("username is required")); + } else if (frozenSecurityIndex.indexExists() == false) { + listener.onResponse(Collections.emptyList()); + } else if (frozenSecurityIndex.isAvailable() == false) { + listener.onFailure(frozenSecurityIndex.getUnavailableReason()); + } else { + final Instant now = clock.instant(); + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("doc_type", "token")) + .filter(QueryBuilders.boolQuery() + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("access_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli())) + ) + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("creation_time").gte(now.toEpochMilli() - TimeValue.timeValueHours(24).millis())) + ) + ); + + final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) + .setQuery(boolQuery) + .setVersion(false) + .setSize(1000) + .setFetchSource(true) + .request(); + securityIndex.checkIndexVersionThenExecute(listener::onFailure, + () -> ScrollHelper.fetchAllByEntity(client, request, listener, + (SearchHit hit) -> filterAndParseHit(hit, isOfUser(username)))); + } + } + + private static Predicate> isOfUser(String username) { + return source -> { + String auth = (String) source.get("authentication"); + Integer version = (Integer) source.get("version"); + Version authVersion = Version.fromId(version); + try 
(StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(auth))) { + in.setVersion(authVersion); + Authentication authentication = new Authentication(in); + return authentication.getUser().principal().equals(username); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; + } + + + private Tuple filterAndParseHit(SearchHit hit, @Nullable Predicate> filter) { final Map source = hit.getSourceAsMap(); if (source == null) { throw new IllegalStateException("token document did not have source but source should have been fetched"); } - try { - return parseTokensFromDocument(source); + return parseTokensFromDocument(source, filter); } catch (IOException e) { throw invalidGrantException("cannot read token from document"); } } /** - * @return A {@link Tuple} of access-token and refresh-token-id + * + * Parses a token document into a Tuple of a {@link UserToken} and a String representing the corresponding refresh_token + * + * @param source The token document source as retrieved + * @param filter an optional Predicate to test the source of the UserToken against + * @return A {@link Tuple} of access-token and refresh-token-id or null if a Predicate is defined and the userToken source doesn't + * satisfy it */ - private Tuple parseTokensFromDocument(Map source) throws IOException { - final String refreshToken = (String) ((Map) source.get("refresh_token")).get("token"); + private Tuple parseTokensFromDocument(Map source, @Nullable Predicate> filter) + throws IOException { + final String refreshToken = (String) ((Map) source.get("refresh_token")).get("token"); final Map userTokenSource = (Map) - ((Map) source.get("access_token")).get("user_token"); + ((Map) source.get("access_token")).get("user_token"); + if (null != filter && filter.test(userTokenSource) == false) { + return null; + } final String id = (String) userTokenSource.get("id"); final Integer version = (Integer) userTokenSource.get("version"); final String authString = (String) 
userTokenSource.get("authentication"); @@ -951,6 +1112,23 @@ public final class TokenService { return "token_" + id; } + private static String getTokenIdFromDocumentId(String docId) { + if (docId.startsWith("token_") == false) { + throw new IllegalStateException("TokenDocument ID [" + docId + "] has unexpected value"); + } else { + return docId.substring("token_".length()); + } + } + + private static String getTokenIdFromInvalidatedTokenDocumentId(String docId) { + final String invalidatedTokenDocPrefix = INVALIDATED_TOKEN_DOC_TYPE + "_"; + if (docId.startsWith(invalidatedTokenDocPrefix) == false) { + throw new IllegalStateException("InvalidatedTokenDocument ID [" + docId + "] has unexpected value"); + } else { + return docId.substring(invalidatedTokenDocPrefix.length()); + } + } + private void ensureEnabled() { if (enabled == false) { throw new IllegalStateException("tokens are not enabled"); @@ -1149,7 +1327,7 @@ public final class TokenService { } /** - * Creates an {@link ElasticsearchSecurityException} that indicates the token was expired. It + * Creates an {@link ElasticsearchSecurityException} that indicates the token was malformed. It * is up to the client to re-authenticate and obtain a new token. 
The format for this response * is defined in */ @@ -1171,7 +1349,7 @@ public final class TokenService { } /** - * Logs an exception at TRACE level (if enabled) + * Logs an exception concerning a specific Token at TRACE level (if enabled) */ private E traceLog(String action, String identifier, E exception) { if (logger.isTraceEnabled()) { @@ -1179,12 +1357,34 @@ public final class TokenService { final ElasticsearchException esEx = (ElasticsearchException) exception; final Object detail = esEx.getHeader("error_description"); if (detail != null) { - logger.trace("Failure in [{}] for id [{}] - [{}] [{}]", action, identifier, detail, esEx.getDetailedMessage()); + logger.trace(() -> new ParameterizedMessage("Failure in [{}] for id [{}] - [{}]", action, identifier, detail), + esEx); } else { - logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, esEx.getDetailedMessage()); + logger.trace(() -> new ParameterizedMessage("Failure in [{}] for id [{}]", action, identifier), + esEx); } } else { - logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, exception.toString()); + logger.trace(() -> new ParameterizedMessage("Failure in [{}] for id [{}]", action, identifier), exception); + } + } + return exception; + } + + /** + * Logs an exception at TRACE level (if enabled) + */ + private E traceLog(String action, E exception) { + if (logger.isTraceEnabled()) { + if (exception instanceof ElasticsearchException) { + final ElasticsearchException esEx = (ElasticsearchException) exception; + final Object detail = esEx.getHeader("error_description"); + if (detail != null) { + logger.trace(() -> new ParameterizedMessage("Failure in [{}] - [{}]", action, detail), esEx); + } else { + logger.trace(() -> new ParameterizedMessage("Failure in [{}]", action), esEx); + } + } else { + logger.trace(() -> new ParameterizedMessage("Failure in [{}]", action), exception); } } return exception; diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java index 52228d2823a..9801f3c93c8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -37,11 +36,32 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public final class RestInvalidateTokenAction extends SecurityBaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestInvalidateTokenAction.class)); - static final ConstructingObjectParser, Void> PARSER = - new ConstructingObjectParser<>("invalidate_token", a -> new Tuple<>((String) a[0], (String) a[1])); + static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("invalidate_token", a -> { + final String token = (String) a[0]; + final String refreshToken = (String) a[1]; + final String tokenString; + final String tokenType; + if (Strings.hasLength(token) && Strings.hasLength(refreshToken)) { + throw new IllegalArgumentException("only one of [token, refresh_token] may be sent per request"); + } else if (Strings.hasLength(token)) { + tokenString = token; + tokenType = InvalidateTokenRequest.Type.ACCESS_TOKEN.getValue(); + } else if 
(Strings.hasLength(refreshToken)) { + tokenString = refreshToken; + tokenType = InvalidateTokenRequest.Type.REFRESH_TOKEN.getValue(); + } else { + tokenString = null; + tokenType = null; + } + return new InvalidateTokenRequest(tokenString, tokenType, (String) a[2], (String) a[3]); + }); + static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("token")); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("refresh_token")); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("realm_name")); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("username")); } public RestInvalidateTokenAction(Settings settings, RestController controller, XPackLicenseState xPackLicenseState) { @@ -60,36 +80,16 @@ public final class RestInvalidateTokenAction extends SecurityBaseRestHandler { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { try (XContentParser parser = request.contentParser()) { - final Tuple tuple = PARSER.parse(parser, null); - final String token = tuple.v1(); - final String refreshToken = tuple.v2(); - - final String tokenString; - final InvalidateTokenRequest.Type type; - if (Strings.hasLength(token) && Strings.hasLength(refreshToken)) { - throw new IllegalArgumentException("only one of [token, refresh_token] may be sent per request"); - } else if (Strings.hasLength(token)) { - tokenString = token; - type = InvalidateTokenRequest.Type.ACCESS_TOKEN; - } else if (Strings.hasLength(refreshToken)) { - tokenString = refreshToken; - type = InvalidateTokenRequest.Type.REFRESH_TOKEN; - } else { - tokenString = null; - type = null; - } - - final InvalidateTokenRequest tokenRequest = new InvalidateTokenRequest(tokenString, type); - return channel -> client.execute(InvalidateTokenAction.INSTANCE, tokenRequest, - new RestBuilderListener(channel) { - @Override - 
public RestResponse buildResponse(InvalidateTokenResponse invalidateResp, - XContentBuilder builder) throws Exception { - return new BytesRestResponse(RestStatus.OK, builder.startObject() - .field("created", invalidateResp.isCreated()) - .endObject()); - } - }); + final InvalidateTokenRequest invalidateTokenRequest = PARSER.parse(parser, null); + return channel -> client.execute(InvalidateTokenAction.INSTANCE, invalidateTokenRequest, + new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(InvalidateTokenResponse invalidateResp, + XContentBuilder builder) throws Exception { + invalidateResp.toXContent(builder, channel.request()); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index ba1d1762f06..5a4c8f3bde8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -11,6 +11,10 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; @@ -21,11 +25,11 @@ import org.elasticsearch.action.search.SearchAction; import 
org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; @@ -106,11 +110,12 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { private SamlRealm samlRealm; private TokenService tokenService; private List indexRequests; - private List updateRequests; + private List bulkRequests; private List searchRequests; private TransportSamlInvalidateSessionAction action; private SamlLogoutRequestHandler.Result logoutRequest; private Function searchFunction = ignore -> new SearchHit[0]; + private Function searchScrollFunction = ignore -> new SearchHit[0]; @Before public void setup() throws Exception { @@ -132,8 +137,8 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { new Authentication(new User("kibana"), new RealmRef("realm", "type", "node"), null).writeToContext(threadContext); indexRequests = new ArrayList<>(); - updateRequests = new ArrayList<>(); searchRequests = new ArrayList<>(); + bulkRequests = new ArrayList<>(); final Client client = new NoOpClient(threadPool) { @Override protected @@ -143,20 +148,29 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { IndexRequest indexRequest = (IndexRequest) request; indexRequests.add(indexRequest); final IndexResponse response = new IndexResponse( - indexRequest.shardId(), indexRequest.type(), 
indexRequest.id(), 1, 1, 1, true); + indexRequest.shardId(), indexRequest.type(), indexRequest.id(), 1, 1, 1, true); + listener.onResponse((Response) response); + } else if (BulkAction.NAME.equals(action.name())) { + assertThat(request, instanceOf(BulkRequest.class)); + bulkRequests.add((BulkRequest) request); + final BulkResponse response = new BulkResponse(new BulkItemResponse[0], 1); listener.onResponse((Response) response); - } else if (UpdateAction.NAME.equals(action.name())) { - assertThat(request, instanceOf(UpdateRequest.class)); - updateRequests.add((UpdateRequest) request); - listener.onResponse((Response) new UpdateResponse()); } else if (SearchAction.NAME.equals(action.name())) { assertThat(request, instanceOf(SearchRequest.class)); SearchRequest searchRequest = (SearchRequest) request; searchRequests.add(searchRequest); final SearchHit[] hits = searchFunction.apply(searchRequest); final SearchResponse response = new SearchResponse( - new SearchResponseSections(new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); + new SearchResponseSections(new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); + listener.onResponse((Response) response); + } else if (SearchScrollAction.NAME.equals(action.name())){ + assertThat(request, instanceOf(SearchScrollRequest.class)); + SearchScrollRequest searchScrollRequest = (SearchScrollRequest) request; + final SearchHit[] hits = searchScrollFunction.apply(searchScrollRequest); + final SearchResponse response = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); listener.onResponse((Response) response); } else if (ClearScrollAction.NAME.equals(action.name())) { 
assertThat(request, instanceOf(ClearScrollRequest.class)); @@ -296,15 +310,33 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { assertThat(((TermQueryBuilder) filter1.get(1)).fieldName(), equalTo("refresh_token.token")); assertThat(((TermQueryBuilder) filter1.get(1)).value(), equalTo(tokenToInvalidate1.v2())); - assertThat(updateRequests.size(), equalTo(4)); // (refresh-token + access-token) * 2 - assertThat(updateRequests.get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); - assertThat(updateRequests.get(1).id(), equalTo(updateRequests.get(0).id())); - assertThat(updateRequests.get(2).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); - assertThat(updateRequests.get(3).id(), equalTo(updateRequests.get(2).id())); - - assertThat(indexRequests.size(), equalTo(2)); // bwc-invalidate * 2 - assertThat(indexRequests.get(0).id(), startsWith("invalidated-token_")); - assertThat(indexRequests.get(1).id(), startsWith("invalidated-token_")); + assertThat(bulkRequests.size(), equalTo(6)); // 4 updates (refresh-token + access-token) plus 2 indexes (bwc-invalidate * 2) + // Invalidate refresh token 1 + assertThat(bulkRequests.get(0).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(0).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + UpdateRequest updateRequest1 = (UpdateRequest) bulkRequests.get(0).requests().get(0); + assertThat(updateRequest1.toString().contains("refresh_token"), equalTo(true)); + // BWC incalidate access token 1 + assertThat(bulkRequests.get(1).requests().get(0), instanceOf(IndexRequest.class)); + assertThat(bulkRequests.get(1).requests().get(0).id(), equalTo("invalidated-token_" + tokenToInvalidate1.v1().getId())); + // Invalidate access token 1 + assertThat(bulkRequests.get(2).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(2).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + 
UpdateRequest updateRequest2 = (UpdateRequest) bulkRequests.get(2).requests().get(0); + assertThat(updateRequest2.toString().contains("access_token"), equalTo(true)); + // Invalidate refresh token 2 + assertThat(bulkRequests.get(3).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(3).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + UpdateRequest updateRequest3 = (UpdateRequest) bulkRequests.get(3).requests().get(0); + assertThat(updateRequest3.toString().contains("refresh_token"), equalTo(true)); + // BWC incalidate access token 2 + assertThat(bulkRequests.get(4).requests().get(0), instanceOf(IndexRequest.class)); + assertThat(bulkRequests.get(4).requests().get(0).id(), equalTo("invalidated-token_" + tokenToInvalidate2.v1().getId())); + // Invalidate access token 2 + assertThat(bulkRequests.get(5).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(5).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + UpdateRequest updateRequest4 = (UpdateRequest) bulkRequests.get(5).requests().get(0); + assertThat(updateRequest4.toString().contains("access_token"), equalTo(true)); } private Function findTokenByRefreshToken(SearchHit[] searchHits) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 66d3233b07a..7dec105e1ee 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -6,7 +6,11 @@ package org.elasticsearch.xpack.security.action.saml; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocWriteResponse; +import 
org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -24,7 +28,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; -import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.MapBuilder; @@ -72,6 +75,9 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockGetTokenFromId; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Matchers.any; @@ -89,7 +95,7 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { private SamlRealm samlRealm; private TokenService tokenService; private List indexRequests; - private List updateRequests; + private List bulkRequests; private TransportSamlLogoutAction action; private Client client; @@ -112,7 +118,7 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { new Authentication(new User("kibana"), new Authentication.RealmRef("realm", "type", "node"), null).writeToContext(threadContext); indexRequests = new ArrayList<>(); - updateRequests = new ArrayList<>(); + bulkRequests = new 
ArrayList<>(); client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); @@ -137,6 +143,10 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { .setId((String) invocationOnMock.getArguments()[2]); return builder; }).when(client).prepareUpdate(anyString(), anyString(), anyString()); + doAnswer(invocationOnMock -> { + BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE); + return builder; + }).when(client).prepareBulk(); when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; @@ -154,15 +164,6 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { listener.onResponse(response); return Void.TYPE; }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class)); - doAnswer(invocationOnMock -> { - UpdateRequest updateRequest = (UpdateRequest) invocationOnMock.getArguments()[0]; - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - updateRequests.add(updateRequest); - final UpdateResponse response = new UpdateResponse( - updateRequest.getShardId(), updateRequest.type(), updateRequest.id(), 1, DocWriteResponse.Result.UPDATED); - listener.onResponse(response); - return Void.TYPE; - }).when(client).update(any(UpdateRequest.class), any(ActionListener.class)); doAnswer(invocationOnMock -> { IndexRequest indexRequest = (IndexRequest) invocationOnMock.getArguments()[0]; ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; @@ -181,6 +182,14 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { listener.onResponse(response); return Void.TYPE; }).when(client).execute(eq(IndexAction.INSTANCE), any(IndexRequest.class), any(ActionListener.class)); + doAnswer(invocationOnMock -> { + BulkRequest bulkRequest = 
(BulkRequest) invocationOnMock.getArguments()[0]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + bulkRequests.add(bulkRequest); + final BulkResponse response = new BulkResponse(new BulkItemResponse[0], 1); + listener.onResponse(response); + return Void.TYPE; + }).when(client).bulk(any(BulkRequest.class), any(ActionListener.class)); final SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); doAnswer(inv -> { @@ -247,9 +256,17 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { assertThat(indexRequest1, notNullValue()); assertThat(indexRequest1.id(), startsWith("token")); - final IndexRequest indexRequest2 = indexRequests.get(1); - assertThat(indexRequest2, notNullValue()); - assertThat(indexRequest2.id(), startsWith("invalidated-token")); + assertThat(bulkRequests.size(), equalTo(2)); + final BulkRequest bulkRequest1 = bulkRequests.get(0); + assertThat(bulkRequest1.requests().size(), equalTo(1)); + assertThat(bulkRequest1.requests().get(0), instanceOf(IndexRequest.class)); + assertThat(bulkRequest1.requests().get(0).id(), startsWith("invalidated-token_")); + + final BulkRequest bulkRequest2 = bulkRequests.get(1); + assertThat(bulkRequest2.requests().size(), equalTo(1)); + assertThat(bulkRequest2.requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequest2.requests().get(0).id(), startsWith("token_")); + assertThat(bulkRequest2.requests().get(0).toString(), containsString("\"access_token\":{\"invalidated\":true")); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index c4efdc16e10..968c17f556b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java 
@@ -144,7 +144,9 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { .prepareInvalidateToken(response.getTokenString()) .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) .get(); - assertTrue(invalidateResponse.isCreated()); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); AtomicReference docId = new AtomicReference<>(); assertBusy(() -> { SearchResponse searchResponse = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) @@ -189,6 +191,72 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { }, 30, TimeUnit.SECONDS); } + public void testInvalidateAllTokensForUser() throws Exception{ + final int numOfRequests = randomIntBetween(5, 10); + for (int i = 0; i < numOfRequests; i++) { + securityClient().prepareCreateToken() + .setGrantType("password") + .setUsername(SecuritySettingsSource.TEST_USER_NAME) + .setPassword(new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())) + .get(); + } + Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))); + SecurityClient securityClientSuperuser = new SecurityClient(client); + InvalidateTokenResponse invalidateResponse = securityClientSuperuser + .prepareInvalidateToken() + .setUserName(SecuritySettingsSource.TEST_USER_NAME) + .get(); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(2 * (numOfRequests))); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + } + + public void testInvalidateAllTokensForRealm() throws Exception{ + final int 
numOfRequests = randomIntBetween(5, 10); + for (int i = 0; i < numOfRequests; i++) { + securityClient().prepareCreateToken() + .setGrantType("password") + .setUsername(SecuritySettingsSource.TEST_USER_NAME) + .setPassword(new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())) + .get(); + } + Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))); + SecurityClient securityClientSuperuser = new SecurityClient(client); + InvalidateTokenResponse invalidateResponse = securityClientSuperuser + .prepareInvalidateToken() + .setRealmName("file") + .get(); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(2 * (numOfRequests))); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + } + + public void testInvalidateAllTokensForRealmThatHasNone() { + final int numOfRequests = randomIntBetween(2, 4); + for (int i = 0; i < numOfRequests; i++) { + securityClient().prepareCreateToken() + .setGrantType("password") + .setUsername(SecuritySettingsSource.TEST_USER_NAME) + .setPassword(new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())) + .get(); + } + Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))); + SecurityClient securityClientSuperuser = new SecurityClient(client); + InvalidateTokenResponse invalidateResponse = securityClientSuperuser + .prepareInvalidateToken() + .setRealmName("saml") + .get(); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(0)); + 
assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + } + public void testExpireMultipleTimes() { CreateTokenResponse response = securityClient().prepareCreateToken() .setGrantType("password") @@ -200,12 +268,16 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { .prepareInvalidateToken(response.getTokenString()) .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) .get(); - assertTrue(invalidateResponse.isCreated()); - assertFalse(securityClient() - .prepareInvalidateToken(response.getTokenString()) - .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) - .get() - .isCreated()); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + InvalidateTokenResponse invalidateAgainResponse = securityClient() + .prepareInvalidateToken(response.getTokenString()) + .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) + .get(); + assertThat(invalidateAgainResponse.getResult().getInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateAgainResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateAgainResponse.getResult().getErrors().size(), equalTo(0)); } public void testRefreshingToken() { @@ -248,7 +320,9 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { .prepareInvalidateToken(createTokenResponse.getRefreshToken()) .setType(InvalidateTokenRequest.Type.REFRESH_TOKEN) .get(); - assertTrue(invalidateResponse.isCreated()); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), 
equalTo(0)); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> securityClient.prepareRefreshToken(createTokenResponse.getRefreshToken()).get()); @@ -362,9 +436,11 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { // invalidate PlainActionFuture invalidateResponseFuture = new PlainActionFuture<>(); InvalidateTokenRequest invalidateTokenRequest = - new InvalidateTokenRequest(createTokenResponse.getTokenString(), InvalidateTokenRequest.Type.ACCESS_TOKEN); + new InvalidateTokenRequest(createTokenResponse.getTokenString(), InvalidateTokenRequest.Type.ACCESS_TOKEN.getValue()); securityClient.invalidateToken(invalidateTokenRequest, invalidateResponseFuture); - assertTrue(invalidateResponseFuture.get().isCreated()); + assertThat(invalidateResponseFuture.get().getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponseFuture.get().getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponseFuture.get().getResult().getErrors().size(), equalTo(0)); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { PlainActionFuture responseFuture = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 7926b44a38c..286f07667ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; +import 
org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -523,7 +524,7 @@ public class TokenServiceTests extends ESTestCase { assertNull(future.get()); e = expectThrows(IllegalStateException.class, () -> { - PlainActionFuture invalidateFuture = new PlainActionFuture<>(); + PlainActionFuture invalidateFuture = new PlainActionFuture<>(); tokenService.invalidateAccessToken((String) null, invalidateFuture); invalidateFuture.actionGet(); }); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index 41bd8bfc6e6..3d13119292b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -45,6 +45,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Consumer; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; @@ -112,7 +113,7 @@ public class NativeUsersStoreTests extends ESTestCase { SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.RESERVED_USER_TYPE, randomAlphaOfLength(12)), - 1L, + 0, 1, 1L, true, BytesReference.bytes(jsonBuilder().map(values)), Collections.emptyMap()); @@ -181,7 +182,7 @@ public class NativeUsersStoreTests extends ESTestCase { 
SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), - 1L, + UNASSIGNED_SEQ_NO, 0, 1L, false, null, Collections.emptyMap()); @@ -223,7 +224,7 @@ public class NativeUsersStoreTests extends ESTestCase { SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), - 1L, + 0, 1, 1L, true, source, Collections.emptyMap()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java new file mode 100644 index 00000000000..06c9411d0bc --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.security.authc.support; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; + +import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class TokensInvalidationResultTests extends ESTestCase { + + public void testToXcontent() throws Exception{ + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList("token1", "token2"), + Arrays.asList("token3", "token4"), + Arrays.asList(new ElasticsearchException("foo", new IllegalStateException("bar")), + new ElasticsearchException("boo", new IllegalStateException("far"))), + randomIntBetween(0, 5)); + + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + result.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertThat(Strings.toString(builder), + equalTo( + "{\"created\":false," + + "\"invalidated_tokens\":2," + + "\"previously_invalidated_tokens\":2," + + "\"error_count\":2," + + "\"error_details\":[" + + "{\"type\":\"exception\"," + + "\"reason\":\"foo\"," + + "\"caused_by\":{" + + "\"type\":\"illegal_state_exception\"," + + "\"reason\":\"bar\"" + + "}" + + "}," + + "{\"type\":\"exception\"," + + "\"reason\":\"boo\"," + + "\"caused_by\":{" + + "\"type\":\"illegal_state_exception\"," + + "\"reason\":\"far\"" + + "}" + + "}" + + "]" + + "}")); + } + } + + public void testToXcontentWithNoErrors() throws Exception{ + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList("token1", "token2"), + Collections.emptyList(), + Collections.emptyList(), randomIntBetween(0, 5)); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + 
result.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertThat(Strings.toString(builder), + equalTo( + "{\"created\":true," + + "\"invalidated_tokens\":2," + + "\"previously_invalidated_tokens\":0," + + "\"error_count\":0" + + "}")); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index c95204ddfdf..e2acbb81560 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -56,6 +56,7 @@ import java.util.function.Consumer; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -129,7 +130,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { final String docSource = Strings.toString(sourcePrivilege); listener.get().onResponse(new GetResponse( - new GetResult(request.index(), request.type(), request.id(), 1L, true, new BytesArray(docSource), emptyMap()) + new GetResult(request.index(), request.type(), request.id(), 0, 1, 1L, true, new BytesArray(docSource), emptyMap()) )); final ApplicationPrivilegeDescriptor getPrivilege = future.get(1, TimeUnit.SECONDS); assertThat(getPrivilege, equalTo(sourcePrivilege)); @@ -146,7 +147,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { assertThat(request.id(), equalTo("application-privilege_myapp:admin")); listener.get().onResponse(new GetResponse( - new GetResult(request.index(), request.type(), request.id(), -1, false, null, 
emptyMap()) + new GetResult(request.index(), request.type(), request.id(), UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap()) )); final ApplicationPrivilegeDescriptor getPrivilege = future.get(1, TimeUnit.SECONDS); assertThat(getPrivilege, Matchers.nullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java new file mode 100644 index 00000000000..00850ba6e5a --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.rest.action.oauth2; + +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; + +import static org.hamcrest.Matchers.containsString; + +public class RestInvalidateTokenActionTests extends ESTestCase { + + public void testParserForUserAndRealm() throws Exception { + final String request = "{" + + "\"username\": \"user1\"," + + "\"realm_name\": \"realm1\"" + + "}"; + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, request)) { + InvalidateTokenRequest invalidateTokenRequest = RestInvalidateTokenAction.PARSER.parse(parser, null); + 
assertEquals("user1", invalidateTokenRequest.getUserName()); + assertEquals("realm1", invalidateTokenRequest.getRealmName()); + assertNull(invalidateTokenRequest.getTokenString()); + assertNull(invalidateTokenRequest.getTokenType()); + } + } + + public void testParserForToken() throws Exception { + final String request = "{" + + "\"refresh_token\": \"refresh_token_string\"" + + "}"; + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, request)) { + InvalidateTokenRequest invalidateTokenRequest = RestInvalidateTokenAction.PARSER.parse(parser, null); + assertEquals("refresh_token_string", invalidateTokenRequest.getTokenString()); + assertEquals("refresh_token", invalidateTokenRequest.getTokenType().getValue()); + assertNull(invalidateTokenRequest.getRealmName()); + assertNull(invalidateTokenRequest.getUserName()); + } + } + + public void testParserForIncorrectInput() throws Exception { + final String request = "{" + + "\"refresh_token\": \"refresh_token_string\"," + + "\"token\": \"access_token_string\"" + + "}"; + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, request)) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestInvalidateTokenAction.PARSER.parse(parser, + null)); + assertThat(e.getCause().getMessage(), containsString("only one of [token, refresh_token] may be sent per request")); + + } + } +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java index 472504dd5ad..43d296058f4 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java 
@@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.client.Version; import java.net.URI; import java.sql.DriverPropertyInfo; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -69,7 +70,7 @@ class JdbcConfiguration extends ConnectionConfiguration { private final String debugOut; // mutable ones - private TimeZone timeZone; + private ZoneId zoneId; public static JdbcConfiguration create(String u, Properties props, int loginTimeoutSeconds) throws JdbcSQLException { URI uri = parseUrl(u); @@ -148,7 +149,8 @@ class JdbcConfiguration extends ConnectionConfiguration { this.debug = parseValue(DEBUG, props.getProperty(DEBUG, DEBUG_DEFAULT), Boolean::parseBoolean); this.debugOut = props.getProperty(DEBUG_OUTPUT, DEBUG_OUTPUT_DEFAULT); - this.timeZone = parseValue(TIME_ZONE, props.getProperty(TIME_ZONE, TIME_ZONE_DEFAULT), TimeZone::getTimeZone); + this.zoneId = parseValue(TIME_ZONE, props.getProperty(TIME_ZONE, TIME_ZONE_DEFAULT), + s -> TimeZone.getTimeZone(s).toZoneId().normalized()); } @Override @@ -165,11 +167,11 @@ class JdbcConfiguration extends ConnectionConfiguration { } public TimeZone timeZone() { - return timeZone; + return zoneId != null ? TimeZone.getTimeZone(zoneId) : null; } public void timeZone(TimeZone timeZone) { - this.timeZone = timeZone; + this.zoneId = timeZone != null ? 
timeZone.toZoneId() : null; } public static boolean canAccept(String url) { @@ -186,4 +188,4 @@ class JdbcConfiguration extends ConnectionConfiguration { return info.toArray(new DriverPropertyInfo[info.size()]); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index 5d1e59ef7a2..d4837bfdafc 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -130,12 +130,66 @@ SELECT MAX(languages) max, MIN(languages) min, SUM(languages) sum, AVG(languages FROM test_emp GROUP BY languages ORDER BY languages ASC LIMIT 5; max:bt | min:bt | sum:bt | avg:d | percent:d | percent_rank:d| kurtosis:d | skewness:d ----------------+---------------+---------------+---------------+---------------+---------------+---------------+--------------- -null |null |null |null |null |null |null |null -1 |1 |15 |1 |1.0 |100.0 |NaN |NaN -2 |2 |38 |2 |2.0 |100.0 |NaN |NaN -3 |3 |51 |3 |3.0 |100.0 |NaN |NaN -4 |4 |72 |4 |4.0 |0.0 |NaN |NaN +---------------+---------------+---------------+--------------+---------------+---------------+---------------+--------------- +null |null |null |null |null |null |null |null +1 |1 |15 |1 |1.0 |100.0 |NaN |NaN +2 |2 |38 |2 |2.0 |100.0 |NaN |NaN +3 |3 |51 |3 |3.0 |100.0 |NaN |NaN +4 |4 |72 |4 |4.0 |0.0 |NaN |NaN +; + +aggByComplexCastedValue +SELECT CONVERT(CONCAT(LTRIM(CONVERT("emp_no", SQL_VARCHAR)), LTRIM(CONVERT("languages", SQL_VARCHAR))), SQL_BIGINT) AS "TEMP" +FROM "test_emp" GROUP BY "TEMP" ORDER BY "TEMP" LIMIT 20; + + TEMP:l +--------------- +10020 +10021 +10022 +10023 +10024 +10025 +10026 +10027 +10028 +10029 +100012 +100025 +100034 +100045 +100051 +100063 +100074 +100082 +100091 +100104 +; + +aggAndOrderByCastedValue +SELECT CHAR_LENGTH(SPACE(CAST(languages AS SMALLINT))), COUNT(*) FROM test_emp GROUP BY 1 ORDER BY 1 DESC; + +CHAR_LENGTH(SPACE(CAST(languages AS SMALLINT))):i| 
COUNT(1):l +-------------------------------------------------+--------------- +5 |21 +4 |18 +3 |17 +2 |19 +1 |15 +null |10 +; + +aggAndOrderByCastedFunctionValue +SELECT ROUND(SQRT(CAST(EXP(languages) AS SMALLINT)), 2), COUNT(*) FROM test_emp GROUP BY 1 ORDER BY 1 DESC; + +ROUND(SQRT(CAST(EXP(languages) AS SMALLINT)),2):d| COUNT(1):l +-------------------------------------------------+--------------- +12.17 |21 +7.42 |18 +4.47 |17 +2.65 |19 +1.73 |15 +null |10 ; diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec index 9adbe79edc6..149e23f7713 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec @@ -280,6 +280,8 @@ aggMaxWithAlias SELECT gender g, MAX(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender; aggMaxOnDate SELECT gender, MAX(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender; +aggAvgAndMaxWithLikeFilter +SELECT CAST(AVG(salary) AS LONG) AS avg, CAST(SUM(salary) AS LONG) AS s FROM "test_emp" WHERE first_name LIKE 'G%'; // Conditional MAX aggMaxWithHaving diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec index 16fe5511e4d..4b12d2de58f 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec @@ -119,11 +119,14 @@ SELECT DAY_OF_WEEK(birth_date) day, COUNT(*) c FROM test_emp WHERE DAY_OF_WEEK(b currentTimestampYear SELECT YEAR(CURRENT_TIMESTAMP()) AS result; -currentTimestampMonth +// +// H2 uses the local timezone instead of the specified one +// +currentTimestampMonth-Ignore SELECT MONTH(CURRENT_TIMESTAMP()) AS result; currentTimestampHour-Ignore SELECT HOUR(CURRENT_TIMESTAMP()) AS result; -currentTimestampMinute +currentTimestampMinute-Ignore SELECT MINUTE(CURRENT_TIMESTAMP()) AS result; diff --git 
a/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec index cfbff2ada57..af81b060ebd 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec @@ -49,6 +49,8 @@ whereFieldWithNotEqualsOnString SELECT last_name l FROM "test_emp" WHERE emp_no < 10003 AND gender <> 'M'; whereFieldWithLikeMatch SELECT last_name l FROM "test_emp" WHERE emp_no < 10003 AND last_name LIKE 'K%'; +whereFieldWithNotLikeMatch +SELECT last_name l FROM "test_emp" WHERE emp_no < 10020 AND first_name NOT LIKE 'Ma%'; whereFieldWithOrderNot SELECT last_name l FROM "test_emp" WHERE NOT emp_no < 10003 ORDER BY emp_no LIMIT 5; diff --git a/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec index 930a15f9438..6fec225df0c 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec @@ -68,6 +68,33 @@ cct:s AlejandroMcAlpine ; +selectConcatWithNullValues +SELECT first_name, CONCAT(first_name,null),last_name, CONCAT(null,null), LENGTH(CONCAT(null,null)) FROM test_emp ORDER BY first_name DESC LIMIT 20; + + first_name:s |CONCAT(first_name,null):s| last_name:s |CONCAT(null,null):s|LENGTH(CONCAT(null,null)):i +---------------+-------------------------+----------------+-------------------+------------------------- +null | |Demeyer | |0 +null | |Joslin | |0 +null | |Reistad | |0 +null | |Merlo | |0 +null | |Swan | |0 +null | |Chappelet | |0 +null | |Portugali | |0 +null | |Makrucki | |0 +null | |Lortz | |0 +null | |Brender | |0 +Zvonko |Zvonko |Nyanchama | |0 +Zhongwei |Zhongwei |Rosen | |0 +Yongqiao |Yongqiao |Berztiss | |0 +Yishay |Yishay |Tzvieli | |0 +Yinghua |Yinghua |Dredge | |0 +Xinglin |Xinglin |Eugenio | |0 +Weiyi |Weiyi |Meriste | |0 +Vishv |Vishv |Zockler | |0 +Valter |Valter |Sullins | |0 +Valdiodio |Valdiodio |Niizuma | |0 
+; + selectAsciiOfConcatWithGroupByOrderByCount SELECT ASCII(CONCAT("first_name","last_name")) ascii, COUNT(*) count FROM "test_emp" GROUP BY ASCII(CONCAT("first_name","last_name")) ORDER BY ASCII(CONCAT("first_name","last_name")) DESC LIMIT 10; diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java index 2b90a7d41fa..aaa8c56323d 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java @@ -21,10 +21,10 @@ import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.io.IOException; +import java.time.ZoneId; import java.util.Collections; import java.util.List; import java.util.Objects; -import java.util.TimeZone; import java.util.function.Supplier; /** @@ -33,7 +33,7 @@ import java.util.function.Supplier; public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest implements CompositeIndicesRequest, ToXContentFragment { private String query = ""; - private TimeZone timeZone = Protocol.TIME_ZONE; + private ZoneId zoneId = Protocol.TIME_ZONE; private int fetchSize = Protocol.FETCH_SIZE; private TimeValue requestTimeout = Protocol.REQUEST_TIMEOUT; private TimeValue pageTimeout = Protocol.PAGE_TIMEOUT; @@ -56,12 +56,12 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme super(); } - public AbstractSqlQueryRequest(String query, List params, QueryBuilder filter, TimeZone timeZone, + public AbstractSqlQueryRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { super(requestInfo); this.query = query; this.params = 
params; - this.timeZone = timeZone; + this.zoneId = zoneId; this.fetchSize = fetchSize; this.requestTimeout = requestTimeout; this.pageTimeout = pageTimeout; @@ -76,7 +76,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme parser.declareString((request, mode) -> request.mode(Mode.fromString(mode)), MODE); parser.declareString((request, clientId) -> request.clientId(clientId), CLIENT_ID); parser.declareObjectArray(AbstractSqlQueryRequest::params, (p, c) -> SqlTypedParamValue.fromXContent(p), PARAMS); - parser.declareString((request, zoneId) -> request.timeZone(TimeZone.getTimeZone(zoneId)), TIME_ZONE); + parser.declareString((request, zoneId) -> request.zoneId(ZoneId.of(zoneId)), TIME_ZONE); parser.declareInt(AbstractSqlQueryRequest::fetchSize, FETCH_SIZE); parser.declareString((request, timeout) -> request.requestTimeout(TimeValue.parseTimeValue(timeout, Protocol.REQUEST_TIMEOUT, "request_timeout")), REQUEST_TIMEOUT); @@ -121,15 +121,15 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme /** * The client's time zone */ - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } - public AbstractSqlQueryRequest timeZone(TimeZone timeZone) { - if (timeZone == null) { + public AbstractSqlQueryRequest zoneId(ZoneId zoneId) { + if (zoneId == null) { throw new IllegalArgumentException("time zone may not be null."); } - this.timeZone = timeZone; + this.zoneId = zoneId; return this; } @@ -194,7 +194,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme super(in); query = in.readString(); params = in.readList(AbstractSqlQueryRequest::readSqlTypedParamValue); - timeZone = TimeZone.getTimeZone(in.readString()); + zoneId = ZoneId.of(in.readString()); fetchSize = in.readVInt(); requestTimeout = in.readTimeValue(); pageTimeout = in.readTimeValue(); @@ -218,7 +218,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme 
for (SqlTypedParamValue param: params) { writeSqlTypedParamValue(out, param); } - out.writeString(timeZone.getID()); + out.writeString(zoneId.getId()); out.writeVInt(fetchSize); out.writeTimeValue(requestTimeout); out.writeTimeValue(pageTimeout); @@ -240,7 +240,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme return fetchSize == that.fetchSize && Objects.equals(query, that.query) && Objects.equals(params, that.params) && - Objects.equals(timeZone, that.timeZone) && + Objects.equals(zoneId, that.zoneId) && Objects.equals(requestTimeout, that.requestTimeout) && Objects.equals(pageTimeout, that.pageTimeout) && Objects.equals(filter, that.filter); @@ -248,6 +248,6 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme @Override public int hashCode() { - return Objects.hash(super.hashCode(), query, timeZone, fetchSize, requestTimeout, pageTimeout, filter); + return Objects.hash(super.hashCode(), query, zoneId, fetchSize, requestTimeout, pageTimeout, filter); } } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java index ec3e2b331f0..60c7b66352c 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java @@ -18,9 +18,9 @@ import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.io.IOException; +import java.time.ZoneId; import java.util.List; import java.util.Objects; -import java.util.TimeZone; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -40,9 +40,9 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest { super(); } - public SqlQueryRequest(String query, List params, QueryBuilder 
filter, TimeZone timeZone, + public SqlQueryRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, String cursor, RequestInfo requestInfo) { - super(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, requestInfo); + super(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo); this.cursor = cursor; } @@ -104,7 +104,7 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { // This is needed just to test round-trip compatibility with proto.SqlQueryRequest - return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(query(), params(), timeZone(), fetchSize(), requestTimeout(), + return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(query(), params(), zoneId(), fetchSize(), requestTimeout(), pageTimeout(), filter(), cursor(), requestInfo()).toXContent(builder, params); } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java index e7a670afa72..5443f09c5eb 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import java.time.ZoneId; import java.util.Collections; import java.util.List; -import java.util.TimeZone; /** * The builder to build sql request @@ -29,9 +29,9 @@ public class SqlQueryRequestBuilder extends ActionRequestBuilder params, - QueryBuilder filter, TimeZone timeZone, int fetchSize, 
TimeValue requestTimeout, + QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, String nextPageInfo, RequestInfo requestInfo) { - super(client, action, new SqlQueryRequest(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, nextPageInfo, + super(client, action, new SqlQueryRequest(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, nextPageInfo, requestInfo)); } @@ -60,8 +60,8 @@ public class SqlQueryRequestBuilder extends ActionRequestBuilder params, QueryBuilder filter, TimeZone timeZone, + public SqlTranslateRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { - super(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, requestInfo); + super(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo); } public SqlTranslateRequest(StreamInput in) throws IOException { @@ -64,7 +64,7 @@ public class SqlTranslateRequest extends AbstractSqlQueryRequest { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { // This is needed just to test parsing of SqlTranslateRequest, so we can reuse SqlQuerySerialization - return new SqlQueryRequest(query(), params(), timeZone(), fetchSize(), requestTimeout(), + return new SqlQueryRequest(query(), params(), zoneId(), fetchSize(), requestTimeout(), pageTimeout(), filter(), null, requestInfo()).toXContent(builder, params); } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java index 408f2400ef4..fa96b8f5e89 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java +++ 
b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import java.time.ZoneId; import java.util.Collections; import java.util.List; -import java.util.TimeZone; /** * Builder for the request for the sql action for translating SQL queries into ES requests @@ -28,10 +28,10 @@ public class SqlTranslateRequestBuilder extends ActionRequestBuilder params, TimeZone timeZone, int fetchSize, TimeValue requestTimeout, + List params, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { super(client, action, - new SqlTranslateRequest(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, requestInfo)); + new SqlTranslateRequest(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo)); } public SqlTranslateRequestBuilder query(String query) { @@ -39,8 +39,8 @@ public class SqlTranslateRequestBuilder extends ActionRequestBuilder request.requestInfo(randomValueOtherThan(request.requestInfo(), this::randomRequestInfo)), request -> request.query(randomValueOtherThan(request.query(), () -> randomAlphaOfLength(5))), request -> request.params(randomValueOtherThan(request.params(), this::randomParameters)), - request -> request.timeZone(randomValueOtherThan(request.timeZone(), ESTestCase::randomTimeZone)), + request -> request.zoneId(randomValueOtherThan(request.zoneId(), ESTestCase::randomZone)), request -> request.fetchSize(randomValueOtherThan(request.fetchSize(), () -> between(1, Integer.MAX_VALUE))), request -> request.requestTimeout(randomValueOtherThan(request.requestTimeout(), this::randomTV)), request -> request.filter(randomValueOtherThan(request.filter(), @@ -112,7 +112,7 @@ public class SqlQueryRequestTests extends 
AbstractSerializingTestCase request.cursor(randomValueOtherThan(request.cursor(), SqlQueryResponseTests::randomStringCursor)) ); SqlQueryRequest newRequest = new SqlQueryRequest(instance.query(), instance.params(), instance.filter(), - instance.timeZone(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.cursor(), + instance.zoneId(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.cursor(), instance.requestInfo()); mutator.accept(newRequest); return newRequest; @@ -120,7 +120,7 @@ public class SqlQueryRequestTests extends AbstractSerializingTestCase sqlQueryRequest.timeZone(null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> sqlQueryRequest.zoneId(null)); assertEquals("time zone may not be null.", e.getMessage()); } } diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java index f2153065cbd..4e41dddb46c 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java @@ -114,10 +114,10 @@ public class SqlRequestParsersTests extends ESTestCase { assertEquals("whatever", request.cursor()); assertEquals("select", request.query()); - List list = new ArrayList(1); + List list = new ArrayList<>(1); list.add(new SqlTypedParamValue("whatever", 123)); assertEquals(list, request.params()); - assertEquals("UTC", request.timeZone().getID()); + assertEquals("UTC", request.zoneId().getId()); assertEquals(TimeValue.parseTimeValue("5s", "request_timeout"), request.requestTimeout()); assertEquals(TimeValue.parseTimeValue("10s", "page_timeout"), request.pageTimeout()); } diff --git 
a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java index 3d48f7fc7a4..4b047914067 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java @@ -37,7 +37,7 @@ public class SqlTranslateRequestTests extends AbstractSerializingTestCase mutator = randomFrom( request -> request.query(randomValueOtherThan(request.query(), () -> randomAlphaOfLength(5))), - request -> request.timeZone(randomValueOtherThan(request.timeZone(), ESTestCase::randomTimeZone)), + request -> request.zoneId(randomValueOtherThan(request.zoneId(), ESTestCase::randomZone)), request -> request.fetchSize(randomValueOtherThan(request.fetchSize(), () -> between(1, Integer.MAX_VALUE))), request -> request.requestTimeout(randomValueOtherThan(request.requestTimeout(), this::randomTV)), request -> request.filter(randomValueOtherThan(request.filter(), () -> request.filter() == null ? 
randomFilter(random()) : randomFilterOrNull(random()))) ); SqlTranslateRequest newRequest = new SqlTranslateRequest(instance.query(), instance.params(), instance.filter(), - instance.timeZone(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.requestInfo()); + instance.zoneId(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.requestInfo()); mutator.accept(newRequest); return newRequest; } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 096ebb64e52..4fe6a39820b 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -32,8 +32,8 @@ import java.io.InputStream; import java.security.AccessController; import java.security.PrivilegedAction; import java.sql.SQLException; +import java.time.ZoneId; import java.util.Collections; -import java.util.TimeZone; import java.util.function.Function; import static org.elasticsearch.xpack.sql.proto.RequestInfo.CLI; @@ -66,7 +66,7 @@ public class HttpClient { public SqlQueryResponse queryInit(String query, int fetchSize) throws SQLException { // TODO allow customizing the time zone - this is what session set/reset/get should be about // method called only from CLI. 
"client_id" is set to "cli" - SqlQueryRequest sqlRequest = new SqlQueryRequest(query, Collections.emptyList(), null, TimeZone.getTimeZone("UTC"), + SqlQueryRequest sqlRequest = new SqlQueryRequest(query, Collections.emptyList(), null, ZoneId.of("Z"), fetchSize, TimeValue.timeValueMillis(cfg.queryTimeout()), TimeValue.timeValueMillis(cfg.pageTimeout()), new RequestInfo(Mode.PLAIN, CLI)); return query(sqlRequest); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java index 8080959e3c6..a6af79e0fba 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java @@ -8,13 +8,13 @@ package org.elasticsearch.xpack.sql.proto; import org.elasticsearch.common.unit.TimeValue; -import java.util.TimeZone; +import java.time.ZoneId; /** * Sql protocol defaults and end-points shared between JDBC and REST protocol implementations */ public final class Protocol { - public static final TimeZone TIME_ZONE = TimeZone.getTimeZone("UTC"); + public static final ZoneId TIME_ZONE = ZoneId.of("Z"); /** * Global choice for the default fetch size. 
diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java index 651dc468bb9..34b19faef78 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java @@ -12,10 +12,10 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.time.ZoneId; import java.util.Collections; import java.util.List; import java.util.Objects; -import java.util.TimeZone; /** * Sql query request for JDBC/CLI client @@ -24,7 +24,7 @@ public class SqlQueryRequest extends AbstractSqlRequest { @Nullable private final String cursor; private final String query; - private final TimeZone timeZone; + private final ZoneId zoneId; private final int fetchSize; private final TimeValue requestTimeout; private final TimeValue pageTimeout; @@ -33,12 +33,12 @@ public class SqlQueryRequest extends AbstractSqlRequest { private final List params; - public SqlQueryRequest(String query, List params, TimeZone timeZone, int fetchSize, + public SqlQueryRequest(String query, List params, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, ToXContent filter, String cursor, RequestInfo requestInfo) { super(requestInfo); this.query = query; this.params = params; - this.timeZone = timeZone; + this.zoneId = zoneId; this.fetchSize = fetchSize; this.requestTimeout = requestTimeout; this.pageTimeout = pageTimeout; @@ -46,9 +46,9 @@ public class SqlQueryRequest extends AbstractSqlRequest { this.cursor = cursor; } - public SqlQueryRequest(String query, List params, ToXContent filter, TimeZone timeZone, + public SqlQueryRequest(String query, List params, ToXContent filter, ZoneId zoneId, int fetchSize, TimeValue 
requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { - this(query, params, timeZone, fetchSize, requestTimeout, pageTimeout, filter, null, requestInfo); + this(query, params, zoneId, fetchSize, requestTimeout, pageTimeout, filter, null, requestInfo); } public SqlQueryRequest(String cursor, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { @@ -81,8 +81,8 @@ public class SqlQueryRequest extends AbstractSqlRequest { /** * The client's time zone */ - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @@ -116,14 +116,20 @@ public class SqlQueryRequest extends AbstractSqlRequest { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } SqlQueryRequest that = (SqlQueryRequest) o; return fetchSize == that.fetchSize && Objects.equals(query, that.query) && Objects.equals(params, that.params) && - Objects.equals(timeZone, that.timeZone) && + Objects.equals(zoneId, that.zoneId) && Objects.equals(requestTimeout, that.requestTimeout) && Objects.equals(pageTimeout, that.pageTimeout) && Objects.equals(filter, that.filter) && @@ -132,7 +138,7 @@ public class SqlQueryRequest extends AbstractSqlRequest { @Override public int hashCode() { - return Objects.hash(super.hashCode(), query, timeZone, fetchSize, requestTimeout, pageTimeout, filter, cursor); + return Objects.hash(super.hashCode(), query, zoneId, fetchSize, requestTimeout, pageTimeout, filter, cursor); } @Override @@ -151,8 +157,8 @@ public class SqlQueryRequest extends AbstractSqlRequest { } builder.endArray(); } - if (timeZone != null) { - builder.field("time_zone", timeZone.getID()); + if (zoneId != null) { + builder.field("time_zone", zoneId.getId()); } if (fetchSize != 
Protocol.FETCH_SIZE) { builder.field("fetch_size", fetchSize); @@ -172,5 +178,4 @@ public class SqlQueryRequest extends AbstractSqlRequest { } return builder; } - -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 8a0163df0bb..14d7fa57fff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -263,7 +263,7 @@ public class Querier { private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor totalCount) { if (ref instanceof GroupByRef) { GroupByRef r = (GroupByRef) ref; - return new CompositeKeyExtractor(r.key(), r.property(), r.timeZone()); + return new CompositeKeyExtractor(r.key(), r.property(), r.zoneId()); } if (ref instanceof MetricAggRef) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java index c799ab27dca..0c374038953 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java @@ -16,7 +16,6 @@ import java.io.IOException; import java.time.ZoneId; import java.util.Map; import java.util.Objects; -import java.util.TimeZone; public class CompositeKeyExtractor implements BucketExtractor { @@ -27,40 +26,37 @@ public class CompositeKeyExtractor implements BucketExtractor { private final String key; private final Property property; - private final TimeZone timeZone; private final ZoneId zoneId; /** * Constructs a new CompositeKeyExtractor instance. 
* The time-zone parameter is used to indicate a date key. */ - public CompositeKeyExtractor(String key, Property property, TimeZone timeZone) { + public CompositeKeyExtractor(String key, Property property, ZoneId zoneId) { this.key = key; this.property = property; - this.timeZone = timeZone; - this.zoneId = timeZone != null ? timeZone.toZoneId() : null; + this.zoneId = zoneId; } CompositeKeyExtractor(StreamInput in) throws IOException { key = in.readString(); property = in.readEnum(Property.class); if (in.readBoolean()) { - timeZone = TimeZone.getTimeZone(in.readString()); + zoneId = ZoneId.of(in.readString()); } else { - timeZone = null; + zoneId = null; } - this.zoneId = timeZone != null ? timeZone.toZoneId() : null; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(key); out.writeEnum(property); - if (timeZone == null) { + if (zoneId == null) { out.writeBoolean(false); } else { out.writeBoolean(true); - out.writeString(timeZone.getID()); + out.writeString(zoneId.getId()); } } @@ -72,8 +68,8 @@ public class CompositeKeyExtractor implements BucketExtractor { return property; } - TimeZone timeZone() { - return timeZone; + ZoneId zoneId() { + return zoneId; } @Override @@ -95,7 +91,7 @@ public class CompositeKeyExtractor implements BucketExtractor { Object object = ((Map) m).get(key); - if (timeZone != null) { + if (zoneId != null) { if (object == null) { return object; } else if (object instanceof Long) { @@ -110,7 +106,7 @@ public class CompositeKeyExtractor implements BucketExtractor { @Override public int hashCode() { - return Objects.hash(key, property, timeZone); + return Objects.hash(key, property, zoneId); } @Override @@ -126,7 +122,7 @@ public class CompositeKeyExtractor implements BucketExtractor { CompositeKeyExtractor other = (CompositeKeyExtractor) obj; return Objects.equals(key, other.key) && Objects.equals(property, other.property) - && Objects.equals(timeZone, other.timeZone); + && Objects.equals(zoneId, 
other.zoneId); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 00581ffd84e..d6faf167322 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -100,6 +100,7 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; +import java.time.ZoneId; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -108,7 +109,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; -import java.util.TimeZone; import java.util.function.BiFunction; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -441,13 +441,13 @@ public class FunctionRegistry { if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), cfg.timeZone()); + return ctorRef.build(location, children.get(0), cfg.zoneId()); }; return def(function, builder, true, names); } interface DatetimeUnaryFunctionBuilder { - T build(Location location, Expression target, TimeZone tz); + T build(Location location, Expression target, ZoneId zi); } /** @@ -463,13 +463,13 @@ public class FunctionRegistry { if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.get(1), cfg.timeZone()); + return ctorRef.build(location, children.get(0), children.get(1), cfg.zoneId()); }; return def(function, builder, false, names); } interface DatetimeBinaryFunctionBuilder { - T build(Location location, Expression lhs, 
Expression rhs, TimeZone tz); + T build(Location location, Expression lhs, Expression rhs, ZoneId zi); } /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java index 200682d980a..4c1b761b1a0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -15,26 +15,26 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; +import java.time.ZoneId; import java.util.Objects; -import java.util.TimeZone; public class Histogram extends GroupingFunction { private final Literal interval; - private final TimeZone timeZone; + private final ZoneId zoneId; - public Histogram(Location location, Expression field, Expression interval, TimeZone timeZone) { + public Histogram(Location location, Expression field, Expression interval, ZoneId zoneId) { super(location, field); this.interval = (Literal) interval; - this.timeZone = timeZone; + this.zoneId = zoneId; } public Literal interval() { return interval; } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @Override @@ -54,7 +54,7 @@ public class Histogram extends GroupingFunction { @Override protected GroupingFunction replaceChild(Expression newChild) { - return new Histogram(location(), newChild, interval, timeZone); + return new Histogram(location(), newChild, interval, zoneId); } @Override @@ -64,12 +64,12 @@ public class Histogram extends GroupingFunction { @Override protected NodeInfo info() { - return NodeInfo.create(this, Histogram::new, field(), interval, timeZone); + return NodeInfo.create(this, Histogram::new, field(), interval, zoneId); } 
@Override public int hashCode() { - return Objects.hash(field(), interval, timeZone); + return Objects.hash(field(), interval, zoneId); } @Override @@ -77,7 +77,7 @@ public class Histogram extends GroupingFunction { if (super.equals(obj)) { Histogram other = (Histogram) obj; return Objects.equals(interval, other.interval) - && Objects.equals(timeZone, other.timeZone); + && Objects.equals(zoneId, other.zoneId); } return false; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java index 5c874cc7667..d4265d123e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java @@ -7,14 +7,18 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; +import java.util.Locale; import java.util.Objects; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + public class Cast extends UnaryScalarFunction { private final DataType dataType; @@ -74,6 +78,18 @@ public class Cast extends UnaryScalarFunction { return new CastProcessor(DataTypeConversion.conversionFor(from(), to())); } + @Override + public ScriptTemplate asScript() { + ScriptTemplate fieldAsScript = asScript(field()); + return new ScriptTemplate( + formatTemplate(String.format(Locale.ROOT, "{sql}.cast(%s,{})", 
fieldAsScript.template())), + paramsBuilder() + .script(fieldAsScript.params()) + .variable(dataType.name()) + .build(), + dataType()); + } + @Override public int hashCode() { return Objects.hash(super.hashCode(), dataType); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java index cfee964b01e..1ac143c2a02 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java @@ -16,40 +16,37 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Objects; -import java.util.TimeZone; abstract class BaseDateTimeFunction extends UnaryScalarFunction { - private final TimeZone timeZone; private final ZoneId zoneId; private final String name; - BaseDateTimeFunction(Location location, Expression field, TimeZone timeZone) { + BaseDateTimeFunction(Location location, Expression field, ZoneId zoneId) { super(location, field); - this.timeZone = timeZone; - this.zoneId = timeZone != null ? 
timeZone.toZoneId() : null; + this.zoneId = zoneId; StringBuilder sb = new StringBuilder(super.name()); // add timezone as last argument - sb.insert(sb.length() - 1, " [" + timeZone.getID() + "]"); + sb.insert(sb.length() - 1, " [" + zoneId.getId() + "]"); this.name = sb.toString(); } @Override protected final NodeInfo info() { - return NodeInfo.create(this, ctorForInfo(), field(), timeZone()); + return NodeInfo.create(this, ctorForInfo(), field(), zoneId()); } - protected abstract NodeInfo.NodeCtor2 ctorForInfo(); + protected abstract NodeInfo.NodeCtor2 ctorForInfo(); @Override protected TypeResolution resolveType() { return Expressions.typeMustBeDate(field(), functionName(), ParamOrdinal.DEFAULT); } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @Override @@ -82,11 +79,11 @@ abstract class BaseDateTimeFunction extends UnaryScalarFunction { } BaseDateTimeFunction other = (BaseDateTimeFunction) obj; return Objects.equals(other.field(), field()) - && Objects.equals(other.timeZone(), timeZone()); + && Objects.equals(other.zoneId(), zoneId()); } @Override public int hashCode() { - return Objects.hash(field(), timeZone()); + return Objects.hash(field(), zoneId()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java index ce6bd1ad470..608057cf235 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java @@ -14,30 +14,26 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.time.ZoneId; import java.time.ZonedDateTime; 
-import java.util.TimeZone; public abstract class BaseDateTimeProcessor implements Processor { - private final TimeZone timeZone; private final ZoneId zoneId; - BaseDateTimeProcessor(TimeZone timeZone) { - this.timeZone = timeZone; - this.zoneId = timeZone.toZoneId(); + BaseDateTimeProcessor(ZoneId zoneId) { + this.zoneId = zoneId; } BaseDateTimeProcessor(StreamInput in) throws IOException { - timeZone = TimeZone.getTimeZone(in.readString()); - zoneId = timeZone.toZoneId(); + zoneId = ZoneId.of(in.readString()); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(timeZone.getID()); + out.writeString(zoneId.getId()); } - TimeZone timeZone() { - return timeZone; + ZoneId zoneId() { + return zoneId; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java index 0b429fdf1a1..1ad00c8785f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.sql.type.DataType; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; -import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -24,8 +23,8 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { private final DateTimeExtractor extractor; - DateTimeFunction(Location location, Expression field, TimeZone timeZone, DateTimeExtractor extractor) { - super(location, field, timeZone); + DateTimeFunction(Location location, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(location, field, zoneId); 
this.extractor = extractor; } @@ -50,7 +49,7 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { ScriptTemplate script = super.asScript(); String template = formatTemplate("{sql}.dateTimeChrono(" + script.template() + ", {}, {})"); params.script(script.params()) - .variable(timeZone().getID()) + .variable(zoneId().getId()) .variable(extractor.chronoField().name()); return new ScriptTemplate(template, params.build(), dataType()); @@ -59,7 +58,7 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { @Override protected Processor makeProcessor() { - return new DateTimeProcessor(extractor, timeZone()); + return new DateTimeProcessor(extractor, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java index 1a60ba66f48..0a59c4d52ea 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; import org.elasticsearch.xpack.sql.tree.Location; -import java.util.TimeZone; +import java.time.ZoneId; /** * DateTimeFunctions that can be mapped as histogram. 
This means the dates order is maintained @@ -17,8 +17,8 @@ import java.util.TimeZone; */ public abstract class DateTimeHistogramFunction extends DateTimeFunction { - DateTimeHistogramFunction(Location location, Expression field, TimeZone timeZone, DateTimeExtractor extractor) { - super(location, field, timeZone, extractor); + DateTimeHistogramFunction(Location location, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(location, field, zoneId, extractor); } /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java index c248b50b51d..5357462fdd6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java @@ -9,10 +9,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; import java.util.Objects; -import java.util.TimeZone; public class DateTimeProcessor extends BaseDateTimeProcessor { @@ -46,8 +46,8 @@ public class DateTimeProcessor extends BaseDateTimeProcessor { public static final String NAME = "dt"; private final DateTimeExtractor extractor; - public DateTimeProcessor(DateTimeExtractor extractor, TimeZone timeZone) { - super(timeZone); + public DateTimeProcessor(DateTimeExtractor extractor, ZoneId zoneId) { + super(zoneId); this.extractor = extractor; } @@ -78,7 +78,7 @@ public class DateTimeProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(extractor, timeZone()); + return Objects.hash(extractor, zoneId()); } @Override @@ 
-88,7 +88,7 @@ public class DateTimeProcessor extends BaseDateTimeProcessor { } DateTimeProcessor other = (DateTimeProcessor) obj; return Objects.equals(extractor, other.extractor) - && Objects.equals(timeZone(), other.timeZone()); + && Objects.equals(zoneId(), other.zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java index 8d6e12544d0..b5144020e63 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDate import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the week from a datetime in text format (Monday, Tuesday etc.) 
*/ public class DayName extends NamedDateTimeFunction { - public DayName(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NameExtractor.DAY_NAME); + public DayName(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NameExtractor.DAY_NAME); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayName::new; } @Override protected DayName replaceChild(Expression newChild) { - return new DayName(location(), newChild, timeZone()); + return new DayName(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java index 3c402ef2f4a..837779888f2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the month from a datetime. 
*/ public class DayOfMonth extends DateTimeFunction { - public DayOfMonth(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.DAY_OF_MONTH); + public DayOfMonth(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.DAY_OF_MONTH); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfMonth::new; } @Override protected DayOfMonth replaceChild(Expression newChild) { - return new DayOfMonth(location(), newChild, timeZone()); + return new DayOfMonth(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java index 9b03ed0548a..5bc54654bdf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDat import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the week from a datetime in non-ISO format. 1 is Sunday, 2 is Monday, etc. 
*/ public class DayOfWeek extends NonIsoDateTimeFunction { - public DayOfWeek(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NonIsoDateTimeExtractor.DAY_OF_WEEK); + public DayOfWeek(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NonIsoDateTimeExtractor.DAY_OF_WEEK); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfWeek::new; } @Override protected DayOfWeek replaceChild(Expression newChild) { - return new DayOfWeek(location(), newChild, timeZone()); + return new DayOfWeek(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java index a6b843bd0bd..9cacb78b342 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java @@ -11,24 +11,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the year from a datetime. 
*/ public class DayOfYear extends DateTimeFunction { - public DayOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.DAY_OF_YEAR); + public DayOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.DAY_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfYear::new; } @Override protected UnaryScalarFunction replaceChild(Expression newChild) { - return new DayOfYear(location(), newChild, timeZone()); + return new DayOfYear(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java index 193a14c0932..490ec721042 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the hour of the day from a datetime. 
*/ public class HourOfDay extends DateTimeFunction { - public HourOfDay(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.HOUR_OF_DAY); + public HourOfDay(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.HOUR_OF_DAY); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return HourOfDay::new; } @Override protected HourOfDay replaceChild(Expression newChild) { - return new HourOfDay(location(), newChild, timeZone()); + return new HourOfDay(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java index 16a3a0098ae..ff02f6490d0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the week (following the ISO standard) from a datetime. 1 is Monday, 2 is Tuesday, etc. 
*/ public class IsoDayOfWeek extends DateTimeFunction { - public IsoDayOfWeek(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.ISO_DAY_OF_WEEK); + public IsoDayOfWeek(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.ISO_DAY_OF_WEEK); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return IsoDayOfWeek::new; } @Override protected IsoDayOfWeek replaceChild(Expression newChild) { - return new IsoDayOfWeek(location(), newChild, timeZone()); + return new IsoDayOfWeek(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java index 5e540e5b846..f50deec9fe0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the week of the year from a datetime following the ISO standard. 
*/ public class IsoWeekOfYear extends DateTimeFunction { - public IsoWeekOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.ISO_WEEK_OF_YEAR); + public IsoWeekOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.ISO_WEEK_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return IsoWeekOfYear::new; } @Override protected IsoWeekOfYear replaceChild(Expression newChild) { - return new IsoWeekOfYear(location(), newChild, timeZone()); + return new IsoWeekOfYear(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java index 25ef41a18ca..e16e0caa836 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java @@ -10,25 +10,25 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the minute of the day from a datetime. 
*/ public class MinuteOfDay extends DateTimeFunction { - public MinuteOfDay(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.MINUTE_OF_DAY); + public MinuteOfDay(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.MINUTE_OF_DAY); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MinuteOfDay::new; } @Override protected MinuteOfDay replaceChild(Expression newChild) { - return new MinuteOfDay(location(), newChild, timeZone()); + return new MinuteOfDay(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java index 798b7007237..0a49bb042f9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Exract the minute of the hour from a datetime. 
*/ public class MinuteOfHour extends DateTimeFunction { - public MinuteOfHour(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.MINUTE_OF_HOUR); + public MinuteOfHour(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.MINUTE_OF_HOUR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MinuteOfHour::new; } @Override protected MinuteOfHour replaceChild(Expression newChild) { - return new MinuteOfHour(location(), newChild, timeZone()); + return new MinuteOfHour(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java index 7a951281015..570a4a2ea2d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDate import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the month from a datetime in text format (January, February etc.) 
*/ public class MonthName extends NamedDateTimeFunction { - public MonthName(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NameExtractor.MONTH_NAME); + public MonthName(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NameExtractor.MONTH_NAME); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MonthName::new; } @Override protected MonthName replaceChild(Expression newChild) { - return new MonthName(location(), newChild, timeZone()); + return new MonthName(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java index 9231987b5ad..88c025a7231 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the month of the year from a datetime. 
*/ public class MonthOfYear extends DateTimeFunction { - public MonthOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.MONTH_OF_YEAR); + public MonthOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.MONTH_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MonthOfYear::new; } @Override protected MonthOfYear replaceChild(Expression newChild) { - return new MonthOfYear(location(), newChild, timeZone()); + return new MonthOfYear(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java index 4ec42def0eb..d42c18ce88c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Locale; -import java.util.TimeZone; import static java.lang.String.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -28,8 +28,8 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { private final NameExtractor nameExtractor; - NamedDateTimeFunction(Location location, Expression field, TimeZone timeZone, NameExtractor nameExtractor) { - super(location, field, timeZone); + NamedDateTimeFunction(Location location, Expression field, ZoneId zoneId, NameExtractor 
nameExtractor) { + super(location, field, zoneId); this.nameExtractor = nameExtractor; } @@ -45,13 +45,13 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { StringUtils.underscoreToLowerCamelCase(nameExtractor.name()))), paramsBuilder() .variable(field.name()) - .variable(timeZone().getID()).build(), + .variable(zoneId().getId()).build(), dataType()); } @Override protected Processor makeProcessor() { - return new NamedDateTimeProcessor(nameExtractor, timeZone()); + return new NamedDateTimeProcessor(nameExtractor, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java index a0707d2a65e..7a23b40be78 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java @@ -14,7 +14,6 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Locale; import java.util.Objects; -import java.util.TimeZone; import java.util.function.Function; public class NamedDateTimeProcessor extends BaseDateTimeProcessor { @@ -46,8 +45,8 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { private final NameExtractor extractor; - public NamedDateTimeProcessor(NameExtractor extractor, TimeZone timeZone) { - super(timeZone); + public NamedDateTimeProcessor(NameExtractor extractor, ZoneId zoneId) { + super(zoneId); this.extractor = extractor; } @@ -78,7 +77,7 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(extractor, timeZone()); + return Objects.hash(extractor, zoneId()); } @Override @@ -88,7 +87,7 @@ public class 
NamedDateTimeProcessor extends BaseDateTimeProcessor { } NamedDateTimeProcessor other = (NamedDateTimeProcessor) obj; return Objects.equals(extractor, other.extractor) - && Objects.equals(timeZone(), other.timeZone()); + && Objects.equals(zoneId(), other.zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java index b6d28f16a57..82af7380d53 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Locale; -import java.util.TimeZone; import static java.lang.String.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -28,8 +28,8 @@ abstract class NonIsoDateTimeFunction extends BaseDateTimeFunction { private final NonIsoDateTimeExtractor extractor; - NonIsoDateTimeFunction(Location location, Expression field, TimeZone timeZone, NonIsoDateTimeExtractor extractor) { - super(location, field, timeZone); + NonIsoDateTimeFunction(Location location, Expression field, ZoneId zoneId, NonIsoDateTimeExtractor extractor) { + super(location, field, zoneId); this.extractor = extractor; } @@ -45,13 +45,13 @@ abstract class NonIsoDateTimeFunction extends BaseDateTimeFunction { StringUtils.underscoreToLowerCamelCase(extractor.name()))), paramsBuilder() .variable(field.name()) - .variable(timeZone().getID()).build(), + .variable(zoneId().getId()).build(), 
dataType()); } @Override protected Processor makeProcessor() { - return new NonIsoDateTimeProcessor(extractor, timeZone()); + return new NonIsoDateTimeProcessor(extractor, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java index e6d4d452169..714c7c86927 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java @@ -60,8 +60,8 @@ public class NonIsoDateTimeProcessor extends BaseDateTimeProcessor { private final NonIsoDateTimeExtractor extractor; - public NonIsoDateTimeProcessor(NonIsoDateTimeExtractor extractor, TimeZone timeZone) { - super(timeZone); + public NonIsoDateTimeProcessor(NonIsoDateTimeExtractor extractor, ZoneId zoneId) { + super(zoneId); this.extractor = extractor; } @@ -92,7 +92,7 @@ public class NonIsoDateTimeProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(extractor, timeZone()); + return Objects.hash(extractor, zoneId()); } @Override @@ -102,7 +102,7 @@ public class NonIsoDateTimeProcessor extends BaseDateTimeProcessor { } NonIsoDateTimeProcessor other = (NonIsoDateTimeProcessor) obj; return Objects.equals(extractor, other.extractor) - && Objects.equals(timeZone(), other.timeZone()); + && Objects.equals(zoneId(), other.zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java index 4da5c94626e..63455c76ba0 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java @@ -14,16 +14,16 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import org.elasticsearch.xpack.sql.type.DataType; +import java.time.ZoneId; import java.time.ZonedDateTime; -import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor.quarter; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class Quarter extends BaseDateTimeFunction { - public Quarter(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone); + public Quarter(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId); } @Override @@ -36,24 +36,24 @@ public class Quarter extends BaseDateTimeFunction { return new ScriptTemplate(formatTemplate("{sql}.quarter(doc[{}].value, {})"), paramsBuilder() .variable(field.name()) - .variable(timeZone().getID()) + .variable(zoneId().getId()) .build(), dataType()); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return Quarter::new; } @Override protected Quarter replaceChild(Expression newChild) { - return new Quarter(location(), newChild, timeZone()); + return new Quarter(location(), newChild, zoneId()); } @Override protected Processor makeProcessor() { - return new QuarterProcessor(timeZone()); + return new QuarterProcessor(zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java index d2a20de84d3..7d09093d35f 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java @@ -14,12 +14,11 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Locale; import java.util.Objects; -import java.util.TimeZone; public class QuarterProcessor extends BaseDateTimeProcessor { - public QuarterProcessor(TimeZone timeZone) { - super(timeZone); + public QuarterProcessor(ZoneId zoneId) { + super(zoneId); } public QuarterProcessor(StreamInput in) throws IOException { @@ -49,7 +48,7 @@ public class QuarterProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(timeZone()); + return Objects.hash(zoneId()); } @Override @@ -58,6 +57,6 @@ public class QuarterProcessor extends BaseDateTimeProcessor { return false; } DateTimeProcessor other = (DateTimeProcessor) obj; - return Objects.equals(timeZone(), other.timeZone()); + return Objects.equals(zoneId(), other.zoneId()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java index 3702c4beb3f..c06d48ba287 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the second of the minute from a datetime. 
*/ public class SecondOfMinute extends DateTimeFunction { - public SecondOfMinute(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.SECOND_OF_MINUTE); + public SecondOfMinute(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.SECOND_OF_MINUTE); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return SecondOfMinute::new; } @Override protected SecondOfMinute replaceChild(Expression newChild) { - return new SecondOfMinute(location(), newChild, timeZone()); + return new SecondOfMinute(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java index 1d64eec447d..a3d8a128fbc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDat import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the week of the year from a datetime following the non-ISO standard. 
*/ public class WeekOfYear extends NonIsoDateTimeFunction { - public WeekOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NonIsoDateTimeExtractor.WEEK_OF_YEAR); + public WeekOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NonIsoDateTimeExtractor.WEEK_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return WeekOfYear::new; } @Override protected WeekOfYear replaceChild(Expression newChild) { - return new WeekOfYear(location(), newChild, timeZone()); + return new WeekOfYear(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java index 0ba4c47058d..0f78cf4d78a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; import java.util.concurrent.TimeUnit; /** @@ -20,18 +20,18 @@ public class Year extends DateTimeHistogramFunction { private static long YEAR_IN_MILLIS = TimeUnit.DAYS.toMillis(1) * 365L; - public Year(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.YEAR); + public Year(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return Year::new; } 
@Override protected Year replaceChild(Expression newChild) { - return new Year(location(), newChild, timeZone()); + return new Year(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java index 3bd03986eb5..d89d8fe6efb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java @@ -51,7 +51,7 @@ public class Concat extends BinaryScalarFunction { @Override public boolean nullable() { - return left().nullable() && right().nullable(); + return false; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index b107598710c..a67da8d6efd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Bina import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InProcessor; import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor.RegexOperation; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.util.DateUtils; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -164,6 +165,7 @@ public final class InternalSqlScriptUtils { // Regex // public 
static Boolean regex(String value, String pattern) { + // TODO: this needs to be improved to avoid creating the pattern on every call return RegexOperation.match(value, pattern); } @@ -458,4 +460,11 @@ public final class InternalSqlScriptUtils { public static String ucase(String s) { return (String) StringOperation.UCASE.apply(s); } + + // + // Casting + // + public static Object cast(Object value, String typeName) { + return DataTypeConversion.convert(value, DataType.fromTypeName(typeName)); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java index a5c8028f670..9dc3c69fd29 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java @@ -11,26 +11,24 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Like extends RegexMatch { - public Like(Location location, Expression left, LikePattern right) { - super(location, left, right); + private final LikePattern pattern; + + public Like(Location location, Expression left, LikePattern pattern) { + super(location, left, pattern.asJavaRegex()); + this.pattern = pattern; + } + + public LikePattern pattern() { + return pattern; } @Override protected NodeInfo info() { - return NodeInfo.create(this, Like::new, left(), pattern()); - } - - public LikePattern pattern() { - return (LikePattern) right(); + return NodeInfo.create(this, Like::new, field(), pattern); } @Override - protected Like replaceChildren(Expression newLeft, Expression newRight) { - return new Like(location(), newLeft, (LikePattern) newRight); - } - - @Override - protected String asString(Expression pattern) { - return ((LikePattern) pattern).asJavaRegex(); + protected Like replaceChild(Expression newLeft) { + return new Like(location(), 
newLeft, pattern); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java index bde8129f8e7..d07df617df9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java @@ -5,10 +5,6 @@ */ package org.elasticsearch.xpack.sql.expression.predicate.regex; -import org.elasticsearch.xpack.sql.expression.LeafExpression; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; -import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Objects; @@ -21,7 +17,7 @@ import java.util.Objects; * * To prevent conflicts with ES, the string and char must be validated to not contain '*'. 
*/ -public class LikePattern extends LeafExpression { +public class LikePattern { private final String pattern; private final char escape; @@ -30,8 +26,7 @@ public class LikePattern extends LeafExpression { private final String wildcard; private final String indexNameWildcard; - public LikePattern(Location location, String pattern, char escape) { - super(location); + public LikePattern(String pattern, char escape) { this.pattern = pattern; this.escape = escape; // early initialization to force string validation @@ -40,11 +35,6 @@ public class LikePattern extends LeafExpression { this.indexNameWildcard = StringUtils.likeToIndexWildcard(pattern, escape); } - @Override - protected NodeInfo info() { - return NodeInfo.create(this, LikePattern::new, pattern, escape); - } - public String pattern() { return pattern; } @@ -74,16 +64,6 @@ public class LikePattern extends LeafExpression { return indexNameWildcard; } - @Override - public boolean nullable() { - return false; - } - - @Override - public DataType dataType() { - return DataType.KEYWORD; - } - @Override public int hashCode() { return Objects.hash(pattern, escape); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java index 346c3062bfa..a09586fd35f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java @@ -6,28 +6,29 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class RLike extends RegexMatch { - public RLike(Location location, Expression left, Literal right) { - 
super(location, left, right); + private final String pattern; + + public RLike(Location location, Expression left, String pattern) { + super(location, left, pattern); + this.pattern = pattern; + } + + public String pattern() { + return pattern; } @Override protected NodeInfo info() { - return NodeInfo.create(this, RLike::new, left(), (Literal) right()); + return NodeInfo.create(this, RLike::new, field(), pattern); } @Override - protected RLike replaceChildren(Expression newLeft, Expression newRight) { - return new RLike(location(), newLeft, (Literal) newRight); - } - - @Override - protected String asString(Expression pattern) { - return pattern.fold().toString(); + protected RLike replaceChild(Expression newChild) { + return new RLike(location(), newChild, pattern); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java index e1e41006492..f9390fdfa45 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java @@ -7,15 +7,19 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor.RegexOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; -public abstract class RegexMatch extends BinaryPredicate { +public abstract class RegexMatch extends UnaryScalarFunction { - protected RegexMatch(Location location, Expression 
value, Expression pattern) { - super(location, value, pattern, RegexOperation.INSTANCE); + private final String pattern; + + protected RegexMatch(Location location, Expression value, String pattern) { + super(location, value); + this.pattern = pattern; } @Override @@ -23,18 +27,25 @@ public abstract class RegexMatch extends BinaryPredicate info() { - return NodeInfo.create(this, RegexPipe::new, expression(), left(), right()); - } - - @Override - protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new RegexPipe(location(), expression(), left, right); - } - - @Override - public RegexProcessor asProcessor() { - return new RegexProcessor(left().asProcessor(), right().asProcessor()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java index 16f6f0a6949..7f9a2ed7623 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java @@ -7,66 +7,47 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.expression.predicate.PredicateBiFunction; import java.io.IOException; import java.util.Objects; import java.util.regex.Pattern; -public class RegexProcessor extends BinaryProcessor { +public class RegexProcessor implements Processor { - public static class RegexOperation implements PredicateBiFunction { + public static class RegexOperation { - 
public static final RegexOperation INSTANCE = new RegexOperation(); + public static Boolean match(Object value, Pattern pattern) { + if (pattern == null) { + return Boolean.TRUE; + } - @Override - public String name() { - return symbol(); - } - - @Override - public String symbol() { - return "REGEX"; - } - - @Override - public Boolean doApply(String value, String pattern) { - return match(value, pattern); - } - - public static Boolean match(Object value, Object pattern) { - if (value == null || pattern == null) { + if (value == null) { return null; } - Pattern p = Pattern.compile(pattern.toString()); - return p.matcher(value.toString()).matches(); + return pattern.matcher(value.toString()).matches(); + } + + public static Boolean match(Object value, String pattern) { + if (pattern == null) { + return Boolean.TRUE; + } + + if (value == null) { + return null; + } + + return Pattern.compile(pattern).matcher(value.toString()).matches(); } } public static final String NAME = "rgx"; - public RegexProcessor(Processor value, Processor pattern) { - super(value, pattern); - } + private Pattern pattern; - public RegexProcessor(StreamInput in) throws IOException { - super(in); - } - - @Override - protected Boolean doProcess(Object value, Object pattern) { - return RegexOperation.match(value, pattern); - } - - @Override - protected void checkParameter(Object param) { - if (!(param instanceof String || param instanceof Character)) { - throw new SqlIllegalArgumentException("A string/char is required; received [{}]", param); - } + public RegexProcessor(String pattern) { + this.pattern = pattern != null ? 
Pattern.compile(pattern) : null; } @Override @@ -74,12 +55,23 @@ public class RegexProcessor extends BinaryProcessor { return NAME; } + public RegexProcessor(StreamInput in) throws IOException { + this(in.readOptionalString()); + } + @Override - protected void doWrite(StreamOutput out) throws IOException {} + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(pattern != null ? pattern.toString() : null); + } + + @Override + public Object process(Object input) { + return RegexOperation.match(input, pattern); + } @Override public int hashCode() { - return Objects.hash(left(), right()); + return Objects.hash(pattern); } @Override @@ -93,6 +85,6 @@ public class RegexProcessor extends BinaryProcessor { } RegexProcessor other = (RegexProcessor) obj; - return Objects.equals(left(), other.left()) && Objects.equals(right(), other.right()); + return Objects.equals(pattern, other.pattern); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index cd1cb189b6a..f7d659a2933 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -232,7 +232,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { e = new Like(loc, exp, visitPattern(pCtx.pattern())); break; case SqlBaseParser.RLIKE: - e = new RLike(loc, exp, new Literal(source(pCtx.regex), string(pCtx.regex), DataType.KEYWORD)); + e = new RLike(loc, exp, string(pCtx.regex)); break; case SqlBaseParser.NULL: // shortcut to avoid double negation later on (since there's no IsNull (missing in ES is a negated exists)) @@ -301,7 +301,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { } } - return new LikePattern(source(ctx), pattern, escape); + return new 
LikePattern(pattern, escape); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 35ba50ab75a..20aad3f2f9a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -62,11 +62,12 @@ import org.elasticsearch.xpack.sql.rule.RuleExecutor; import org.elasticsearch.xpack.sql.session.EmptyExecutable; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; +import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.ZoneId; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; -import java.util.TimeZone; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.sql.planner.QueryTranslator.and; @@ -77,7 +78,6 @@ import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toQuery; * Folds the PhysicalPlan into a {@link Query}. */ class QueryFolder extends RuleExecutor { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); PhysicalPlan fold(PhysicalPlan plan) { return execute(plan); @@ -283,7 +283,7 @@ class QueryFolder extends RuleExecutor { if (matchingGroup != null) { if (exp instanceof Attribute || exp instanceof ScalarFunction) { Processor action = null; - TimeZone tz = DataType.DATE == exp.dataType() ? UTC : null; + ZoneId zi = DataType.DATE == exp.dataType() ? 
DateUtils.UTC : null; /* * special handling of dates since aggs return the typed Date object which needs * extraction instead of handling this in the scroller, the folder handles this @@ -291,9 +291,9 @@ class QueryFolder extends RuleExecutor { */ if (exp instanceof DateTimeHistogramFunction) { action = ((UnaryPipe) p).action(); - tz = ((DateTimeFunction) exp).timeZone(); + zi = ((DateTimeFunction) exp).zoneId(); } - return new AggPathInput(exp.location(), exp, new GroupByRef(matchingGroup.id(), null, tz), action); + return new AggPathInput(exp.location(), exp, new GroupByRef(matchingGroup.id(), null, zi), action); } } // or found an aggregate expression (which has to work on an attribute used for grouping) @@ -334,8 +334,8 @@ class QueryFolder extends RuleExecutor { // check if the field is a date - if so mark it as such to interpret the long as a date // UTC is used since that's what the server uses and there's no conversion applied // (like for date histograms) - TimeZone dt = DataType.DATE == child.dataType() ? UTC : null; - queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, dt)); + ZoneId zi = DataType.DATE == child.dataType() ? DateUtils.UTC : null; + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi)); } // handle histogram else if (child instanceof GroupingFunction) { @@ -358,8 +358,8 @@ class QueryFolder extends RuleExecutor { matchingGroup = groupingContext.groupFor(ne); Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne)); - TimeZone dt = DataType.DATE == ne.dataType() ? UTC : null; - queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, dt)); + ZoneId zi = DataType.DATE == ne.dataType() ? 
DateUtils.UTC : null; + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 23352af790d..af180aae90b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.literal.Intervals; import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; @@ -103,7 +104,6 @@ import java.util.function.Supplier; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Foldables.doubleValuesOf; -import static org.elasticsearch.xpack.sql.expression.Foldables.stringValueOf; import static org.elasticsearch.xpack.sql.expression.Foldables.valueOf; final class QueryTranslator { @@ -121,7 +121,8 @@ final class QueryTranslator { new Likes(), new StringQueries(), new Matches(), - new MultiMatches() + new MultiMatches(), + new Scalars() ); private static final List> AGG_TRANSLATORS = Arrays.asList( @@ -259,7 +260,7 @@ final class QueryTranslator { // dates are handled differently because of date histograms if (exp instanceof DateTimeHistogramFunction) { DateTimeHistogramFunction dthf = (DateTimeHistogramFunction) exp; - key = new 
GroupByDateHistogram(aggId, nameOf(exp), dthf.interval(), dthf.timeZone()); + key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.interval(), dthf.zoneId()); } // all other scalar functions become a script else if (exp instanceof ScalarFunction) { @@ -277,9 +278,9 @@ final class QueryTranslator { long intervalAsMillis = Intervals.inMillis(h.interval()); // TODO: set timezone if (field instanceof FieldAttribute || field instanceof DateTimeHistogramFunction) { - key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.timeZone()); + key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.zoneId()); } else if (field instanceof Function) { - key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.timeZone()); + key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.zoneId()); } } // numeric histogram @@ -447,13 +448,13 @@ final class QueryTranslator { boolean inexact = true; String target = null; - if (e.left() instanceof FieldAttribute) { - FieldAttribute fa = (FieldAttribute) e.left(); + if (e.field() instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) e.field(); inexact = fa.isInexact(); target = nameOf(inexact ? 
fa : fa.exactAttribute()); } else { throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE", - Expressions.name(e.left())); + Expressions.name(e.field())); } if (e instanceof Like) { @@ -462,21 +463,21 @@ final class QueryTranslator { q = new QueryStringQuery(e.location(), p.asLuceneWildcard(), target); } else { - q = new WildcardQuery(e.location(), nameOf(e.left()), p.asLuceneWildcard()); + q = new WildcardQuery(e.location(), nameOf(e.field()), p.asLuceneWildcard()); } } if (e instanceof RLike) { - String pattern = stringValueOf(e.right()); + String pattern = ((RLike) e).pattern(); if (inexact) { q = new QueryStringQuery(e.location(), "/" + pattern + "/", target); } else { - q = new RegexQuery(e.location(), nameOf(e.left()), pattern); + q = new RegexQuery(e.location(), nameOf(e.field()), pattern); } } - return q != null ? new QueryTranslation(wrapIfNested(q, e.left())) : null; + return q != null ? new QueryTranslation(wrapIfNested(q, e.field())) : null; } } @@ -529,8 +530,16 @@ final class QueryTranslator { if (onAggs) { aggFilter = new AggFilter(not.id().toString(), not.asScript()); } else { - query = handleQuery(not, not.field(), - () -> new NotQuery(not.location(), toQuery(not.field(), false).query)); + Expression e = not.field(); + Query wrappedQuery = toQuery(not.field(), false).query; + Query q = wrappedQuery instanceof ScriptQuery ? 
new ScriptQuery(not.location(), + not.asScript()) : new NotQuery(not.location(), wrappedQuery); + + if (e instanceof FieldAttribute) { + query = wrapIfNested(q, e); + } + + query = q; } return new QueryTranslation(query, aggFilter); @@ -547,8 +556,14 @@ final class QueryTranslator { if (onAggs) { aggFilter = new AggFilter(isNotNull.id().toString(), isNotNull.asScript()); } else { - query = handleQuery(isNotNull, isNotNull.field(), - () -> new ExistsQuery(isNotNull.location(), nameOf(isNotNull.field()))); + Query q = null; + if (isNotNull.field() instanceof FieldAttribute) { + q = new ExistsQuery(isNotNull.location(), nameOf(isNotNull.field())); + } else { + q = new ScriptQuery(isNotNull.location(), isNotNull.asScript()); + } + final Query qu = q; + query = handleQuery(isNotNull, isNotNull.field(), () -> qu); } return new QueryTranslation(query, aggFilter); @@ -565,8 +580,15 @@ final class QueryTranslator { if (onAggs) { aggFilter = new AggFilter(isNull.id().toString(), isNull.asScript()); } else { - query = handleQuery(isNull, isNull.field(), - () -> new NotQuery(isNull.location(), new ExistsQuery(isNull.location(), nameOf(isNull.field())))); + Query q = null; + if (isNull.field() instanceof FieldAttribute) { + q = new NotQuery(isNull.location(), new ExistsQuery(isNull.location(), nameOf(isNull.field()))); + } else { + q = new ScriptQuery(isNull.location(), isNull.asScript()); + } + final Query qu = q; + + query = handleQuery(isNull, isNull.field(), () -> qu); } return new QueryTranslation(query, aggFilter); @@ -678,7 +700,14 @@ final class QueryTranslator { aggFilter = new AggFilter(at.id().toString(), in.asScript()); } else { - query = handleQuery(in, ne, () -> new TermsQuery(in.location(), ne.name(), in.list())); + Query q = null; + if (in.value() instanceof FieldAttribute) { + q = new TermsQuery(in.location(), ne.name(), in.list()); + } else { + q = new ScriptQuery(in.location(), in.asScript()); + } + Query qu = q; + query = handleQuery(in, ne, () -> qu); } 
return new QueryTranslation(query, aggFilter); } @@ -719,6 +748,25 @@ final class QueryTranslator { } } } + + static class Scalars extends ExpressionTranslator { + + @Override + protected QueryTranslation asQuery(ScalarFunction f, boolean onAggs) { + ScriptTemplate script = f.asScript(); + + Query query = null; + AggFilter aggFilter = null; + + if (onAggs) { + aggFilter = new AggFilter(f.id().toString(), script); + } else { + query = handleQuery(f, f, () -> new ScriptQuery(f.location(), script)); + } + + return new QueryTranslation(query, aggFilter); + } + } // @@ -862,8 +910,9 @@ final class QueryTranslator { protected static Query handleQuery(ScalarFunction sf, Expression field, Supplier query) { + Query q = query.get(); if (field instanceof FieldAttribute) { - return wrapIfNested(query.get(), field); + return wrapIfNested(q, field); } return new ScriptQuery(sf.location(), sf.asScript()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 989f94672df..cce721e78fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -45,7 +45,7 @@ public class TransportSqlClearCursorAction extends HandledTransportAction listener) { Cursor cursor = Cursors.decodeFromString(request.getCursor()); planExecutor.cleanCursor( - new Configuration(DateUtils.UTC_TZ, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, + new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, request.mode(), "", ""), cursor, ActionListener.wrap( success -> listener.onResponse(new SqlClearCursorResponse(success)), listener::onFailure)); diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 5a794572b90..738cd77af1e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -71,7 +71,7 @@ public class TransportSqlQueryAction extends HandledTransportAction createSourceBuilder() { return new DateHistogramValuesSourceBuilder(id()) .interval(interval) - .timeZone(DateTimeZone.forTimeZone(timeZone)); + .timeZone(DateUtils.zoneIdToDateTimeZone(zoneId)); } @Override protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) { - return new GroupByDateHistogram(id, fieldName, script, direction, interval, timeZone); + return new GroupByDateHistogram(id, fieldName, script, direction, interval, zoneId); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), interval, timeZone); + return Objects.hash(super.hashCode(), interval, zoneId); } @Override @@ -60,7 +60,7 @@ public class GroupByDateHistogram extends GroupByKey { if (super.equals(obj)) { GroupByDateHistogram other = (GroupByDateHistogram) obj; return Objects.equals(interval, other.interval) - && Objects.equals(timeZone, other.timeZone); + && Objects.equals(zoneId, other.zoneId); } return false; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java index 66c05a1339d..95ab6b3b410 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java @@ -7,7 +7,7 @@ package 
org.elasticsearch.xpack.sql.querydsl.container; import org.elasticsearch.xpack.sql.execution.search.AggRef; -import java.util.TimeZone; +import java.time.ZoneId; /** * Reference to a GROUP BY agg (typically this gets translated to a composite key). @@ -20,12 +20,12 @@ public class GroupByRef extends AggRef { private final String key; private final Property property; - private final TimeZone timeZone; + private final ZoneId zoneId; - public GroupByRef(String key, Property property, TimeZone timeZone) { + public GroupByRef(String key, Property property, ZoneId zoneId) { this.key = key; this.property = property == null ? Property.VALUE : property; - this.timeZone = timeZone; + this.zoneId = zoneId; } public String key() { @@ -36,8 +36,8 @@ public class GroupByRef extends AggRef { return property; } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java index 4e2965809f2..6eb6ad19ad4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java @@ -10,12 +10,12 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.proto.Mode; +import java.time.ZoneId; import java.time.ZonedDateTime; -import java.util.TimeZone; // Typed object holding properties for a given query public class Configuration { - private final TimeZone timeZone; + private final ZoneId zoneId; private final int pageSize; private final TimeValue requestTimeout; private final TimeValue pageTimeout; @@ -27,9 +27,9 @@ public class Configuration { @Nullable private QueryBuilder filter; - public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, 
TimeValue pageTimeout, QueryBuilder filter, Mode mode, + public Configuration(ZoneId zi, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode, String username, String clusterName) { - this.timeZone = tz; + this.zoneId = zi.normalized(); this.pageSize = pageSize; this.requestTimeout = requestTimeout; this.pageTimeout = pageTimeout; @@ -37,11 +37,11 @@ public class Configuration { this.mode = mode == null ? Mode.PLAIN : mode; this.username = username; this.clusterName = clusterName; - this.now = ZonedDateTime.now(timeZone.toZoneId().normalized()); + this.now = ZonedDateTime.now(zoneId); } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } public int pageSize() { @@ -74,4 +74,4 @@ public class Configuration { public ZonedDateTime now() { return now; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java index 8e774bf6a4f..6aa56914a63 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java @@ -16,15 +16,13 @@ import java.time.LocalDateTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; -import java.util.TimeZone; public class DateUtils { // TODO: do we have a java.time based parser we can use instead? private static final DateTimeFormatter UTC_DATE_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC(); - public static TimeZone UTC_TZ = TimeZone.getTimeZone("UTC"); - public static ZoneId UTC_ZI = ZoneId.of("Z"); + public static ZoneId UTC = ZoneId.of("Z"); private DateUtils() {} @@ -33,7 +31,7 @@ public class DateUtils { * Creates a date from the millis since epoch (thus the time-zone is UTC). 
*/ public static ZonedDateTime of(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC_ZI); + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC); } /** diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index b5b19004eee..4e9fc1475e3 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -130,4 +130,9 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String space(Number) String substring(String, Number, Number) String ucase(String) + +# +# Casting +# + def cast(Object, String) } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java index d0c1c06239d..cd6fa79cb55 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java @@ -15,7 +15,7 @@ public class TestUtils { private TestUtils() {} - public static final Configuration TEST_CFG = new Configuration(DateUtils.UTC_TZ, Protocol.FETCH_SIZE, + public static final Configuration TEST_CFG = new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 18f544767fa..fcb46d7f8d4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -212,7 +212,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } public void testGroupByOrderByScalarOverNonGrouped() { - assertEquals("1:50: Cannot order by non-grouped column [YEAR(date [UTC])], expected [text]", + assertEquals("1:50: Cannot order by non-grouped column [YEAR(date [Z])], expected [text]", error("SELECT MAX(int) FROM test GROUP BY text ORDER BY YEAR(date)")); } @@ -222,7 +222,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } public void testGroupByOrderByScalarOverNonGrouped_WithHaving() { - assertEquals("1:71: Cannot order by non-grouped column [YEAR(date [UTC])], expected [text]", + assertEquals("1:71: Cannot order by non-grouped column [YEAR(date [Z])], expected [text]", error("SELECT MAX(int) FROM test GROUP BY text HAVING MAX(int) > 10 ORDER BY YEAR(date)")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java index c0125a365aa..135ae74dd20 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property; import org.elasticsearch.xpack.sql.util.DateUtils; import java.io.IOException; -import java.util.TimeZone; +import java.time.ZoneId; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -24,7 +24,7 @@ import static java.util.Collections.singletonMap; public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase { public static CompositeKeyExtractor 
randomCompositeKeyExtractor() { - return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeTimeZone()); + return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeZone()); } @Override @@ -39,13 +39,13 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< @Override protected CompositeKeyExtractor mutateInstance(CompositeKeyExtractor instance) throws IOException { - return new CompositeKeyExtractor(instance.key() + "mutated", instance.property(), instance.timeZone()); + return new CompositeKeyExtractor(instance.key() + "mutated", instance.property(), instance.zoneId()); } public void testExtractBucketCount() { Bucket bucket = new TestBucket(emptyMap(), randomLong(), new Aggregations(emptyList())); CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.COUNT, - randomTimeZone()); + randomZone()); assertEquals(bucket.getDocCount(), extractor.extract(bucket)); } @@ -58,15 +58,15 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< } public void testExtractDate() { - CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeTimeZone()); + CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeZone()); long millis = System.currentTimeMillis(); Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), new Aggregations(emptyList())); - assertEquals(DateUtils.of(millis, extractor.timeZone().toZoneId()), extractor.extract(bucket)); + assertEquals(DateUtils.of(millis, extractor.zoneId()), extractor.extract(bucket)); } public void testExtractIncorrectDateKey() { - CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomTimeZone()); + CompositeKeyExtractor extractor = new 
CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone()); Object value = new Object(); Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList())); @@ -79,7 +79,7 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< * back to DateTimeZone which we currently still need to do internally, * e.g. in bwc serialization and in the extract() method */ - private static TimeZone randomSafeTimeZone() { - return randomValueOtherThanMany(tz -> tz.getID().startsWith("SystemV"), () -> randomTimeZone()); + private static ZoneId randomSafeZone() { + return randomValueOtherThanMany(zi -> zi.getId().startsWith("SystemV"), () -> randomZone()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java index cbd2bf8bfde..a23fbff4a99 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java @@ -20,9 +20,9 @@ import org.elasticsearch.xpack.sql.tree.LocationTests; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import java.time.ZoneId; import java.util.Arrays; import java.util.List; -import java.util.TimeZone; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.sql.expression.function.FunctionRegistry.def; @@ -104,10 +104,10 @@ public class FunctionRegistryTests extends ESTestCase { public void testDateTimeFunction() { boolean urIsExtract = randomBoolean(); UnresolvedFunction ur = uf(urIsExtract ? 
EXTRACT : STANDARD, mock(Expression.class)); - TimeZone providedTimeZone = randomTimeZone(); + ZoneId providedTimeZone = randomZone().normalized(); Configuration providedConfiguration = randomConfiguration(providedTimeZone); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> { - assertEquals(providedTimeZone, tz); + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, ZoneId zi) -> { + assertEquals(providedTimeZone, zi); assertSame(e, ur.children().get(0)); return new DummyFunction(l); }, "DUMMY_FUNCTION")); @@ -232,7 +232,7 @@ public class FunctionRegistryTests extends ESTestCase { } private Configuration randomConfiguration() { - return new Configuration(randomTimeZone(), + return new Configuration(randomZone(), randomIntBetween(0, 1000), new TimeValue(randomNonNegativeLong()), new TimeValue(randomNonNegativeLong()), @@ -242,8 +242,8 @@ public class FunctionRegistryTests extends ESTestCase { randomAlphaOfLength(10)); } - private Configuration randomConfiguration(TimeZone providedTimeZone) { - return new Configuration(providedTimeZone, + private Configuration randomConfiguration(ZoneId providedZoneId) { + return new Configuration(providedZoneId, randomIntBetween(0, 1000), new TimeValue(randomNonNegativeLong()), new TimeValue(randomNonNegativeLong()), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java index e61690decdf..de2fc69a263 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java @@ -19,8 +19,7 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import 
org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.TypesTests; - -import java.util.TimeZone; +import org.elasticsearch.xpack.sql.util.DateUtils; public class DatabaseFunctionTests extends ESTestCase { @@ -29,7 +28,7 @@ public class DatabaseFunctionTests extends ESTestCase { SqlParser parser = new SqlParser(); EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true)); Analyzer analyzer = new Analyzer( - new Configuration(TimeZone.getTimeZone("UTC"), Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, + new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), null, clusterName), new FunctionRegistry(), IndexResolution.valid(test), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java index 047c2a01842..7b1e86af5d5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java @@ -19,8 +19,7 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.TypesTests; - -import java.util.TimeZone; +import org.elasticsearch.xpack.sql.util.DateUtils; public class UserFunctionTests extends ESTestCase { @@ -28,7 +27,7 @@ public class UserFunctionTests extends ESTestCase { SqlParser parser = new SqlParser(); EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true)); Analyzer analyzer = new Analyzer( - new Configuration(TimeZone.getTimeZone("UTC"), Protocol.FETCH_SIZE, 
Protocol.REQUEST_TIMEOUT, + new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), null, randomAlphaOfLengthBetween(1, 15)), new FunctionRegistry(), IndexResolution.valid(test), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java index 30c5fa6cb4e..03f9c949d29 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java @@ -10,12 +10,11 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; import java.io.IOException; -import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class DateTimeProcessorTests extends AbstractWireSerializingTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); public static DateTimeProcessor randomDateTimeProcessor() { return new DateTimeProcessor(randomFrom(DateTimeExtractor.values()), UTC); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java index 305fa528e1f..164fe1fe931 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java @@ -20,7 +20,7 @@ public class DateTimeTestUtils { public static ZonedDateTime dateTime(int year, int month, int day, int hour, int minute) { DateTime dateTime = new DateTime(year, month, day, hour, minute, DateTimeZone.UTC); - ZonedDateTime zdt = ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC_ZI); + ZonedDateTime zdt = ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC); assertEquals(dateTime.getMillis() / 1000, zdt.toEpochSecond()); return zdt; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java index c134446a2c3..6bd4a8fe1ba 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java @@ -9,24 +9,24 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.type.DataType; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class DayOfYearTests extends ESTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); public void testAsColumnProcessor() { assertEquals(1, extract(dateTime(0), UTC)); - assertEquals(1, extract(dateTime(0), TimeZone.getTimeZone("GMT+01:00"))); - assertEquals(365, extract(dateTime(0), TimeZone.getTimeZone("GMT-01:00"))); + assertEquals(1, extract(dateTime(0), ZoneId.of("GMT+01:00"))); + assertEquals(365, extract(dateTime(0), 
ZoneId.of("GMT-01:00"))); } - private Object extract(Object value, TimeZone timeZone) { - return build(value, timeZone).asPipe().asProcessor().process(value); + private Object extract(Object value, ZoneId zoneId) { + return build(value, zoneId).asPipe().asProcessor().process(value); } - private DayOfYear build(Object value, TimeZone timeZone) { - return new DayOfYear(null, new Literal(null, value, DataType.DATE), timeZone); + private DayOfYear build(Object value, ZoneId zoneId) { + return new DayOfYear(null, new Literal(null, value, DataType.DATE), zoneId); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java index 379cf5f7e09..3531152c69b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java @@ -13,14 +13,13 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDate import org.junit.Assume; import java.io.IOException; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); - public static NamedDateTimeProcessor randomNamedDateTimeProcessor() { return new NamedDateTimeProcessor(randomFrom(NameExtractor.values()), UTC); } @@ -56,7 +55,7 @@ public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase public void testValidDayNamesWithNonUTCTimeZone() { 
assumeJava9PlusAndCompatLocaleProviderSetting(); - NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, TimeZone.getTimeZone("GMT-10:00")); + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, ZoneId.of("GMT-10:00")); assertEquals("Wednesday", proc.process(dateTime(0))); assertEquals("Friday", proc.process(dateTime(-64164233612338L))); assertEquals("Monday", proc.process(dateTime(64164233612338L))); @@ -83,7 +82,7 @@ public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase public void testValidMonthNamesWithNonUTCTimeZone() { assumeJava9PlusAndCompatLocaleProviderSetting(); - NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.MONTH_NAME, TimeZone.getTimeZone("GMT-3:00")); + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.MONTH_NAME, ZoneId.of("GMT-03:00")); assertEquals("December", proc.process(dateTime(0))); assertEquals("August", proc.process(dateTime(-64165813612338L))); // GMT: Tuesday, September 1, -0064 2:53:07.662 AM assertEquals("April", proc.process(dateTime(64164233612338L))); // GMT: Monday, April 14, 4003 2:13:32.338 PM diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java index 23cffe514b9..6fb007e4321 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java @@ -10,13 +10,13 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import java.io.IOException; -import 
java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class NonIsoDateTimeProcessorTests extends AbstractWireSerializingTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); public static NonIsoDateTimeProcessor randomNonISODateTimeProcessor() { return new NonIsoDateTimeProcessor(randomFrom(NonIsoDateTimeExtractor.values()), UTC); @@ -52,7 +52,7 @@ public class NonIsoDateTimeProcessorTests extends AbstractWireSerializingTestCas } public void testNonISOWeekOfYearInNonUTCTimeZone() { - NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.WEEK_OF_YEAR, TimeZone.getTimeZone("GMT-10:00")); + NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.WEEK_OF_YEAR, ZoneId.of("GMT-10:00")); assertEquals(2, proc.process(dateTime(568372930000L))); assertEquals(5, proc.process(dateTime(981278530000L))); assertEquals(7, proc.process(dateTime(224241730000L))); @@ -78,7 +78,7 @@ public class NonIsoDateTimeProcessorTests extends AbstractWireSerializingTestCas } public void testNonISODayOfWeekInNonUTCTimeZone() { - NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.DAY_OF_WEEK, TimeZone.getTimeZone("GMT-10:00")); + NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.DAY_OF_WEEK, ZoneId.of("GMT-10:00")); assertEquals(2, proc.process(dateTime(568372930000L))); assertEquals(7, proc.process(dateTime(981278530000L))); assertEquals(2, proc.process(dateTime(224241730000L))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java index 29e5d31db21..353fe0834a3 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.test.ESTestCase; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class QuarterProcessorTests extends ESTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); - public void testQuarterWithUTCTimezone() { QuarterProcessor proc = new QuarterProcessor(UTC); @@ -30,12 +29,12 @@ public class QuarterProcessorTests extends ESTestCase { } public void testValidDayNamesWithNonUTCTimeZone() { - QuarterProcessor proc = new QuarterProcessor(TimeZone.getTimeZone("GMT-10:00")); + QuarterProcessor proc = new QuarterProcessor(ZoneId.of("GMT-10:00")); assertEquals(4, proc.process(dateTime(0L))); assertEquals(4, proc.process(dateTime(-5400, 1, 1, 5, 0))); assertEquals(1, proc.process(dateTime(30, 4, 1, 9, 59))); - proc = new QuarterProcessor(TimeZone.getTimeZone("GMT+10:00")); + proc = new QuarterProcessor(ZoneId.of("GMT+10:00")); assertEquals(4, proc.process(dateTime(10902, 9, 30, 14, 1))); assertEquals(3, proc.process(dateTime(10902, 9, 30, 13, 59))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java index 748718d0a3a..2618392a067 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java @@ -78,7 +78,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testAddYearMonthIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -87,7 +87,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testAddDayTimeIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -96,7 +96,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testAddDayTimeIntervalToDateReverse() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -125,7 +125,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testSubYearMonthIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -134,7 +134,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testSubYearMonthIntervalToDateIllegal() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -149,7 +149,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public 
void testSubDayTimeIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 514c36ddf72..2412342c69c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfYear; -import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.IsoWeekOfYear; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin; @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.E; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Floor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Ascii; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Repeat; import 
org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.Range; @@ -87,11 +88,11 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.util.CollectionUtils; +import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.TimeZone; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -99,6 +100,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; import static org.hamcrest.Matchers.contains; public class OptimizerTests extends ESTestCase { @@ -320,14 +322,13 @@ public class OptimizerTests extends ESTestCase { public void testConstantFoldingLikes() { assertEquals(Literal.TRUE, - new ConstantFolding().rule(new Like(EMPTY, Literal.of(EMPTY, "test_emp"), new LikePattern(EMPTY, "test%", (char) 0))) + new ConstantFolding().rule(new Like(EMPTY, Literal.of(EMPTY, "test_emp"), new LikePattern("test%", (char) 0))) .canonical()); assertEquals(Literal.TRUE, - new ConstantFolding().rule(new RLike(EMPTY, Literal.of(EMPTY, "test_emp"), Literal.of(EMPTY, "test.emp"))).canonical()); + new ConstantFolding().rule(new RLike(EMPTY, Literal.of(EMPTY, "test_emp"), "test.emp")).canonical()); } public void testConstantFoldingDatetime() { - final TimeZone UTC = TimeZone.getTimeZone("UTC"); Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATE); assertEquals(2018, foldFunction(new Year(EMPTY, cast, UTC))); assertEquals(1, foldFunction(new MonthOfYear(EMPTY, cast, UTC))); @@ -407,7 +408,7 @@ public class 
OptimizerTests extends ESTestCase { public void testGenericNullableExpression() { FoldNull rule = new FoldNull(); // date-time - assertNullLiteral(rule.rule(new DayName(EMPTY, Literal.NULL, randomTimeZone()))); + assertNullLiteral(rule.rule(new DayName(EMPTY, Literal.NULL, randomZone()))); // math function assertNullLiteral(rule.rule(new Cos(EMPTY, Literal.NULL))); // string function @@ -418,7 +419,7 @@ public class OptimizerTests extends ESTestCase { // comparison assertNullLiteral(rule.rule(new GreaterThan(EMPTY, getFieldAttribute(), Literal.NULL))); // regex - assertNullLiteral(rule.rule(new RLike(EMPTY, getFieldAttribute(), Literal.NULL))); + assertNullLiteral(rule.rule(new RLike(EMPTY, Literal.NULL, "123"))); } public void testSimplifyCoalesceNulls() { @@ -520,6 +521,13 @@ public class OptimizerTests extends ESTestCase { assertEquals(ONE, e.children().get(0)); assertEquals(TWO, e.children().get(1)); } + + public void testConcatFoldingIsNotNull() { + FoldNull foldNull = new FoldNull(); + assertEquals(1, foldNull.rule(new Concat(EMPTY, Literal.NULL, ONE)).fold()); + assertEquals(1, foldNull.rule(new Concat(EMPTY, ONE, Literal.NULL)).fold()); + assertEquals(StringUtils.EMPTY, foldNull.rule(new Concat(EMPTY, Literal.NULL, Literal.NULL)).fold()); + } // // Logical simplifications diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index 6a6a1e2dd8e..bb85921369a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -240,10 +240,10 @@ public class QueryFolderTests extends ESTestCase { assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec ee = (EsQueryExec) p; assertThat(ee.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - endsWith("{\"script\":{" + - 
"\"source\":\"InternalSqlScriptUtils.docValue(doc,params.v0)\",\"lang\":\"painless\"," + - "\"params\":{\"v0\":\"keyword\"}},\"missing_bucket\":true," + - "\"value_type\":\"ip\",\"order\":\"asc\"}}}]}}}")); + endsWith("{\"script\":{\"source\":\"InternalSqlScriptUtils.cast(" + + "InternalSqlScriptUtils.docValue(doc,params.v0),params.v1)\"," + + "\"lang\":\"painless\",\"params\":{\"v0\":\"keyword\",\"v1\":\"IP\"}}," + + "\"missing_bucket\":true,\"value_type\":\"ip\",\"order\":\"asc\"}}}]}}}")); assertEquals(2, ee.output().size()); assertThat(ee.output().get(0).toString(), startsWith("COUNT(1){a->")); assertThat(ee.output().get(1).toString(), startsWith("a{s->")); @@ -263,4 +263,14 @@ public class QueryFolderTests extends ESTestCase { assertThat(ee.output().get(0).toString(), startsWith("COUNT(1){a->")); assertThat(ee.output().get(1).toString(), startsWith("a{s->")); } + + public void testConcatIsNotFoldedForNull() { + PhysicalPlan p = plan("SELECT keyword FROM test WHERE CONCAT(keyword, null) IS NULL"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(1, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java index 963498bb9b6..cc91cdf6eab 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.conditional.IfNull; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextPredicate; import 
org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InPipe; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.tree.NodeTests.ChildrenAreAProperty; import org.elasticsearch.xpack.sql.tree.NodeTests.Dummy; @@ -449,14 +450,12 @@ public class NodeSubclassTests> extends ESTestCas } return b.toString(); } - } else if (toBuildClass == LikePattern.class) { - /* - * The pattern and escape character have to be valid together - * so we pick an escape character that isn't used - */ - if (argClass == char.class) { - return randomFrom('\\', '|', '/', '`'); + } else if (toBuildClass == Like.class) { + + if (argClass == LikePattern.class) { + return new LikePattern(randomAlphaOfLength(16), randomFrom('\\', '|', '/', '`')); } + } else if (toBuildClass == Histogram.class) { if (argClass == Expression.class) { return LiteralTests.randomLiteral(); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index 064014a321d..ffe68e1765f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -111,7 +111,7 @@ public class DataTypeConversionTests extends ESTestCase { assertEquals(dateTime(18000000L), conversion.convert("1970-01-01T00:00:00-05:00")); // double check back and forth conversion - ZonedDateTime dt = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime dt = ZonedDateTime.now(DateUtils.UTC); Conversion forward = conversionFor(DATE, KEYWORD); Conversion back = conversionFor(KEYWORD, DATE); assertEquals(dt, back.convert(forward.convert(dt))); diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml index 43f25a11db0..81389ac8524 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml @@ -5,7 +5,7 @@ setup: - do: cluster.health: - wait_for_status: yellow + wait_for_status: yellow - do: security.put_user: @@ -79,7 +79,93 @@ teardown: body: token: $token - - match: { created: true } + - match: { created: true} + - match: { invalidated_tokens: 1 } + - match: { previously_invalidated_tokens: 0 } + - match: { error_count: 0 } + + - do: + catch: unauthorized + headers: + Authorization: Bearer ${token} + security.authenticate: {} + +--- +"Test invalidate user's tokens": + + - do: + security.get_token: + body: + grant_type: "password" + username: "token_user" + password: "x-pack-test-password" + + - match: { type: "Bearer" } + - is_true: access_token + - set: { access_token: token } + - match: { expires_in: 1200 } + - is_false: scope + + - do: + headers: + Authorization: Bearer ${token} + security.authenticate: {} + + - match: { username: "token_user" } + - match: { roles.0: "superuser" } + - match: { full_name: "Token User" } + + - do: + security.invalidate_token: + body: + username: "token_user" + + - match: { created: true} + - match: { invalidated_tokens: 2 } + - match: { previously_invalidated_tokens: 0 } + - match: { error_count: 0 } + + - do: + catch: unauthorized + headers: + Authorization: Bearer ${token} + security.authenticate: {} + + +--- +"Test invalidate realm's tokens": + + - do: + security.get_token: + body: + grant_type: "password" + username: "token_user" + password: "x-pack-test-password" + + - match: { type: "Bearer" } + - is_true: access_token + - set: { access_token: token } + - match: { expires_in: 1200 } + - is_false: scope + + - do: + headers: + Authorization: Bearer ${token} + 
security.authenticate: {} + + - match: { username: "token_user" } + - match: { roles.0: "superuser" } + - match: { full_name: "Token User" } + + - do: + security.invalidate_token: + body: + realm_name: "default_native" + + - match: { created: true} + - match: { invalidated_tokens: 2 } + - match: { previously_invalidated_tokens: 0 } + - match: { error_count: 0 } - do: catch: unauthorized diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java index f62de14b931..c2a079e519f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.common.util.LazyInitializable; import java.io.IOException; import java.io.InputStream; @@ -35,8 +36,8 @@ public abstract class NotificationService { private final Settings bootSettings; private final List> pluginSecureSettings; // all are guarded by this - private volatile Map accounts; - private volatile Account defaultAccount; + private volatile Map> accounts; + private volatile LazyInitializable defaultAccount; // cached cluster setting, required when recreating the notification clients // using the new "reloaded" secure settings private volatile Settings cachedClusterSettings; @@ -59,7 +60,7 @@ public abstract class NotificationService { this.pluginSecureSettings = pluginSecureSettings; } - private synchronized void clusterSettingsConsumer(Settings settings) { + protected synchronized void clusterSettingsConsumer(Settings settings) { // update 
cached cluster settings this.cachedClusterSettings = settings; // use these new dynamic cluster settings together with the previously cached @@ -102,13 +103,13 @@ public abstract class NotificationService { public Account getAccount(String name) { // note this is not final since we mock it in tests and that causes // trouble since final methods can't be mocked... - final Map accounts; - final Account defaultAccount; + final Map> accounts; + final LazyInitializable defaultAccount; synchronized (this) { // must read under sync block otherwise it might be inconsistent accounts = this.accounts; defaultAccount = this.defaultAccount; } - Account theAccount = accounts.getOrDefault(name, defaultAccount); + LazyInitializable theAccount = accounts.getOrDefault(name, defaultAccount); if (theAccount == null && name == null) { throw new IllegalArgumentException("no accounts of type [" + type + "] configured. " + "Please set up an account using the [xpack.notification." + type +"] settings"); @@ -116,7 +117,7 @@ public abstract class NotificationService { if (theAccount == null) { throw new IllegalArgumentException("no account found for name: [" + name + "]"); } - return theAccount; + return theAccount.getOrCompute(); } private String getNotificationsAccountPrefix() { @@ -124,27 +125,27 @@ public abstract class NotificationService { } private Set getAccountNames(Settings settings) { - // secure settings are not responsible for the client names - final Settings noSecureSettings = Settings.builder().put(settings, false).build(); - return noSecureSettings.getByPrefix(getNotificationsAccountPrefix()).names(); + return settings.getByPrefix(getNotificationsAccountPrefix()).names(); } private @Nullable String getDefaultAccountName(Settings settings) { return settings.get("xpack.notification." 
+ type + ".default_account"); } - private Map createAccounts(Settings settings, Set accountNames, + private Map> createAccounts(Settings settings, Set accountNames, BiFunction accountFactory) { - final Map accounts = new HashMap<>(); + final Map> accounts = new HashMap<>(); for (final String accountName : accountNames) { final Settings accountSettings = settings.getAsSettings(getNotificationsAccountPrefix() + accountName); - final Account account = accountFactory.apply(accountName, accountSettings); - accounts.put(accountName, account); + accounts.put(accountName, new LazyInitializable<>(() -> { + return accountFactory.apply(accountName, accountSettings); + })); } return Collections.unmodifiableMap(accounts); } - private @Nullable Account findDefaultAccountOrNull(Settings settings, Map accounts) { + private @Nullable LazyInitializable findDefaultAccountOrNull(Settings settings, + Map> accounts) { final String defaultAccountName = getDefaultAccountName(settings); if (defaultAccountName == null) { if (accounts.isEmpty()) { @@ -153,7 +154,7 @@ public abstract class NotificationService { return accounts.values().iterator().next(); } } else { - final Account account = accounts.get(defaultAccountName); + final LazyInitializable account = accounts.get(defaultAccountName); if (account == null) { throw new SettingsException("could not find default account [" + defaultAccountName + "]"); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index 13761948adc..287b3976dea 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectPath; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -53,7 +54,6 @@ import org.elasticsearch.xpack.core.watcher.execution.Wid; import org.elasticsearch.xpack.core.watcher.history.WatchRecord; import org.elasticsearch.xpack.core.watcher.input.ExecutableInput; import org.elasticsearch.xpack.core.watcher.input.Input; -import org.elasticsearch.common.xcontent.ObjectPath; import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.core.watcher.transform.Transform; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; @@ -88,6 +88,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Arrays.asList; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -1148,7 +1149,8 @@ public class ExecutionServiceTests extends ESTestCase { if (request.id().equals(id)) { listener.onResponse(response); } else { - GetResult notFoundResult = new GetResult(request.index(), request.type(), request.id(), -1, false, null, null); + GetResult notFoundResult = + new GetResult(request.index(), request.type(), request.id(), UNASSIGNED_SEQ_NO, 0, -1, false, null, null); listener.onResponse(new GetResponse(notFoundResult)); } return null; @@ -1162,7 +1164,8 @@ public class ExecutionServiceTests extends ESTestCase { if (request.id().equals(id)) { listener.onFailure(e); } else { - 
GetResult notFoundResult = new GetResult(request.index(), request.type(), request.id(), -1, false, null, null); + GetResult notFoundResult = + new GetResult(request.index(), request.type(), request.id(), UNASSIGNED_SEQ_NO, 0, -1, false, null, null); listener.onResponse(new GetResponse(notFoundResult)); } return null; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java index 184ff56c213..efbefdd6408 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java @@ -5,12 +5,27 @@ */ package org.elasticsearch.xpack.watcher.notification; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureSettings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.watcher.notification.NotificationService; +import java.io.IOException; +import java.io.InputStream; +import java.security.GeneralSecurityException; +import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.is; @@ -25,6 +40,7 @@ public class NotificationServiceTests extends ESTestCase { assertThat(service.getAccount(accountName), is(accountName)); // single account, this 
will also be the default assertThat(service.getAccount("non-existing"), is(accountName)); + assertThat(service.getAccount(null), is(accountName)); } public void testMultipleAccountsWithExistingDefault() { @@ -80,16 +96,160 @@ public class NotificationServiceTests extends ESTestCase { is("no accounts of type [test] configured. Please set up an account using the [xpack.notification.test] settings")); } + public void testAccountWithSecureSettings() throws Exception { + final Setting secureSetting1 = SecureSetting.secureString("xpack.notification.test.account.secure_only", null); + final Setting secureSetting2 = SecureSetting.secureString("xpack.notification.test.account.mixed.secure", null); + final Map secureSettingsMap = new HashMap<>(); + secureSettingsMap.put(secureSetting1.getKey(), "secure_only".toCharArray()); + secureSettingsMap.put(secureSetting2.getKey(), "mixed_secure".toCharArray()); + Settings settings = Settings.builder() + .put("xpack.notification.test.account.unsecure_only", "bar") + .put("xpack.notification.test.account.mixed.unsecure", "mixed_unsecure") + .setSecureSettings(secureSettingsFromMap(secureSettingsMap)) + .build(); + TestNotificationService service = new TestNotificationService(settings, Arrays.asList(secureSetting1, secureSetting2)); + assertThat(service.getAccount("secure_only"), is("secure_only")); + assertThat(service.getAccount("unsecure_only"), is("unsecure_only")); + assertThat(service.getAccount("mixed"), is("mixed")); + assertThat(service.getAccount(null), anyOf(is("secure_only"), is("unsecure_only"), is("mixed"))); + } + + public void testAccountCreationCached() { + String accountName = randomAlphaOfLength(10); + Settings settings = Settings.builder().put("xpack.notification.test.account." 
+ accountName, "bar").build(); + final AtomicInteger validationInvocationCount = new AtomicInteger(0); + + TestNotificationService service = new TestNotificationService(settings, (String name, Settings accountSettings) -> { + validationInvocationCount.incrementAndGet(); + }); + assertThat(validationInvocationCount.get(), is(0)); + assertThat(service.getAccount(accountName), is(accountName)); + assertThat(validationInvocationCount.get(), is(1)); + if (randomBoolean()) { + assertThat(service.getAccount(accountName), is(accountName)); + } else { + assertThat(service.getAccount(null), is(accountName)); + } + // counter is still 1 because the account is cached + assertThat(validationInvocationCount.get(), is(1)); + } + + public void testAccountUpdateSettings() throws Exception { + final Setting secureSetting = SecureSetting.secureString("xpack.notification.test.account.x.secure", null); + final Setting setting = Setting.simpleString("xpack.notification.test.account.x.dynamic", Setting.Property.Dynamic, + Setting.Property.NodeScope); + final AtomicReference secureSettingValue = new AtomicReference(randomAlphaOfLength(4)); + final AtomicReference settingValue = new AtomicReference(randomAlphaOfLength(4)); + final Map secureSettingsMap = new HashMap<>(); + final AtomicInteger validationInvocationCount = new AtomicInteger(0); + secureSettingsMap.put(secureSetting.getKey(), secureSettingValue.get().toCharArray()); + final Settings.Builder settingsBuilder = Settings.builder() + .put(setting.getKey(), settingValue.get()) + .setSecureSettings(secureSettingsFromMap(secureSettingsMap)); + final TestNotificationService service = new TestNotificationService(settingsBuilder.build(), Arrays.asList(secureSetting), + (String name, Settings accountSettings) -> { + assertThat(accountSettings.get("dynamic"), is(settingValue.get())); + assertThat(SecureSetting.secureString("secure", null).get(accountSettings), is(secureSettingValue.get())); + validationInvocationCount.incrementAndGet(); + 
}); + assertThat(validationInvocationCount.get(), is(0)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(1)); + // update secure setting only + updateSecureSetting(secureSettingValue, secureSetting, secureSettingsMap, settingsBuilder, service); + assertThat(validationInvocationCount.get(), is(1)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(2)); + updateDynamicClusterSetting(settingValue, setting, settingsBuilder, service); + assertThat(validationInvocationCount.get(), is(2)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(3)); + // update both + if (randomBoolean()) { + // update secure first + updateSecureSetting(secureSettingValue, secureSetting, secureSettingsMap, settingsBuilder, service); + // update cluster second + updateDynamicClusterSetting(settingValue, setting, settingsBuilder, service); + } else { + // update cluster first + updateDynamicClusterSetting(settingValue, setting, settingsBuilder, service); + // update secure second + updateSecureSetting(secureSettingValue, secureSetting, secureSettingsMap, settingsBuilder, service); + } + assertThat(validationInvocationCount.get(), is(3)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(4)); + } + + private static void updateDynamicClusterSetting(AtomicReference settingValue, Setting setting, + Settings.Builder settingsBuilder, TestNotificationService service) { + settingValue.set(randomAlphaOfLength(4)); + settingsBuilder.put(setting.getKey(), settingValue.get()); + service.clusterSettingsConsumer(settingsBuilder.build()); + } + + private static void updateSecureSetting(AtomicReference secureSettingValue, Setting secureSetting, + Map secureSettingsMap, Settings.Builder settingsBuilder, TestNotificationService service) { + secureSettingValue.set(randomAlphaOfLength(4)); + secureSettingsMap.put(secureSetting.getKey(), secureSettingValue.get().toCharArray()); + 
service.reload(settingsBuilder.build()); + } + private static class TestNotificationService extends NotificationService { - TestNotificationService(Settings settings) { - super("test", settings, Collections.emptyList()); + private final BiConsumer validator; + + TestNotificationService(Settings settings, List> secureSettings, BiConsumer validator) { + super("test", settings, secureSettings); + this.validator = validator; reload(settings); } + TestNotificationService(Settings settings, List> secureSettings) { + this(settings, secureSettings, (x, y) -> {}); + } + + TestNotificationService(Settings settings) { + this(settings, Collections.emptyList(), (x, y) -> {}); + } + + TestNotificationService(Settings settings, BiConsumer validator) { + this(settings, Collections.emptyList(), validator); + } + @Override protected String createAccount(String name, Settings accountSettings) { + validator.accept(name, accountSettings); return name; } } + + private static SecureSettings secureSettingsFromMap(Map secureSettingsMap) { + return new SecureSettings() { + + @Override + public boolean isLoaded() { + return true; + } + + @Override + public SecureString getString(String setting) throws GeneralSecurityException { + return new SecureString(secureSettingsMap.get(setting)); + } + + @Override + public Set getSettingNames() { + return secureSettingsMap.keySet(); + } + + @Override + public InputStream getFile(String setting) throws GeneralSecurityException { + return null; + } + + @Override + public void close() throws IOException { + } + }; + } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java index 7d3960a9344..7b5d6c7f081 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java @@ -128,7 +128,7 @@ public class HipChatServiceTests extends ESTestCase { .put("xpack.notification.hipchat.account." + accountName + ".auth_token", "_token"); SettingsException e = expectThrows(SettingsException.class, () -> new HipChatService(settingsBuilder.build(), httpClient, - new ClusterSettings(settingsBuilder.build(), new HashSet<>(HipChatService.getSettings())))); + new ClusterSettings(settingsBuilder.build(), new HashSet<>(HipChatService.getSettings()))).getAccount(null)); assertThat(e.getMessage(), containsString("missing required [room] setting for [integration] account profile")); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java index 0b57c856744..57c189d328e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java @@ -38,6 +38,7 @@ import java.time.Clock; import java.util.Collections; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.eq; @@ -67,7 +68,7 @@ public class TransportAckWatchActionTests extends ESTestCase { String watchId = "my_watch_id"; doAnswer(invocation -> { ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new GetResponse(new GetResult(Watch.INDEX, Watch.DOC_TYPE, watchId, -1, false, + listener.onResponse(new GetResponse(new GetResult(Watch.INDEX, Watch.DOC_TYPE, 
watchId, UNASSIGNED_SEQ_NO, 0, -1, false, BytesArray.EMPTY, Collections.emptyMap()))); return null; }).when(client).get(anyObject(), anyObject());