diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 6e628eab0cb..55fdcecb084 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -64,17 +64,11 @@
-
-
-
-
-
-
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 6791b5f8259..c7a54a9ac32 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -268,8 +268,14 @@ final class RequestConverters {
}
static Request sourceExists(GetRequest getRequest) {
- Request request = new Request(HttpHead.METHOD_NAME, endpoint(getRequest.index(), getRequest.type(), getRequest.id(), "_source"));
-
+ String optionalType = getRequest.type();
+ String endpoint;
+ if (optionalType.equals(MapperService.SINGLE_MAPPING_NAME)) {
+ endpoint = endpoint(getRequest.index(), "_source", getRequest.id());
+ } else {
+ endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source");
+ }
+ Request request = new Request(HttpHead.METHOD_NAME, endpoint);
Params parameters = new Params(request);
parameters.withPreference(getRequest.preference());
parameters.withRouting(getRequest.routing());
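For reference, a minimal standalone sketch of the endpoint selection above. The helper class and its `main` are illustrative; only `MapperService.SINGLE_MAPPING_NAME` (the value `"_doc"`) comes from the change. A typeless request maps to `HEAD /{index}/_source/{id}`, while an explicit type keeps the older `HEAD /{index}/{type}/{id}/_source` form.

[source,java]
--------------------------------------------------
public class SourceExistsEndpointSketch {
    // mirrors MapperService.SINGLE_MAPPING_NAME
    private static final String SINGLE_MAPPING_NAME = "_doc";

    // simplified stand-in for RequestConverters.endpoint(...), which additionally URL-encodes each part
    static String endpoint(String... parts) {
        StringBuilder builder = new StringBuilder();
        for (String part : parts) {
            builder.append('/').append(part);
        }
        return builder.toString();
    }

    static String sourceExistsEndpoint(String index, String type, String id) {
        return SINGLE_MAPPING_NAME.equals(type)
            ? endpoint(index, "_source", id)         // typeless: /index/_source/id
            : endpoint(index, type, id, "_source");  // typed:    /index/type/id/_source
    }

    public static void main(String[] args) {
        System.out.println(sourceExistsEndpoint("twitter", "_doc", "0"));   // /twitter/_source/0
        System.out.println(sourceExistsEndpoint("twitter", "tweet", "0"));  // /twitter/tweet/0/_source
    }
}
--------------------------------------------------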
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java
index 09b57e68ff5..b442336ca4d 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java
@@ -39,6 +39,10 @@ public final class AutoFollowStats {
static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors");
static final ParseField LEADER_INDEX = new ParseField("leader_index");
static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception");
+ static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters");
+ static final ParseField CLUSTER_NAME = new ParseField("cluster_name");
+ static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis");
+ static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version");
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<AutoFollowStats, Void> STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats",
@@ -48,6 +52,10 @@ public final class AutoFollowStats {
(Long) args[2],
new TreeMap<>(
((List<Map.Entry<String, ElasticsearchException>>) args[3])
+ .stream()
+ .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
+ new TreeMap<>(
+ ((List<Map.Entry<String, AutoFollowedCluster>>) args[4])
.stream()
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
));
@@ -57,6 +65,11 @@ public final class AutoFollowStats {
"auto_follow_stats_errors",
args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1]));
+ private static final ConstructingObjectParser<Map.Entry<String, AutoFollowedCluster>, Void> AUTO_FOLLOWED_CLUSTERS_PARSER =
+ new ConstructingObjectParser<>(
+ "auto_followed_clusters",
+ args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2])));
+
static {
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX);
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject(
@@ -64,26 +77,35 @@ public final class AutoFollowStats {
(p, c) -> ElasticsearchException.fromXContent(p),
AUTO_FOLLOW_EXCEPTION);
+ AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME);
+ AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS);
+ AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION);
+
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED);
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS);
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED);
STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER,
RECENT_AUTO_FOLLOW_ERRORS);
+ STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER,
+ AUTO_FOLLOWED_CLUSTERS);
}
private final long numberOfFailedFollowIndices;
private final long numberOfFailedRemoteClusterStateRequests;
private final long numberOfSuccessfulFollowIndices;
private final NavigableMap<String, ElasticsearchException> recentAutoFollowErrors;
+ private final NavigableMap<String, AutoFollowedCluster> autoFollowedClusters;
AutoFollowStats(long numberOfFailedFollowIndices,
long numberOfFailedRemoteClusterStateRequests,
long numberOfSuccessfulFollowIndices,
- NavigableMap<String, ElasticsearchException> recentAutoFollowErrors) {
+ NavigableMap<String, ElasticsearchException> recentAutoFollowErrors,
+ NavigableMap<String, AutoFollowedCluster> autoFollowedClusters) {
this.numberOfFailedFollowIndices = numberOfFailedFollowIndices;
this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests;
this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices;
this.recentAutoFollowErrors = recentAutoFollowErrors;
+ this.autoFollowedClusters = autoFollowedClusters;
}
public long getNumberOfFailedFollowIndices() {
@@ -102,4 +124,27 @@ public final class AutoFollowStats {
return recentAutoFollowErrors;
}
+ public NavigableMap<String, AutoFollowedCluster> getAutoFollowedClusters() {
+ return autoFollowedClusters;
+ }
+
+ public static class AutoFollowedCluster {
+
+ private final long timeSinceLastCheckMillis;
+ private final long lastSeenMetadataVersion;
+
+ public AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) {
+ this.timeSinceLastCheckMillis = timeSinceLastCheckMillis;
+ this.lastSeenMetadataVersion = lastSeenMetadataVersion;
+ }
+
+ public long getTimeSinceLastCheckMillis() {
+ return timeSinceLastCheckMillis;
+ }
+
+ public long getLastSeenMetadataVersion() {
+ return lastSeenMetadataVersion;
+ }
+ }
+
}
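A minimal standalone sketch of the entry-to-sorted-map pattern that the new `AUTO_FOLLOWED_CLUSTERS_PARSER` feeds into: each parsed `auto_followed_clusters` element becomes a `Map.Entry`, and the entries are collected into a `TreeMap` keyed by cluster name. The nested `AutoFollowedCluster` class below is a local stand-in for the one added above.

[source,java]
--------------------------------------------------
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.stream.Collectors;

public class AutoFollowedClustersSketch {
    // local stand-in for AutoFollowStats.AutoFollowedCluster
    static final class AutoFollowedCluster {
        final long timeSinceLastCheckMillis;
        final long lastSeenMetadataVersion;

        AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) {
            this.timeSinceLastCheckMillis = timeSinceLastCheckMillis;
            this.lastSeenMetadataVersion = lastSeenMetadataVersion;
        }
    }

    public static void main(String[] args) {
        // what the per-cluster parser would produce for two clusters
        List<Map.Entry<String, AutoFollowedCluster>> parsed = Arrays.asList(
            new AbstractMap.SimpleEntry<>("leader-b", new AutoFollowedCluster(15, 42)),
            new AbstractMap.SimpleEntry<>("leader-a", new AutoFollowedCluster(10, 7)));

        // same collection step as the STATS_PARSER constructor argument above
        NavigableMap<String, AutoFollowedCluster> autoFollowedClusters = new TreeMap<>(
            parsed.stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));

        // TreeMap keeps the clusters sorted by name: leader-a prints first
        autoFollowedClusters.forEach((cluster, stats) ->
            System.out.println(cluster + " last_seen_metadata_version=" + stats.lastSeenMetadataVersion));
    }
}
--------------------------------------------------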
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index b50d2c1265e..fa0f1c5708c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -73,6 +73,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.VersionType;
+import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.rankeval.PrecisionAtK;
@@ -115,6 +116,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.StringJoiner;
+import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
@@ -156,6 +158,58 @@ public class RequestConvertersTests extends ESTestCase {
getAndExistsWithTypeTest(RequestConverters::get, HttpGet.METHOD_NAME);
}
+ public void testSourceExists() throws IOException {
+ doTestSourceExists((index, id) -> new GetRequest(index, id));
+ }
+
+ public void testSourceExistsWithType() throws IOException {
+ String type = frequently() ? randomAlphaOfLengthBetween(3, 10) : MapperService.SINGLE_MAPPING_NAME;
+ doTestSourceExists((index, id) -> new GetRequest(index, type, id));
+ }
+
+ private static void doTestSourceExists(BiFunction<String, String, GetRequest> requestFunction) throws IOException {
+ String index = randomAlphaOfLengthBetween(3, 10);
+ String id = randomAlphaOfLengthBetween(3, 10);
+ final GetRequest getRequest = requestFunction.apply(index, id);
+
+ Map<String, String> expectedParams = new HashMap<>();
+ if (randomBoolean()) {
+ String preference = randomAlphaOfLengthBetween(3, 10);
+ getRequest.preference(preference);
+ expectedParams.put("preference", preference);
+ }
+ if (randomBoolean()) {
+ String routing = randomAlphaOfLengthBetween(3, 10);
+ getRequest.routing(routing);
+ expectedParams.put("routing", routing);
+ }
+ if (randomBoolean()) {
+ boolean realtime = randomBoolean();
+ getRequest.realtime(realtime);
+ if (realtime == false) {
+ expectedParams.put("realtime", "false");
+ }
+ }
+ if (randomBoolean()) {
+ boolean refresh = randomBoolean();
+ getRequest.refresh(refresh);
+ if (refresh) {
+ expectedParams.put("refresh", "true");
+ }
+ }
+ Request request = RequestConverters.sourceExists(getRequest);
+ assertEquals(HttpHead.METHOD_NAME, request.getMethod());
+ String type = getRequest.type();
+ if (type.equals(MapperService.SINGLE_MAPPING_NAME)) {
+ assertEquals("/" + index + "/_source/" + id, request.getEndpoint());
+ } else {
+ assertEquals("/" + index + "/" + type + "/" + id + "/_source", request.getEndpoint());
+ }
+
+ assertEquals(expectedParams, request.getParameters());
+ assertNull(request.getEntity());
+ }
+
public void testMultiGet() throws IOException {
Map<String, String> expectedParams = new HashMap<>();
MultiGetRequest multiGetRequest = new MultiGetRequest();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java
index 039e31151c4..8d53b5cde08 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.client.ccr;
import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.client.ccr.AutoFollowStats.AutoFollowedCluster;
import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -185,6 +186,19 @@ public class CcrStatsResponseTests extends ESTestCase {
builder.endObject();
}
builder.endArray();
+ builder.startArray(AutoFollowStats.AUTO_FOLLOWED_CLUSTERS.getPreferredName());
+ for (Map.Entry<String, AutoFollowedCluster> entry : autoFollowStats.getAutoFollowedClusters().entrySet()) {
+ builder.startObject();
+ {
+ builder.field(AutoFollowStats.CLUSTER_NAME.getPreferredName(), entry.getKey());
+ builder.field(AutoFollowStats.TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(),
+ entry.getValue().getTimeSinceLastCheckMillis());
+ builder.field(AutoFollowStats.LAST_SEEN_METADATA_VERSION.getPreferredName(),
+ entry.getValue().getLastSeenMetadataVersion());
+ }
+ builder.endObject();
+ }
+ builder.endArray();
}
builder.endObject();
@@ -315,11 +329,16 @@ public class CcrStatsResponseTests extends ESTestCase {
for (int i = 0; i < count; i++) {
readExceptions.put("" + i, new ElasticsearchException(new IllegalStateException("index [" + i + "]")));
}
+ final NavigableMap<String, AutoFollowedCluster> autoFollowClusters = new TreeMap<>();
+ for (int i = 0; i < count; i++) {
+ autoFollowClusters.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong()));
+ }
return new AutoFollowStats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
- readExceptions
+ readExceptions,
+ autoFollowClusters
);
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index a9430b67aef..5279c19a415 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -1265,7 +1265,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
assertEquals(3, getResponse.getSourceAsMap().size());
//tag::get-response
String index = getResponse.getIndex();
- String type = getResponse.getType();
String id = getResponse.getId();
if (getResponse.isExists()) {
long version = getResponse.getVersion();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
index 8bd285cd31f..6cd56774086 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
@@ -1317,6 +1317,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/36362")
public void testInvalidateToken() throws Exception {
RestHighLevelClient client = highLevelClient();
diff --git a/docs/java-rest/high-level/security/invalidate-token.asciidoc b/docs/java-rest/high-level/security/invalidate-token.asciidoc
index ecb3fedb56f..65e0f15bd86 100644
--- a/docs/java-rest/high-level/security/invalidate-token.asciidoc
+++ b/docs/java-rest/high-level/security/invalidate-token.asciidoc
@@ -36,4 +36,4 @@ The returned +{response}+ contains a single property:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
---------------------------------------------------
+--------------------------------------------------
\ No newline at end of file
diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc
index 2f9564294d0..a3d716ff2d9 100644
--- a/docs/plugins/ingest-attachment.asciidoc
+++ b/docs/plugins/ingest-attachment.asciidoc
@@ -63,6 +63,8 @@ Returns this:
"_type": "_doc",
"_id": "my_id",
"_version": 1,
+ "_seq_no": 22,
+ "_primary_term": 1,
"_source": {
"data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
"attachment": {
@@ -74,7 +76,7 @@ Returns this:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
To specify only some fields to be extracted:
@@ -146,6 +148,8 @@ Returns this:
"_type": "_doc",
"_id": "my_id",
"_version": 1,
+ "_seq_no": 35,
+ "_primary_term": 1,
"_source": {
"data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
"attachment": {
@@ -157,7 +161,7 @@ Returns this:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
[source,js]
@@ -194,6 +198,8 @@ Returns this:
"_type": "_doc",
"_id": "my_id_2",
"_version": 1,
+ "_seq_no": 40,
+ "_primary_term": 1,
"_source": {
"data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
"max_size": 5,
@@ -206,7 +212,7 @@ Returns this:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
[[ingest-attachment-with-arrays]]
@@ -285,6 +291,8 @@ Returns this:
"_type" : "_doc",
"_id" : "my_id",
"_version" : 1,
+ "_seq_no" : 50,
+ "_primary_term" : 1,
"found" : true,
"_source" : {
"attachments" : [
@@ -312,7 +320,7 @@ Returns this:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
Note that the `target_field` needs to be set, otherwise the
diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc
index 5d22a31baa8..f4795f6620a 100644
--- a/docs/plugins/ingest-geoip.asciidoc
+++ b/docs/plugins/ingest-geoip.asciidoc
@@ -75,6 +75,8 @@ Which returns:
"_type": "_doc",
"_id": "my_id",
"_version": 1,
+ "_seq_no": 55,
+ "_primary_term": 1,
"_source": {
"ip": "8.8.8.8",
"geoip": {
@@ -85,7 +87,7 @@ Which returns:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/]
Here is an example that uses the default country database and adds the
geographical information to the `geo` field based on the `ip` field`. Note that
@@ -124,6 +126,8 @@ returns this:
"_type": "_doc",
"_id": "my_id",
"_version": 1,
+ "_seq_no": 65,
+ "_primary_term": 1,
"_source": {
"ip": "8.8.8.8",
"geo": {
@@ -133,7 +137,7 @@ returns this:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
Not all IP addresses find geo information from the database, When this
@@ -174,13 +178,15 @@ Which returns:
"_type" : "_doc",
"_id" : "my_id",
"_version" : 1,
+ "_seq_no" : 71,
+ "_primary_term": 1,
"found" : true,
"_source" : {
"ip" : "80.231.5.0"
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
[[ingest-geoip-mappings-note]]
===== Recognizing Location as a Geopoint
diff --git a/docs/plugins/ingest-user-agent.asciidoc b/docs/plugins/ingest-user-agent.asciidoc
index 57594eab573..a0e6d3257f3 100644
--- a/docs/plugins/ingest-user-agent.asciidoc
+++ b/docs/plugins/ingest-user-agent.asciidoc
@@ -57,6 +57,8 @@ Which returns
"_type": "_doc",
"_id": "my_id",
"_version": 1,
+ "_seq_no": 22,
+ "_primary_term": 1,
"_source": {
"agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
"user_agent": {
@@ -73,7 +75,7 @@ Which returns
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/]
===== Using a custom regex file
To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-user-agent` directory and
diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc
index b8491e8a601..d849a99c459 100644
--- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc
+++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc
@@ -105,7 +105,8 @@ The API returns the following results:
"number_of_failed_follow_indices" : 0,
"number_of_failed_remote_cluster_state_requests" : 0,
"number_of_successful_follow_indices" : 1,
- "recent_auto_follow_errors" : []
+ "recent_auto_follow_errors" : [],
+ "auto_followed_clusters" : []
},
"follow_stats" : {
"indices" : [
@@ -151,6 +152,7 @@ The API returns the following results:
// TESTRESPONSE[s/"number_of_failed_remote_cluster_state_requests" : 0/"number_of_failed_remote_cluster_state_requests" : $body.auto_follow_stats.number_of_failed_remote_cluster_state_requests/]
// TESTRESPONSE[s/"number_of_successful_follow_indices" : 1/"number_of_successful_follow_indices" : $body.auto_follow_stats.number_of_successful_follow_indices/]
// TESTRESPONSE[s/"recent_auto_follow_errors" : \[\]/"recent_auto_follow_errors" : $body.auto_follow_stats.recent_auto_follow_errors/]
+// TESTRESPONSE[s/"auto_followed_clusters" : \[\]/"auto_followed_clusters" : $body.auto_follow_stats.auto_followed_clusters/]
// TESTRESPONSE[s/"leader_global_checkpoint" : 1024/"leader_global_checkpoint" : $body.follow_stats.indices.0.shards.0.leader_global_checkpoint/]
// TESTRESPONSE[s/"leader_max_seq_no" : 1536/"leader_max_seq_no" : $body.follow_stats.indices.0.shards.0.leader_max_seq_no/]
// TESTRESPONSE[s/"follower_global_checkpoint" : 768/"follower_global_checkpoint" : $body.follow_stats.indices.0.shards.0.follower_global_checkpoint/]
diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc
index ec6ef28534f..5271b976f96 100644
--- a/docs/reference/docs/get.asciidoc
+++ b/docs/reference/docs/get.asciidoc
@@ -1,9 +1,9 @@
[[docs-get]]
== Get API
-The get API allows to get a typed JSON document from the index based on
+The get API allows to get a JSON document from the index based on
its id. The following example gets a JSON document from an index called
-twitter, under a type called `_doc`, with id valued 0:
+twitter with id valued 0:
[source,js]
--------------------------------------------------
@@ -21,6 +21,8 @@ The result of the above get operation is:
"_type" : "_doc",
"_id" : "0",
"_version" : 1,
+ "_seq_no" : 10,
+ "_primary_term" : 1,
"found": true,
"_source" : {
"user" : "kimchy",
@@ -30,9 +32,9 @@ The result of the above get operation is:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
-The above result includes the `_index`, `_type`, `_id` and `_version`
+The above result includes the `_index`, `_id` and `_version`
of the document we wish to retrieve, including the actual `_source`
of the document if it could be found (as indicated by the `found`
field in the response).
@@ -156,6 +158,8 @@ The result of the above get operation is:
"_type": "_doc",
"_id": "1",
"_version": 1,
+ "_seq_no" : 22,
+ "_primary_term" : 1,
"found": true,
"fields": {
"tags": [
@@ -164,7 +168,7 @@ The result of the above get operation is:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
Field values fetched from the document itself are always returned as an array.
@@ -199,6 +203,8 @@ The result of the above get operation is:
"_type": "_doc",
"_id": "2",
"_version": 1,
+ "_seq_no" : 13,
+ "_primary_term" : 1,
"_routing": "user1",
"found": true,
"fields": {
@@ -208,7 +214,7 @@ The result of the above get operation is:
}
}
--------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
Also only leaf fields can be returned via the `stored_field` option. So object fields can't be returned and such requests
will fail.
@@ -217,13 +223,13 @@ will fail.
[[_source]]
=== Getting the +_source+ directly
-Use the `/{index}/{type}/{id}/_source` endpoint to get
+Use the `/{index}/_source/{id}` endpoint to get
just the `_source` field of the document,
without any additional content around it. For example:
[source,js]
--------------------------------------------------
-GET twitter/_doc/1/_source
+GET twitter/_source/1
--------------------------------------------------
// CONSOLE
// TEST[continued]
@@ -232,7 +238,7 @@ You can also use the same source filtering parameters to control which parts of
[source,js]
--------------------------------------------------
-GET twitter/_doc/1/_source?_source_includes=*.id&_source_excludes=entities'
+GET twitter/_source/1/?_source_includes=*.id&_source_excludes=entities'
--------------------------------------------------
// CONSOLE
// TEST[continued]
@@ -242,7 +248,7 @@ An existing document will not have a _source if it is disabled in the <>.
-
-Alternatively, you can use an <> to send data to
+<>. Alternatively, you can use an <> to send data to
a separate _monitoring cluster_.
+IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the
+cluster that stores the monitoring data must have at least one
+<>.
+
For more information about typical monitoring architectures,
see {stack-ov}/how-monitoring-works.html[How Monitoring Works].
--
diff --git a/docs/reference/monitoring/configuring-metricbeat.asciidoc b/docs/reference/monitoring/configuring-metricbeat.asciidoc
index 6098336538b..dd7811b3421 100644
--- a/docs/reference/monitoring/configuring-metricbeat.asciidoc
+++ b/docs/reference/monitoring/configuring-metricbeat.asciidoc
@@ -164,6 +164,10 @@ output.elasticsearch:
<1> In this example, the data is stored on a monitoring cluster with nodes
`es-mon-1` and `es-mon-2`.
+IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the
+cluster that stores the monitoring data must have at least one
+<>.
+
For more information about these configuration options, see
{metricbeat-ref}/elasticsearch-output.html[Configure the {es} output].
--
diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml
index 4695991f3c3..d4b39c5e99a 100644
--- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml
+++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml
@@ -23,7 +23,7 @@ teardown:
]
}
- match: { acknowledged: true }
-
+# default pipeline via index
- do:
indices.create:
index: test
@@ -48,7 +48,7 @@ teardown:
id: 1
- match: { _source.bytes_source_field: "1kb" }
- match: { _source.bytes_target_field: 1024 }
-
+# default pipeline via alias
- do:
index:
index: test_alias
@@ -63,12 +63,101 @@ teardown:
id: 2
- match: { _source.bytes_source_field: "1kb" }
- match: { _source.bytes_target_field: 1024 }
+# default pipeline via upsert
+ - do:
+ update:
+ index: test
+ type: test
+ id: 3
+ body:
+ script:
+ source: "ctx._source.ran_script = true"
+ lang: "painless"
+ upsert: { "bytes_source_field":"1kb" }
+ - do:
+ get:
+ index: test
+ type: test
+ id: 3
+ - match: { _source.bytes_source_field: "1kb" }
+ - match: { _source.bytes_target_field: 1024 }
+# default pipeline via scripted upsert
+ - do:
+ update:
+ index: test
+ type: test
+ id: 4
+ body:
+ script:
+ source: "ctx._source.bytes_source_field = '1kb'"
+ lang: "painless"
+ upsert : {}
+ scripted_upsert: true
+ - do:
+ get:
+ index: test
+ type: test
+ id: 4
+ - match: { _source.bytes_source_field: "1kb" }
+ - match: { _source.bytes_target_field: 1024 }
+# default pipeline via doc_as_upsert
+ - do:
+ update:
+ index: test
+ type: test
+ id: 5
+ body:
+ doc: { "bytes_source_field":"1kb" }
+ doc_as_upsert: true
+ - do:
+ get:
+ index: test
+ type: test
+ id: 5
+ - match: { _source.bytes_source_field: "1kb" }
+ - match: { _source.bytes_target_field: 1024 }
+# default pipeline via bulk upsert
+# note - bulk scripted upserts execute the pipeline before the script, so any data referenced by the pipeline
+# needs to be in the upsert, not the script
+ - do:
+ bulk:
+ refresh: true
+ body: |
+ {"update":{"_id":"6","_index":"test","_type":"test"}}
+ {"script":"ctx._source.ran_script = true","upsert":{"bytes_source_field":"1kb"}}
+ {"update":{"_id":"7","_index":"test","_type":"test"}}
+ {"doc":{"bytes_source_field":"2kb"}, "doc_as_upsert":true}
+ {"update":{"_id":"8","_index":"test","_type":"test"}}
+ {"script": "ctx._source.ran_script = true","upsert":{"bytes_source_field":"3kb"}, "scripted_upsert" : true}
+ - do:
+ mget:
+ body:
+ docs:
+ - { _index: "test", _type: "_doc", _id: "6" }
+ - { _index: "test", _type: "_doc", _id: "7" }
+ - { _index: "test", _type: "_doc", _id: "8" }
+ - match: { docs.0._index: "test" }
+ - match: { docs.0._id: "6" }
+ - match: { docs.0._source.bytes_source_field: "1kb" }
+ - match: { docs.0._source.bytes_target_field: 1024 }
+ - is_false: docs.0._source.ran_script
+ - match: { docs.1._index: "test" }
+ - match: { docs.1._id: "7" }
+ - match: { docs.1._source.bytes_source_field: "2kb" }
+ - match: { docs.1._source.bytes_target_field: 2048 }
+ - match: { docs.2._index: "test" }
+ - match: { docs.2._id: "8" }
+ - match: { docs.2._source.bytes_source_field: "3kb" }
+ - match: { docs.2._source.bytes_target_field: 3072 }
+ - match: { docs.2._source.ran_script: true }
+
+# explicit no default pipeline
- do:
index:
index: test
type: test
- id: 3
+ id: 9
pipeline: "_none"
body: {bytes_source_field: "1kb"}
@@ -76,15 +165,15 @@ teardown:
get:
index: test
type: test
- id: 3
+ id: 9
- match: { _source.bytes_source_field: "1kb" }
- is_false: _source.bytes_target_field
-
+# bad request
- do:
catch: bad_request
index:
index: test
type: test
- id: 4
+ id: 10
pipeline: ""
body: {bytes_source_field: "1kb"}
diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml
index 3be038aca24..accc30faa21 100644
--- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml
+++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml
@@ -57,3 +57,44 @@ teardown:
type: test
id: 2
- match: { _source.foo: "blub" }
+
+---
+"Test Drop Processor On Failure":
+- do:
+ ingest.put_pipeline:
+ id: "my_pipeline_with_failure"
+ body: >
+ {
+ "description" : "pipeline with on failure drop",
+ "processors": [
+ {
+ "fail": {
+ "message": "failed",
+ "on_failure": [
+ {
+ "drop": {}
+ }
+ ]
+ }
+ }
+ ]
+ }
+- match: { acknowledged: true }
+
+- do:
+ index:
+ index: test
+ type: test
+ id: 3
+ pipeline: "my_pipeline_with_failure"
+ body: {
+ foo: "bar"
+ }
+
+- do:
+ catch: missing
+ get:
+ index: test
+ type: test
+ id: 3
+- match: { found: false }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
index f00a30a62c4..ac21be1f5c0 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
@@ -73,38 +73,6 @@ public final class AnalyzerCaster {
} else if (expected == Double.class) {
return PainlessCast.originalTypetoTargetType(def.class, Double.class, explicit);
}
- } else if (actual == Object.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Byte.class, true, byte.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Short.class, true, short.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Character.class, true, char.class);
- } else if (expected == int.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Integer.class, true, int.class);
- } else if (expected == long.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Long.class, true, long.class);
- } else if (expected == float.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Float.class, true, float.class);
- } else if (expected == double.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Object.class, Double.class, true, double.class);
- }
- } else if (actual == Number.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Byte.class, true, byte.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Short.class, true, short.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Character.class, true, char.class);
- } else if (expected == int.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Integer.class, true, int.class);
- } else if (expected == long.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Long.class, true, long.class);
- } else if (expected == float.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Float.class, true, float.class);
- } else if (expected == double.class && explicit && internal) {
- return PainlessCast.unboxTargetType(Number.class, Double.class, true, double.class);
- }
} else if (actual == String.class) {
if (expected == char.class && explicit) {
return PainlessCast.originalTypetoTargetType(String.class, char.class, true);
@@ -140,8 +108,6 @@ public final class AnalyzerCaster {
return PainlessCast.boxTargetType(byte.class, byte.class, explicit, byte.class);
} else if (expected == Short.class && internal) {
return PainlessCast.boxTargetType(byte.class, short.class, explicit, short.class);
- } else if (expected == Character.class && explicit && internal) {
- return PainlessCast.boxTargetType(byte.class, char.class, true, char.class);
} else if (expected == Integer.class && internal) {
return PainlessCast.boxTargetType(byte.class, int.class, explicit, int.class);
} else if (expected == Long.class && internal) {
@@ -170,12 +136,8 @@ public final class AnalyzerCaster {
return PainlessCast.originalTypetoTargetType(short.class, float.class, explicit);
} else if (expected == double.class) {
return PainlessCast.originalTypetoTargetType(short.class, double.class, explicit);
- } else if (expected == Byte.class && explicit && internal) {
- return PainlessCast.boxTargetType(short.class, byte.class, true, byte.class);
} else if (expected == Short.class && internal) {
return PainlessCast.boxTargetType(short.class, short.class, explicit, short.class);
- } else if (expected == Character.class && explicit && internal) {
- return PainlessCast.boxTargetType(short.class, char.class, true, char.class);
} else if (expected == Integer.class && internal) {
return PainlessCast.boxTargetType(short.class, int.class, explicit, int.class);
} else if (expected == Long.class && internal) {
@@ -206,10 +168,6 @@ public final class AnalyzerCaster {
return PainlessCast.originalTypetoTargetType(char.class, float.class, explicit);
} else if (expected == double.class) {
return PainlessCast.originalTypetoTargetType(char.class, double.class, explicit);
- } else if (expected == Byte.class && explicit && internal) {
- return PainlessCast.boxTargetType(char.class, byte.class, true, byte.class);
- } else if (expected == Short.class && internal) {
- return PainlessCast.boxTargetType(char.class, short.class, explicit, short.class);
} else if (expected == Character.class && internal) {
return PainlessCast.boxTargetType(char.class, char.class, true, char.class);
} else if (expected == Integer.class && internal) {
@@ -240,12 +198,6 @@ public final class AnalyzerCaster {
return PainlessCast.originalTypetoTargetType(int.class, float.class, explicit);
} else if (expected == double.class) {
return PainlessCast.originalTypetoTargetType(int.class, double.class, explicit);
- } else if (expected == Byte.class && explicit && internal) {
- return PainlessCast.boxTargetType(int.class, byte.class, true, byte.class);
- } else if (expected == Short.class && explicit && internal) {
- return PainlessCast.boxTargetType(int.class, short.class, true, short.class);
- } else if (expected == Character.class && explicit && internal) {
- return PainlessCast.boxTargetType(int.class, char.class, true, char.class);
} else if (expected == Integer.class && internal) {
return PainlessCast.boxTargetType(int.class, int.class, explicit, int.class);
} else if (expected == Long.class && internal) {
@@ -274,14 +226,6 @@ public final class AnalyzerCaster {
return PainlessCast.originalTypetoTargetType(long.class, float.class, explicit);
} else if (expected == double.class) {
return PainlessCast.originalTypetoTargetType(long.class, double.class, explicit);
- } else if (expected == Byte.class && explicit && internal) {
- return PainlessCast.boxTargetType(long.class, byte.class, true, byte.class);
- } else if (expected == Short.class && explicit && internal) {
- return PainlessCast.boxTargetType(long.class, short.class, true, short.class);
- } else if (expected == Character.class && explicit && internal) {
- return PainlessCast.boxTargetType(long.class, char.class, true, char.class);
- } else if (expected == Integer.class && explicit && internal) {
- return PainlessCast.boxTargetType(long.class, int.class, true, int.class);
} else if (expected == Long.class && internal) {
return PainlessCast.boxTargetType(long.class, long.class, explicit, long.class);
} else if (expected == Float.class && internal) {
@@ -308,16 +252,6 @@ public final class AnalyzerCaster {
return PainlessCast.originalTypetoTargetType(float.class, long.class, true);
} else if (expected == double.class) {
return PainlessCast.originalTypetoTargetType(float.class, double.class, explicit);
- } else if (expected == Byte.class && explicit && internal) {
- return PainlessCast.boxTargetType(float.class, byte.class, true, byte.class);
- } else if (expected == Short.class && explicit && internal) {
- return PainlessCast.boxTargetType(float.class, short.class, true, short.class);
- } else if (expected == Character.class && explicit && internal) {
- return PainlessCast.boxTargetType(float.class, char.class, true, char.class);
- } else if (expected == Integer.class && explicit && internal) {
- return PainlessCast.boxTargetType(float.class, int.class, true, int.class);
- } else if (expected == Long.class && explicit && internal) {
- return PainlessCast.boxTargetType(float.class, long.class, true, long.class);
} else if (expected == Float.class && internal) {
return PainlessCast.boxTargetType(float.class, float.class, explicit, float.class);
} else if (expected == Double.class && internal) {
@@ -342,18 +276,6 @@ public final class AnalyzerCaster {
return PainlessCast.originalTypetoTargetType(double.class, long.class, true);
} else if (expected == float.class && explicit) {
return PainlessCast.originalTypetoTargetType(double.class, float.class, true);
- } else if (expected == Byte.class && explicit && internal) {
- return PainlessCast.boxTargetType(double.class, byte.class, true, byte.class);
- } else if (expected == Short.class && explicit && internal) {
- return PainlessCast.boxTargetType(double.class, short.class, true, short.class);
- } else if (expected == Character.class && explicit && internal) {
- return PainlessCast.boxTargetType(double.class, char.class, true, char.class);
- } else if (expected == Integer.class && explicit && internal) {
- return PainlessCast.boxTargetType(double.class, int.class, true, int.class);
- } else if (expected == Long.class && explicit && internal) {
- return PainlessCast.boxTargetType(double.class, long.class, true, long.class);
- } else if (expected == Float.class && explicit && internal) {
- return PainlessCast.boxTargetType(double.class, float.class, true, float.class);
} else if (expected == Double.class && internal) {
return PainlessCast.boxTargetType(double.class, double.class, explicit, double.class);
}
@@ -366,8 +288,6 @@ public final class AnalyzerCaster {
return PainlessCast.unboxOriginalType(byte.class, byte.class, explicit, byte.class);
} else if (expected == short.class && internal) {
return PainlessCast.unboxOriginalType(byte.class, short.class, explicit, byte.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(byte.class, char.class, true, byte.class);
} else if (expected == int.class && internal) {
return PainlessCast.unboxOriginalType(byte.class, int.class, explicit, byte.class);
} else if (expected == long.class && internal) {
@@ -376,14 +296,20 @@ public final class AnalyzerCaster {
return PainlessCast.unboxOriginalType(byte.class, float.class, explicit, byte.class);
} else if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(byte.class, double.class, explicit, byte.class);
+ } else if (expected == Short.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, short.class);
+ } else if (expected == Integer.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, int.class);
+ } else if (expected == Long.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, long.class);
+ } else if (expected == Float.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, float.class);
+ } else if (expected == Double.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, double.class);
}
} else if (actual == Short.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(short.class, byte.class, true, short.class);
- } else if (expected == short.class && internal) {
+ if (expected == short.class && internal) {
return PainlessCast.unboxOriginalType(short.class, short.class, explicit, short.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(short.class, char.class, true, short.class);
} else if (expected == int.class && internal) {
return PainlessCast.unboxOriginalType(short.class, int.class, explicit, short.class);
} else if (expected == long.class && internal) {
@@ -392,13 +318,17 @@ public final class AnalyzerCaster {
return PainlessCast.unboxOriginalType(short.class, float.class, explicit, short.class);
} else if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(short.class, double.class, explicit, short.class);
+ } else if (expected == Integer.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, int.class);
+ } else if (expected == Long.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, long.class);
+ } else if (expected == Float.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, float.class);
+ } else if (expected == Double.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, double.class);
}
} else if (actual == Character.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(char.class, byte.class, true, char.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(char.class, short.class, true, char.class);
- } else if (expected == char.class && internal) {
+ if (expected == char.class && internal) {
return PainlessCast.unboxOriginalType(char.class, char.class, explicit, char.class);
} else if (expected == int.class && internal) {
return PainlessCast.unboxOriginalType(char.class, int.class, explicit, char.class);
@@ -408,15 +338,17 @@ public final class AnalyzerCaster {
return PainlessCast.unboxOriginalType(char.class, float.class, explicit, char.class);
} else if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(char.class, double.class, explicit, char.class);
+ } else if (expected == Integer.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, int.class);
+ } else if (expected == Long.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, long.class);
+ } else if (expected == Float.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, float.class);
+ } else if (expected == Double.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, double.class);
}
} else if (actual == Integer.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(int.class, byte.class, true, int.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(int.class, short.class, true, int.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(int.class, char.class, true, int.class);
- } else if (expected == int.class && internal) {
+ if (expected == int.class && internal) {
return PainlessCast.unboxOriginalType(int.class, int.class, explicit, int.class);
} else if (expected == long.class && internal) {
return PainlessCast.unboxOriginalType(int.class, long.class, explicit, int.class);
@@ -424,61 +356,45 @@ public final class AnalyzerCaster {
return PainlessCast.unboxOriginalType(int.class, float.class, explicit, int.class);
} else if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(int.class, double.class, explicit, int.class);
+ } else if (expected == Long.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, long.class);
+ } else if (expected == Float.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, float.class);
+ } else if (expected == Double.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, double.class);
}
} else if (actual == Long.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(long.class, byte.class, true, long.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(long.class, short.class, true, long.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(long.class, char.class, true, long.class);
- } else if (expected == int.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(long.class, int.class, true, long.class);
- } else if (expected == long.class && internal) {
+ if (expected == long.class && internal) {
return PainlessCast.unboxOriginalType(long.class, long.class, explicit, long.class);
} else if (expected == float.class && internal) {
return PainlessCast.unboxOriginalType(long.class, float.class, explicit, long.class);
} else if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(long.class, double.class, explicit, long.class);
+ } else if (expected == Float.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, float.class);
+ } else if (expected == Double.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, double.class);
}
} else if (actual == Float.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(float.class, byte.class, true, float.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(float.class, short.class, true, float.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(float.class, char.class, true, float.class);
- } else if (expected == int.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(float.class, int.class, true, float.class);
- } else if (expected == long.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(float.class, long.class, true, float.class);
- } else if (expected == float.class && internal) {
+ if (expected == float.class && internal) {
return PainlessCast.unboxOriginalType(float.class, float.class, explicit, float.class);
} else if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(float.class, double.class, explicit, float.class);
+ } else if (expected == Double.class && internal) {
+ return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, float.class, double.class);
}
} else if (actual == Double.class) {
- if (expected == byte.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(double.class, byte.class, true, double.class);
- } else if (expected == short.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(double.class, short.class, true, double.class);
- } else if (expected == char.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(double.class, char.class, true, double.class);
- } else if (expected == int.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(double.class, int.class, true, double.class);
- } else if (expected == long.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(double.class, long.class, true, double.class);
- } else if (expected == float.class && explicit && internal) {
- return PainlessCast.unboxOriginalType(double.class, float.class, true, double.class);
- } else if (expected == double.class && internal) {
+ if (expected == double.class && internal) {
return PainlessCast.unboxOriginalType(double.class, double.class, explicit, double.class);
}
}
- if ( actual == def.class ||
+ if (
+ actual == def.class ||
(actual != void.class && expected == def.class) ||
- expected.isAssignableFrom(actual) ||
- (actual.isAssignableFrom(expected) && explicit)) {
+ expected.isAssignableFrom(actual) ||
+ (actual.isAssignableFrom(expected) && explicit)
+ ) {
return PainlessCast.originalTypetoTargetType(actual, expected, explicit);
} else {
throw location.createError(new ClassCastException("Cannot cast from " +
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
index a2433689db3..ea58e7df7b4 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
@@ -154,6 +154,10 @@ public final class MethodWriter extends GeneratorAdapter {
invokeStatic(UTILITY_TYPE, CHAR_TO_STRING);
} else if (cast.originalType == String.class && cast.targetType == char.class) {
invokeStatic(UTILITY_TYPE, STRING_TO_CHAR);
+ } else if (cast.unboxOriginalType != null && cast.boxTargetType != null) {
+ unbox(getType(cast.unboxOriginalType));
+ writeCast(cast.unboxOriginalType, cast.boxTargetType);
+ box(getType(cast.boxTargetType));
} else if (cast.unboxOriginalType != null) {
unbox(getType(cast.unboxOriginalType));
writeCast(cast.originalType, cast.targetType);
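In plain Java, the new branch corresponds to a three-step conversion: unbox the original wrapper, apply the primitive widening cast, then box into the target wrapper. A minimal sketch under that reading (the helper below is illustrative, not generated code):

[source,java]
--------------------------------------------------
public class UnboxCastBoxSketch {
    // e.g. a Byte argument passed where an Integer is expected:
    static Integer byteToInteger(Byte original) {
        byte unboxed = original.byteValue(); // unbox(getType(cast.unboxOriginalType))
        int widened = unboxed;               // writeCast(...) -- byte -> int widening
        return Integer.valueOf(widened);     // box(getType(cast.boxTargetType))
    }

    public static void main(String[] args) {
        Integer boxed = byteToInteger(Byte.valueOf((byte) 1));
        System.out.println(boxed.compareTo(1)); // 0, matching the BoxedCastTests expectations below
    }
}
--------------------------------------------------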
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java
index 98968465d34..5a3fb848a61 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java
@@ -75,6 +75,15 @@ public class PainlessCast {
return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType);
}
+ /** Create a cast where the original type is unboxed, cast to a target type, and the target type is boxed. */
+ public static PainlessCast unboxOriginalTypeToBoxTargetType(boolean explicitCast, Class<?> unboxOriginalType, Class<?> boxTargetType) {
+
+ Objects.requireNonNull(unboxOriginalType);
+ Objects.requireNonNull(boxTargetType);
+
+ return new PainlessCast(null, null, explicitCast, unboxOriginalType, null, null, boxTargetType);
+ }
+
public final Class<?> originalType;
public final Class<?> targetType;
public final boolean explicitCast;
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java
new file mode 100644
index 00000000000..67a2b683ab6
--- /dev/null
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java
@@ -0,0 +1,511 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless;
+
+public class BoxedCastTests extends ScriptTestCase {
+
+ public void testMethodCallByteToBoxedCasts() {
+ assertEquals(0, exec("byte u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("byte u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Byte u = Byte.valueOf((byte)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ assertEquals(0, exec("byte u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("byte u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("byte u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Byte u = Byte.valueOf((byte)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ assertEquals(0, exec("def u = (byte)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (byte)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ assertEquals(0, exec("def u = (byte)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (byte)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (byte)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+
+ public void testMethodCallShortToBoxedCasts() {
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("short u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("short u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Short u = Short.valueOf((short)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Short u = Short.valueOf((short)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("short u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("short u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("short u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Short u = Short.valueOf((short)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Short u = Short.valueOf((short)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (short)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (short)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (short)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (short)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (short)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+
+ public void testMethodCallCharacterToBoxedCasts() {
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("char u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("char u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Character u = Character.valueOf((char)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Character u = Character.valueOf((char)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("char u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("char u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("char u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Character u = Character.valueOf((char)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Character u = Character.valueOf((char)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (char)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (char)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (char)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (char)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (char)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+
+ public void testMethodCallIntegerToBoxedCasts() {
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("int u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("int u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("int u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Integer u = Integer.valueOf((int)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Integer u = Integer.valueOf((int)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Integer u = Integer.valueOf((int)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("int u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("int u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("int u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("int u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Integer u = Integer.valueOf((int)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Integer u = Integer.valueOf((int)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Integer u = Integer.valueOf((int)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (int)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (int)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (int)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (int)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (int)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (int)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (int)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+
+ public void testMethodCallLongToBoxedCasts() {
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("long u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("long u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("long u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Long u = Long.valueOf((long)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Long u = Long.valueOf((long)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Long u = Long.valueOf((long)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("long u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("long u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("long u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("long u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Long u = Long.valueOf((long)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (long)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (long)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (long)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (long)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (long)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (long)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (long)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+
+ public void testMethodCallFloatToBoxedCasts() {
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("float u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("float u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Float u = Float.valueOf((float)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Float u = Float.valueOf((float)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("float u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("float u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("float u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Float u = Float.valueOf((float)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (float)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (float)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (float)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (float)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (float)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+
+ public void testMethodCallDoubleToBoxedCasts() {
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("double u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Double u = Double.valueOf((double)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("double u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("double u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("Double u = Double.valueOf((double)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("Double u = Double.valueOf((double)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (double)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
+
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
+ expectScriptThrows(ClassCastException.class,
+ () -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
+ assertEquals(0, exec("def u = (double)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
+ }
+}
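Note: every case in the boxed-cast tests above follows the same shape — declare the unboxed (or def) value u, declare the boxed (or def) receiver b, then check whether b.compareTo(u) is allowed. A hypothetical helper such as the sketch below (not part of this change; it only assumes the exec and expectScriptThrows utilities already used by this test class) captures one row of that matrix:

    // Sketch only: parameterizes the repeated pattern above.
    private void assertBoxedCompare(String unboxedDecl, String boxedDecl, boolean allowed) {
        String script = unboxedDecl + " " + boxedDecl + " b.compareTo(u);";
        if (allowed) {
            assertEquals(0, exec(script));                                    // widening to the boxed type is accepted
        } else {
            expectScriptThrows(ClassCastException.class, () -> exec(script)); // narrowing is rejected
        }
    }

    // e.g. assertBoxedCompare("byte u = 1;", "Integer b = Integer.valueOf((int)1);", true);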
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java
index be9c3f83f3f..d4fe0fe1ddd 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java
@@ -54,6 +54,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.hamcrest.Matchers.equalTo;
public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQueryBuilder> {
@@ -152,12 +153,13 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase listener) {
final long startTime = relativeTime();
@@ -207,12 +225,12 @@ public class TransportBulkAction extends HandledTransportAction indicesMetaData = metaData.indices();
for (DocWriteRequest<?> actionRequest : bulkRequest.requests) {
- if (actionRequest instanceof IndexRequest) {
- IndexRequest indexRequest = (IndexRequest) actionRequest;
+ IndexRequest indexRequest = getIndexWriteRequest(actionRequest);
+ if (indexRequest != null) {
String pipeline = indexRequest.getPipeline();
if (pipeline == null) {
- IndexMetaData indexMetaData = indicesMetaData.get(indexRequest.index());
- if (indexMetaData == null) {
+ IndexMetaData indexMetaData = indicesMetaData.get(actionRequest.index());
+ if (indexMetaData == null && indexRequest.index() != null) {
//check the alias
AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index());
if (indexOrAlias != null && indexOrAlias.isAlias()) {
@@ -626,7 +644,7 @@ public class TransportBulkAction extends HandledTransportAction> sourceAndContent =
XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType());
updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex,
+ indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(),
indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
}
} else if (translatedResult == DocWriteResponse.Result.DELETED) {
@@ -315,7 +316,8 @@ public class TransportShardBulkAction extends TransportWriteAction
* The operation requires the {@link #index()}, {@link #type(String)} and {@link #id(String)}
@@ -84,7 +84,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements RealtimeRequest {
* @param index The index to get the document from
* @param type The type of the document
* @param id The id of the document
- *
* @deprecated Types are in the process of being removed, use {@link GetRequest(String, String)} instead.
*/
@Deprecated
@@ -127,7 +126,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements RealtimeRequest {
/**
* Sets the type of the document to fetch.
- *
* @deprecated Types are in the process of being removed.
*/
@Deprecated
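For reference, the deprecation notices above steer callers from the typed to the typeless constructor; a minimal sketch (index and id values are illustrative):

    GetRequest typed    = new GetRequest("my-index", "_doc", "1"); // deprecated: still carries a type
    GetRequest typeless = new GetRequest("my-index", "1");         // preferred replacement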
diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java
index b39ceb49c59..fbcb47b5fad 100644
--- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java
@@ -90,6 +90,20 @@ public class GetResponse extends ActionResponse implements Iterable> sourceAndContent =
XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType());
- update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(),
- sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
+ update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(),
+ response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), sourceAndContent.v2(),
+ sourceAndContent.v1(), upsertSourceBytes));
} else {
update.setGetResult(null);
}
@@ -205,7 +206,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> {
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(),
response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(),
response.getVersion(), response.getResult());
- update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(),
+ update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(),
+ response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(),
result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
update.setForcedRefresh(response.forcedRefresh());
listener.onResponse(update);
@@ -216,10 +218,11 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> {
DeleteRequest deleteRequest = result.action();
client.bulk(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse(
ActionListener.wrap(response -> {
- UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(),
- response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(),
- response.getVersion(), response.getResult());
- update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(),
+ UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(),
+ response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(),
+ response.getResult());
+ update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(),
+ response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(),
result.updatedSourceAsMap(), result.updateSourceContentType(), null));
update.setForcedRefresh(response.forcedRefresh());
listener.onResponse(update);
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
index 3ef89b997a1..255161c8f32 100644
--- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
+++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
@@ -209,8 +209,8 @@ public class UpdateHelper {
if (detectNoop && noop) {
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(),
getResult.getVersion(), DocWriteResponse.Result.NOOP);
- update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap,
- updateSourceContentType, getResult.internalSourceRef()));
+ update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(),
+ getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
} else {
final IndexRequest finalIndexRequest = Requests.indexRequest(request.index())
@@ -270,10 +270,9 @@ public class UpdateHelper {
// If it was neither an INDEX or DELETE operation, treat it as a noop
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(),
getResult.getVersion(), DocWriteResponse.Result.NOOP);
- update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap,
- updateSourceContentType, getResult.internalSourceRef()));
+ update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(),
+ getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
-
}
}
@@ -293,7 +292,7 @@ public class UpdateHelper {
/**
* Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response.
*/
- public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version,
+ public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long seqNo, long primaryTerm, long version,
final Map<String, Object> source, XContentType sourceContentType,
@Nullable final BytesReference sourceAsBytes) {
if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) {
@@ -318,7 +317,8 @@ public class UpdateHelper {
}
// TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
- return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap());
+ return new GetResult(concreteIndex, request.type(), request.id(), seqNo, primaryTerm, version, true, sourceFilteredAsBytes,
+ Collections.emptyMap());
}
public static class Result {
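The call sites updated elsewhere in this change feed the sequence number and primary term straight from the engine's get result into the widened extractGetResult signature; a minimal usage sketch (variable names mirror the callers above, nothing new is introduced):

    GetResult filtered = UpdateHelper.extractGetResult(request, request.index(),
        getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
        updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef());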
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
index 9e33e62622a..03d721b26fe 100644
--- a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
@@ -162,8 +162,9 @@ public class UpdateResponse extends DocWriteResponse {
update = new UpdateResponse(shardId, type, id, version, result);
}
if (getResult != null) {
- update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(), update.getVersion(),
- getResult.isExists(),getResult.internalSourceRef(), getResult.getFields()));
+ update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), update.getVersion(),
+ getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
}
update.setForcedRefresh(forcedRefresh);
return update;
diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java
index c229a826ee8..d71a3f94d40 100644
--- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java
+++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java
@@ -46,7 +46,7 @@ import java.util.UUID;
public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements Custom, Iterable<RestoreInProgress.Entry> {
/**
- * Fallback UUID used for restore operations that were started before v7.0 and don't have a uuid in the cluster state.
+ * Fallback UUID used for restore operations that were started before v6.6 and don't have a uuid in the cluster state.
*/
public static final String BWC_UUID = new UUID(0, 0).toString();
@@ -436,7 +436,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements Custom, Iterable<RestoreInProgress.Entry> {
final ImmutableOpenMap.Builder<String, Entry> entriesBuilder = ImmutableOpenMap.builder(count);
for (int i = 0; i < count; i++) {
final String uuid;
- if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
uuid = in.readString();
} else {
uuid = BWC_UUID;
@@ -468,7 +468,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements Custom, Iterable<RestoreInProgress.Entry> {
out.writeVInt(entries.size());
for (ObjectCursor<Entry> v : entries.values()) {
Entry entry = v.value;
- if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
out.writeString(entry.uuid);
}
entry.snapshot().writeTo(out);
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index f1dd843d798..5d23971dddb 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -1531,14 +1531,14 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
if (sourceNumberOfShards < targetNumberOfShards) { // split
factor = targetNumberOfShards / sourceNumberOfShards;
if (factor * sourceNumberOfShards != targetNumberOfShards || factor <= 1) {
- throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " +
+ throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " +
"factor of ["
+ targetNumberOfShards + "]");
}
} else if (sourceNumberOfShards > targetNumberOfShards) { // shrink
factor = sourceNumberOfShards / targetNumberOfShards;
if (factor * targetNumberOfShards != sourceNumberOfShards || factor <= 1) {
- throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " +
+ throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " +
"multiple of ["
+ targetNumberOfShards + "]");
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java
index 3654d66ad58..25a605088ef 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java
@@ -222,7 +222,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject {
}
SnapshotRecoverySource(StreamInput in) throws IOException {
- if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+ if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
restoreUUID = in.readString();
} else {
restoreUUID = RestoreInProgress.BWC_UUID;
@@ -250,7 +250,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject {
@Override
protected void writeAdditionalFields(StreamOutput out) throws IOException {
- if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
out.writeString(restoreUUID);
}
snapshot.writeTo(out);
diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java
index ba70c703550..b98d766dd4e 100644
--- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java
+++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.get;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
@@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@@ -53,6 +55,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
public static final String _TYPE = "_type";
public static final String _ID = "_id";
private static final String _VERSION = "_version";
+ private static final String _SEQ_NO = "_seq_no";
+ private static final String _PRIMARY_TERM = "_primary_term";
private static final String FOUND = "found";
private static final String FIELDS = "fields";
@@ -60,6 +64,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
private String type;
private String id;
private long version;
+ private long seqNo;
+ private long primaryTerm;
private boolean exists;
private Map<String, DocumentField> fields;
private Map<String, Object> sourceAsMap;
@@ -69,11 +75,17 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
GetResult() {
}
- public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source,
- Map<String, DocumentField> fields) {
+ public GetResult(String index, String type, String id, long seqNo, long primaryTerm, long version, boolean exists,
+ BytesReference source, Map<String, DocumentField> fields) {
this.index = index;
this.type = type;
this.id = id;
+ this.seqNo = seqNo;
+ this.primaryTerm = primaryTerm;
+ assert (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) || (seqNo >= 0 && primaryTerm >= 1) :
+ "seqNo: " + seqNo + " primaryTerm: " + primaryTerm;
+ assert exists || (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) :
+ "doc not found but seqNo/primaryTerm are set";
this.version = version;
this.exists = exists;
this.source = source;
@@ -118,6 +130,20 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
return version;
}
+ /**
+ * The sequence number assigned to the last operation to have changed this document, if found.
+ */
+ public long getSeqNo() {
+ return seqNo;
+ }
+
+ /**
+ * The primary term of the last primary that has changed this document, if found.
+ */
+ public long getPrimaryTerm() {
+ return primaryTerm;
+ }
+
/**
* The source of the document if exists.
*/
@@ -213,6 +239,11 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
}
public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException {
+ if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { // seqNo may not be assigned if read from an old node
+ builder.field(_SEQ_NO, seqNo);
+ builder.field(_PRIMARY_TERM, primaryTerm);
+ }
+
List<DocumentField> metaFields = new ArrayList<>();
List<DocumentField> otherFields = new ArrayList<>();
if (fields != null && !fields.isEmpty()) {
@@ -282,6 +313,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
String currentFieldName = parser.currentName();
long version = -1;
+ long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
+ long primaryTerm = 0;
Boolean found = null;
BytesReference source = null;
Map<String, DocumentField> fields = new HashMap<>();
@@ -297,6 +330,10 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
id = parser.text();
} else if (_VERSION.equals(currentFieldName)) {
version = parser.longValue();
+ } else if (_SEQ_NO.equals(currentFieldName)) {
+ seqNo = parser.longValue();
+ } else if (_PRIMARY_TERM.equals(currentFieldName)) {
+ primaryTerm = parser.longValue();
} else if (FOUND.equals(currentFieldName)) {
found = parser.booleanValue();
} else {
@@ -326,7 +363,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
}
}
}
- return new GetResult(index, type, id, version, found, source, fields);
+ return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, fields);
}
public static GetResult fromXContent(XContentParser parser) throws IOException {
@@ -347,6 +384,13 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
index = in.readString();
type = in.readOptionalString();
id = in.readString();
+ if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+ seqNo = in.readZLong();
+ primaryTerm = in.readVLong();
+ } else {
+ seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
+ primaryTerm = 0L;
+ }
version = in.readLong();
exists = in.readBoolean();
if (exists) {
@@ -372,6 +416,10 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
out.writeString(index);
out.writeOptionalString(type);
out.writeString(id);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeZLong(seqNo);
+ out.writeVLong(primaryTerm);
+ }
out.writeLong(version);
out.writeBoolean(exists);
if (exists) {
@@ -397,6 +445,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
}
GetResult getResult = (GetResult) o;
return version == getResult.version &&
+ seqNo == getResult.seqNo &&
+ primaryTerm == getResult.primaryTerm &&
exists == getResult.exists &&
Objects.equals(index, getResult.index) &&
Objects.equals(type, getResult.type) &&
@@ -407,7 +457,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {
@Override
public int hashCode() {
- return Objects.hash(version, exists, index, type, id, fields, sourceAsMap());
+ return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, fields, sourceAsMap());
}
@Override
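A quick sketch of the widened GetResult constructor (the missing-document case mirrors the ShardGetService change below; sourceBytes and the concrete numbers are illustrative placeholders):

    // Found: seqNo >= 0 and primaryTerm >= 1, matching the constructor assertions above.
    GetResult found = new GetResult("my-index", "_doc", "1", 3L, 1L, 7L, true, sourceBytes, null);
    // Not found: the sentinel pair keeps _seq_no/_primary_term out of the rendered response.
    GetResult missing = new GetResult("my-index", "_doc", "1",
        SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null);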
diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java
index fc1796dfcc5..6d58b981ddc 100644
--- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java
+++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java
@@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
+import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -112,7 +113,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
public GetResult get(Engine.GetResult engineGetResult, String id, String type,
String[] fields, FetchSourceContext fetchSourceContext) {
if (!engineGetResult.exists()) {
- return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null);
}
currentMetric.inc();
@@ -168,7 +169,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
}
if (get == null || get.exists() == false) {
- return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null);
}
try {
@@ -233,7 +234,8 @@ public final class ShardGetService extends AbstractIndexShardComponent {
}
}
- return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields);
+ return new GetResult(shardId.getIndexName(), type, id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm,
+ get.version(), get.exists(), source, fields);
}
private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java
index 3b8281bd471..a095d7647d9 100644
--- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java
+++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java
@@ -134,7 +134,9 @@ public class CompoundProcessor implements Processor {
if (onFailureProcessors.isEmpty()) {
throw compoundProcessorException;
} else {
- executeOnFailure(ingestDocument, compoundProcessorException);
+ if (executeOnFailure(ingestDocument, compoundProcessorException) == false) {
+ return null;
+ }
break;
}
} finally {
@@ -145,13 +147,17 @@ public class CompoundProcessor implements Processor {
return ingestDocument;
}
-
- void executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception {
+ /**
+ * @return true if execution should continue, false if the document is dropped.
+ */
+ boolean executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception {
try {
putFailureMetadata(ingestDocument, exception);
for (Processor processor : onFailureProcessors) {
try {
- processor.execute(ingestDocument);
+ if (processor.execute(ingestDocument) == null) {
+ return false;
+ }
} catch (Exception e) {
throw newCompoundProcessorException(e, processor.getType(), processor.getTag());
}
@@ -159,6 +165,7 @@ public class CompoundProcessor implements Processor {
} finally {
removeFailureMetadata(ingestDocument);
}
+ return true;
}
private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) {
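With the change above, an on_failure processor can now drop the document the same way a regular processor does, by returning null from execute; a sketch (not part of the PR; it only assumes the Processor methods already used in this class):

    Processor dropInOnFailure = new Processor() {
        @Override
        public IngestDocument execute(IngestDocument ingestDocument) {
            return null; // null now propagates out of executeOnFailure and drops the document
        }
        @Override
        public String getType() { return "drop-sketch"; }
        @Override
        public String getTag() { return null; }
    };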
diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java
index 705e77028a1..6951e33d5e7 100644
--- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java
+++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java
@@ -24,11 +24,11 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.bulk.TransportBulkAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
@@ -388,13 +388,7 @@ public class IngestService implements ClusterStateApplier {
@Override
protected void doRun() {
for (DocWriteRequest<?> actionRequest : actionRequests) {
- IndexRequest indexRequest = null;
- if (actionRequest instanceof IndexRequest) {
- indexRequest = (IndexRequest) actionRequest;
- } else if (actionRequest instanceof UpdateRequest) {
- UpdateRequest updateRequest = (UpdateRequest) actionRequest;
- indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest();
- }
+ IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(actionRequest);
if (indexRequest == null) {
continue;
}
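The body of TransportBulkAction.getIndexWriteRequest is not shown in this excerpt; presumably it centralizes exactly the branch that was inlined here before, roughly:

    // Presumed shape of the shared helper (sketch, mirrors the removed lines above):
    static IndexRequest getIndexWriteRequest(DocWriteRequest<?> docWriteRequest) {
        IndexRequest indexRequest = null;
        if (docWriteRequest instanceof IndexRequest) {
            indexRequest = (IndexRequest) docWriteRequest;
        } else if (docWriteRequest instanceof UpdateRequest) {
            UpdateRequest updateRequest = (UpdateRequest) docWriteRequest;
            indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest();
        }
        return indexRequest;
    }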
diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java
index c48529d420c..af376bf7c3c 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java
@@ -19,12 +19,14 @@
package org.elasticsearch.rest.action.document;
+import org.apache.logging.log4j.LogManager;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -49,8 +51,14 @@ import static org.elasticsearch.rest.RestStatus.OK;
*/
public class RestGetSourceAction extends BaseRestHandler {
+ private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetSourceAction.class));
+ static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source"
+ + "requests is deprecated.";
+
public RestGetSourceAction(final Settings settings, final RestController controller) {
super(settings);
+ controller.registerHandler(GET, "/{index}/_source/{id}", this);
+ controller.registerHandler(HEAD, "/{index}/_source/{id}", this);
controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this);
controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this);
}
@@ -62,7 +70,13 @@ public class RestGetSourceAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
- final GetRequest getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
+ final GetRequest getRequest;
+ if (request.hasParam("type")) {
+ deprecationLogger.deprecatedAndMaybeLog("get_source_with_types", TYPES_DEPRECATION_MESSAGE);
+ getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
+ } else {
+ getRequest = new GetRequest(request.param("index"), request.param("id"));
+ }
getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh()));
getRequest.routing(request.param("routing"));
getRequest.preference(request.param("preference"));
diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java
index 3d8ea384546..7fd68852ce2 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchHit.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java
@@ -19,16 +19,6 @@
package org.elasticsearch.search;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.OriginalIndices;
@@ -61,6 +51,16 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.transport.RemoteClusterAware;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
@@ -311,10 +311,17 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<DocumentField> {
diff --git a/server/src/main/java/org/elasticsearch/search/SearchSortValues.java b/server/src/main/java/org/elasticsearch/search/SearchSortValues.java
--- a/server/src/main/java/org/elasticsearch/search/SearchSortValues.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchSortValues.java
- if (size > 0) {
- sortValues = new Object[size];
- for (int i = 0; i < sortValues.length; i++) {
- byte type = in.readByte();
- if (type == 0) {
- sortValues[i] = null;
- } else if (type == 1) {
- sortValues[i] = in.readString();
- } else if (type == 2) {
- sortValues[i] = in.readInt();
- } else if (type == 3) {
- sortValues[i] = in.readLong();
- } else if (type == 4) {
- sortValues[i] = in.readFloat();
- } else if (type == 5) {
- sortValues[i] = in.readDouble();
- } else if (type == 6) {
- sortValues[i] = in.readByte();
- } else if (type == 7) {
- sortValues[i] = in.readShort();
- } else if (type == 8) {
- sortValues[i] = in.readBoolean();
- } else {
- throw new IOException("Can't match type [" + type + "]");
- }
- }
+ SearchSortValues(StreamInput in) throws IOException {
+ this.formattedSortValues = in.readArray(Lucene::readSortValue, Object[]::new);
+ if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+ this.rawSortValues = in.readArray(Lucene::readSortValue, Object[]::new);
} else {
- sortValues = new Object[0];
+ this.rawSortValues = EMPTY_ARRAY;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeVInt(sortValues.length);
- for (Object sortValue : sortValues) {
- if (sortValue == null) {
- out.writeByte((byte) 0);
- } else {
- Class type = sortValue.getClass();
- if (type == String.class) {
- out.writeByte((byte) 1);
- out.writeString((String) sortValue);
- } else if (type == Integer.class) {
- out.writeByte((byte) 2);
- out.writeInt((Integer) sortValue);
- } else if (type == Long.class) {
- out.writeByte((byte) 3);
- out.writeLong((Long) sortValue);
- } else if (type == Float.class) {
- out.writeByte((byte) 4);
- out.writeFloat((Float) sortValue);
- } else if (type == Double.class) {
- out.writeByte((byte) 5);
- out.writeDouble((Double) sortValue);
- } else if (type == Byte.class) {
- out.writeByte((byte) 6);
- out.writeByte((Byte) sortValue);
- } else if (type == Short.class) {
- out.writeByte((byte) 7);
- out.writeShort((Short) sortValue);
- } else if (type == Boolean.class) {
- out.writeByte((byte) 8);
- out.writeBoolean((Boolean) sortValue);
- } else {
- throw new IOException("Can't handle sort field value of type [" + type + "]");
- }
- }
+ out.writeArray(Lucene::writeSortValue, this.formattedSortValues);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeArray(Lucene::writeSortValue, this.rawSortValues);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- if (sortValues.length > 0) {
+ if (formattedSortValues.length > 0) {
builder.startArray(Fields.SORT);
- for (Object sortValue : sortValues) {
+ for (Object sortValue : formattedSortValues) {
builder.value(sortValue);
}
builder.endArray();
@@ -142,24 +99,37 @@ public class SearchSortValues implements ToXContentFragment, Writeable {
return new SearchSortValues(parser.list().toArray());
}
- public Object[] sortValues() {
- return sortValues;
+ /**
+ * Returns the formatted version of the values that sorting was performed against
+ */
+ public Object[] getFormattedSortValues() {
+ return formattedSortValues;
+ }
+
+ /**
+ * Returns the raw version of the values that sorting was performed against
+ */
+ public Object[] getRawSortValues() {
+ return rawSortValues;
}
@Override
- public boolean equals(Object obj) {
- if (this == obj) {
+ public boolean equals(Object o) {
+ if (this == o) {
return true;
}
- if (obj == null || getClass() != obj.getClass()) {
+ if (o == null || getClass() != o.getClass()) {
return false;
}
- SearchSortValues other = (SearchSortValues) obj;
- return Arrays.equals(sortValues, other.sortValues);
+ SearchSortValues that = (SearchSortValues) o;
+ return Arrays.equals(formattedSortValues, that.formattedSortValues) &&
+ Arrays.equals(rawSortValues, that.rawSortValues);
}
@Override
public int hashCode() {
- return Arrays.hashCode(sortValues);
+ int result = Arrays.hashCode(formattedSortValues);
+ result = 31 * result + Arrays.hashCode(rawSortValues);
+ return result;
}
}
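The split into formatted and raw sort values follows the usual wire-compatibility pattern: the new field is only written and read when the other side is on a version that knows about it. A standalone sketch of that pattern over plain data streams (illustrative version constant, not the StreamInput/StreamOutput API):

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Sketch: gate a newly added field on the peer's wire version.
    final class VersionGatedPayload {
        static final int V_7_0_0 = 7000099; // illustrative version id

        final String[] formatted;
        final String[] raw;

        VersionGatedPayload(String[] formatted, String[] raw) {
            this.formatted = formatted;
            this.raw = raw;
        }

        void writeTo(DataOutputStream out, int peerVersion) throws IOException {
            writeArray(out, formatted);
            if (peerVersion >= V_7_0_0) {
                writeArray(out, raw); // older peers never see the new field
            }
        }

        static VersionGatedPayload readFrom(DataInputStream in, int peerVersion) throws IOException {
            String[] formatted = readArray(in);
            String[] raw = peerVersion >= V_7_0_0 ? readArray(in) : new String[0];
            return new VersionGatedPayload(formatted, raw);
        }

        private static void writeArray(DataOutputStream out, String[] values) throws IOException {
            out.writeInt(values.length);
            for (String value : values) {
                out.writeUTF(value);
            }
        }

        private static String[] readArray(DataInputStream in) throws IOException {
            String[] values = new String[in.readInt()];
            for (int i = 0; i < values.length; i++) {
                values[i] = in.readUTF();
            }
            return values;
        }
    }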
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
index 2c36af8638f..237e73e572a 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
@@ -183,10 +183,11 @@ public abstract class RemoteClusterAware {
* (ProxyAddress, [SeedNodeSuppliers]). If a cluster is configured with a proxy address all seed nodes will point to
* {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node.
*/
- protected static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(Settings settings) {
- final Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> remoteSeeds =
+ protected static Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> buildRemoteClustersDynamicConfig(
+ final Settings settings) {
+ final Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> remoteSeeds =
buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS);
- final Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> searchRemoteSeeds =
+ final Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> searchRemoteSeeds =
buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS);
// sort the intersection for predictable output order
final NavigableSet<String> intersection =
@@ -205,7 +206,7 @@ public abstract class RemoteClusterAware {
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
- private static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(
+ private static Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> buildRemoteClustersDynamicConfig(
final Settings settings, final Setting.AffixSetting<List<String>> seedsSetting) {
final Stream<Setting<List<String>>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings);
return allConcreteSettings.collect(
@@ -214,9 +215,9 @@ public abstract class RemoteClusterAware {
List<String> addresses = concreteSetting.get(settings);
final boolean proxyMode =
REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings);
- List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size());
+ List<Tuple<String, Supplier<DiscoveryNode>>> nodes = new ArrayList<>(addresses.size());
for (String address : addresses) {
- nodes.add(() -> buildSeedNode(clusterName, address, proxyMode));
+ nodes.add(Tuple.tuple(address, () -> buildSeedNode(clusterName, address, proxyMode)));
}
return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes);
}));
@@ -304,16 +305,24 @@ public abstract class RemoteClusterAware {
(namespace, value) -> {});
}
-
- protected static InetSocketAddress parseSeedAddress(String remoteHost) {
- String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost));
+ static InetSocketAddress parseSeedAddress(String remoteHost) {
+ final Tuple<String, Integer> hostPort = parseHostPort(remoteHost);
+ final String host = hostPort.v1();
+ assert hostPort.v2() != null : remoteHost;
+ final int port = hostPort.v2();
InetAddress hostAddress;
try {
hostAddress = InetAddress.getByName(host);
} catch (UnknownHostException e) {
throw new IllegalArgumentException("unknown host [" + host + "]", e);
}
- return new InetSocketAddress(hostAddress, parsePort(remoteHost));
+ return new InetSocketAddress(hostAddress, port);
+ }
+
+ public static Tuple<String, Integer> parseHostPort(final String remoteHost) {
+ final String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost));
+ final int port = parsePort(remoteHost);
+ return Tuple.tuple(host, port);
}
private static int parsePort(String remoteHost) {
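A standalone sketch of the host/port split that parseHostPort performs (splitting on the last ':' and the error message are assumptions; the real indexOfPortSeparator/parsePort helpers are not shown in this excerpt):

    import java.util.AbstractMap;
    import java.util.Map;

    final class HostPortSketch {
        // Split "host:port" on the last ':' so bracketed IPv6 literals keep their colons in the host part.
        static Map.Entry<String, Integer> parseHostPort(String remoteHost) {
            int separator = remoteHost.lastIndexOf(':');
            if (separator < 0 || separator == remoteHost.length() - 1) {
                throw new IllegalArgumentException("remote hosts need to be configured as [host:port], got [" + remoteHost + "]");
            }
            String host = remoteHost.substring(0, separator);
            int port = Integer.parseInt(remoteHost.substring(separator + 1));
            return new AbstractMap.SimpleImmutableEntry<>(host, port);
        }

        private HostPortSketch() {}
    }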
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java
index 87dd99e6590..7ea55925262 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java
@@ -35,6 +35,7 @@ import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
@@ -95,7 +96,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
private final Predicate<DiscoveryNode> nodePredicate;
private final ThreadPool threadPool;
private volatile String proxyAddress;
- private volatile List<Supplier<DiscoveryNode>> seedNodes;
+ private volatile List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes;
private volatile boolean skipUnavailable;
private final ConnectHandler connectHandler;
private final TimeValue initialConnectionTimeout;
@@ -111,7 +112,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
* @param nodePredicate a predicate to filter eligible remote nodes to connect to
* @param proxyAddress the proxy address
*/
- RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
+ RemoteClusterConnection(Settings settings, String clusterAlias, List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
TransportService transportService, int maxNumRemoteConnections, Predicate<DiscoveryNode> nodePredicate,
String proxyAddress) {
this(settings, clusterAlias, seedNodes, transportService, maxNumRemoteConnections, nodePredicate, proxyAddress,
@@ -119,7 +120,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
}
// Public for tests to pass a StubbableConnectionManager
- RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
+ RemoteClusterConnection(Settings settings, String clusterAlias, List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
TransportService transportService, int maxNumRemoteConnections, Predicate<DiscoveryNode> nodePredicate,
String proxyAddress, ConnectionManager connectionManager) {
this.transportService = transportService;
@@ -155,7 +156,10 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
/**
* Updates the list of seed nodes for this cluster connection
*/
- synchronized void updateSeedNodes(String proxyAddress, List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) {
+ synchronized void updateSeedNodes(
+ final String proxyAddress,
+ final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
+ final ActionListener<Void> connectListener) {
this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes));
this.proxyAddress = proxyAddress;
connectHandler.connect(connectListener);
@@ -465,7 +469,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
maybeConnect();
}
});
- collectRemoteNodes(seedNodes.iterator(), transportService, connectionManager, listener);
+ collectRemoteNodes(seedNodes.stream().map(Tuple::v2).iterator(), transportService, connectionManager, listener);
}
});
}
@@ -672,10 +676,13 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
* Get the information about remote nodes to be rendered on {@code _remote/info} requests.
*/
public RemoteConnectionInfo getConnectionInfo() {
- List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect
- (Collectors.toList());
- return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(),
- initialConnectionTimeout, skipUnavailable);
+ return new RemoteConnectionInfo(
+ clusterAlias,
+ seedNodes.stream().map(Tuple::v1).collect(Collectors.toList()),
+ maxNumRemoteConnections,
+ connectedNodes.size(),
+ initialConnectionTimeout,
+ skipUnavailable);
}
int getNumNodesConnected() {
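The type change pairs each configured seed address with its lazily built DiscoveryNode supplier, so getConnectionInfo() can report the configured strings while the connect path keeps resolving nodes on demand. A reduced sketch of that pairing (plain Supplier<String> stands in for Supplier<DiscoveryNode>):

    import java.util.List;
    import java.util.function.Supplier;
    import java.util.stream.Collectors;

    // Stand-in for Tuple<String, Supplier<DiscoveryNode>>: the configured address travels with the lazy node factory.
    final class Seed {
        final String configuredAddress;      // rendered by _remote/info exactly as configured
        final Supplier<String> nodeFactory;  // resolved only when a connection is attempted

        Seed(String configuredAddress, Supplier<String> nodeFactory) {
            this.configuredAddress = configuredAddress;
            this.nodeFactory = nodeFactory;
        }

        static List<String> configuredAddresses(List<Seed> seeds) {
            return seeds.stream().map(seed -> seed.configuredAddress).collect(Collectors.toList());
        }

        static List<String> resolveAll(List<Seed> seeds) {
            return seeds.stream().map(seed -> seed.nodeFactory.get()).collect(Collectors.toList());
        }
    }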
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
index fda0b90f19e..cb802f13fdb 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java
@@ -201,7 +201,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
* @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes
* @param connectionListener a listener invoked once every configured cluster has been connected to
*/
- private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds,
+ private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> seeds,
ActionListener<Void> connectionListener) {
if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) {
throw new IllegalArgumentException("remote clusters must not have the empty string as its key");
@@ -212,8 +212,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
} else {
CountDown countDown = new CountDown(seeds.size());
remoteClusters.putAll(this.remoteClusters);
- for (Map.Entry<String, Tuple<String, List<Supplier<DiscoveryNode>>>> entry : seeds.entrySet()) {
- List<Supplier<DiscoveryNode>> seedList = entry.getValue().v2();
+ for (Map.Entry<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> entry : seeds.entrySet()) {
+ List<Tuple<String, Supplier<DiscoveryNode>>> seedList = entry.getValue().v2();
String proxyAddress = entry.getValue().v1();
RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey());
@@ -408,9 +408,10 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
final List<String> addresses,
final String proxyAddress,
final ActionListener<Void> connectionListener) {
- final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () ->
- buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress))
- ).collect(Collectors.toList());
+ final List<Tuple<String, Supplier<DiscoveryNode>>> nodes =
+ addresses.stream().<Tuple<String, Supplier<DiscoveryNode>>>map(address -> Tuple.tuple(address, () ->
+ buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress)))
+ ).collect(Collectors.toList());
updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener);
}
@@ -421,7 +422,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
void initializeRemoteClusters() {
final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
final PlainActionFuture<Void> future = new PlainActionFuture<>();
- Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds = RemoteClusterAware.buildRemoteClustersDynamicConfig(settings);
+ Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> seeds =
+ RemoteClusterAware.buildRemoteClustersDynamicConfig(settings);
updateRemoteClusters(seeds, future);
try {
future.get(timeValue.millis(), TimeUnit.MILLISECONDS);
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java
index c2024e39228..7c51ca7b9c8 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java
@@ -16,9 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
+
package org.elasticsearch.transport;
import org.elasticsearch.Version;
+import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@@ -27,25 +29,29 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import static java.util.Collections.emptyList;
-
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Arrays;
import java.util.List;
import java.util.Objects;
+import java.util.stream.Collectors;
+
+import static java.util.Collections.emptyList;
/**
* This class encapsulates all remote cluster information to be rendered on
* {@code _remote/info} requests.
*/
public final class RemoteConnectionInfo implements ToXContentFragment, Writeable {
- final List<TransportAddress> seedNodes;
+ final List<String> seedNodes;
final int connectionsPerCluster;
final TimeValue initialConnectionTimeout;
final int numNodesConnected;
final String clusterAlias;
final boolean skipUnavailable;
- RemoteConnectionInfo(String clusterAlias, List<TransportAddress> seedNodes,
+ RemoteConnectionInfo(String clusterAlias, List<String> seedNodes,
int connectionsPerCluster, int numNodesConnected,
TimeValue initialConnectionTimeout, boolean skipUnavailable) {
this.clusterAlias = clusterAlias;
@@ -57,7 +63,17 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
}
public RemoteConnectionInfo(StreamInput input) throws IOException {
- seedNodes = input.readList(TransportAddress::new);
+ if (input.getVersion().onOrAfter(Version.V_7_0_0)) {
+ seedNodes = Arrays.asList(input.readStringArray());
+ } else {
+ // versions prior to 7.0.0 sent the resolved transport address of the seed nodes
+ final List<TransportAddress> transportAddresses = input.readList(TransportAddress::new);
+ seedNodes =
+ transportAddresses
+ .stream()
+ .map(a -> a.address().getHostString() + ":" + a.address().getPort())
+ .collect(Collectors.toList());
+ }
if (input.getVersion().before(Version.V_7_0_0)) {
/*
* Versions before 7.0 sent the HTTP addresses of all nodes in the
@@ -78,7 +94,26 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeList(seedNodes);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeStringArray(seedNodes.toArray(new String[0]));
+ } else {
+ // versions prior to 7.0.0 received the resolved transport address of the seed nodes
+ out.writeList(seedNodes
+ .stream()
+ .map(
+ s -> {
+ final Tuple<String, Integer> hostPort = RemoteClusterAware.parseHostPort(s);
+ assert hostPort.v2() != null : s;
+ try {
+ return new TransportAddress(
+ InetAddress.getByAddress(hostPort.v1(), TransportAddress.META_ADDRESS.getAddress()),
+ hostPort.v2());
+ } catch (final UnknownHostException e) {
+ throw new AssertionError(e);
+ }
+ })
+ .collect(Collectors.toList()));
+ }
if (out.getVersion().before(Version.V_7_0_0)) {
/*
* Versions before 7.0 sent the HTTP addresses of all nodes in the
@@ -104,8 +139,8 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
builder.startObject(clusterAlias);
{
builder.startArray("seeds");
- for (TransportAddress addr : seedNodes) {
- builder.value(addr.toString());
+ for (String addr : seedNodes) {
+ builder.value(addr);
}
builder.endArray();
builder.field("connected", numNodesConnected > 0);
@@ -136,4 +171,5 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
return Objects.hash(seedNodes, connectionsPerCluster, initialConnectionTimeout,
numNodesConnected, clusterAlias, skipUnavailable);
}
+
}
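When talking to a pre-7.0.0 node the configured seed string has to be folded back into a resolved-looking address; the write path above does that with the placeholder META_ADDRESS and the parsed port. A standalone sketch of that down-conversion (plain InetSocketAddress instead of TransportAddress):

    import java.net.InetAddress;
    import java.net.InetSocketAddress;
    import java.net.UnknownHostException;

    final class LegacySeedAddressSketch {
        // Keep the configured hostname for display but bind it to a fixed placeholder IP,
        // mirroring the META_ADDRESS trick used for peers older than 7.0.0.
        static InetSocketAddress toPlaceholderAddress(String configuredSeed) {
            int separator = configuredSeed.lastIndexOf(':');
            String host = configuredSeed.substring(0, separator);
            int port = Integer.parseInt(configuredSeed.substring(separator + 1));
            try {
                byte[] placeholder = new byte[] {0, 0, 0, 0}; // stand-in for TransportAddress.META_ADDRESS
                return new InetSocketAddress(InetAddress.getByAddress(host, placeholder), port);
            } catch (UnknownHostException e) {
                throw new AssertionError(e); // cannot happen: the raw address bytes are well-formed
            }
        }

        private LegacySeedAddressSketch() {}
    }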
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
index f25f8844153..219aee9ebe2 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
@@ -28,6 +28,7 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
+import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateApplier;
@@ -408,6 +409,57 @@ public class TransportBulkActionIngestTests extends ESTestCase {
validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE_ALIAS, "type", "id"));
}
+ public void testUseDefaultPipelineWithBulkUpsert() throws Exception {
+ Exception exception = new Exception("fake exception");
+ BulkRequest bulkRequest = new BulkRequest();
+ IndexRequest indexRequest1 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id1").source(Collections.emptyMap());
+ IndexRequest indexRequest2 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id2").source(Collections.emptyMap());
+ IndexRequest indexRequest3 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id3").source(Collections.emptyMap());
+ UpdateRequest upsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id1").upsert(indexRequest1).script(mockScript("1"));
+ UpdateRequest docAsUpsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").doc(indexRequest2).docAsUpsert(true);
+ // this test only covers the mechanics that scripted bulk upserts will execute a default pipeline. However, in practice scripted
+ // bulk upserts with a default pipeline are a bit surprising since the script executes AFTER the pipeline.
+ UpdateRequest scriptedUpsert = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").upsert(indexRequest3).script(mockScript("1"))
+ .scriptedUpsert(true);
+ bulkRequest.add(upsertRequest).add(docAsUpsertRequest).add(scriptedUpsert);
+
+ AtomicBoolean responseCalled = new AtomicBoolean(false);
+ AtomicBoolean failureCalled = new AtomicBoolean(false);
+ assertNull(indexRequest1.getPipeline());
+ assertNull(indexRequest2.getPipeline());
+ assertNull(indexRequest3.getPipeline());
+ action.execute(null, bulkRequest, ActionListener.wrap(
+ response -> {
+ BulkItemResponse itemResponse = response.iterator().next();
+ assertThat(itemResponse.getFailure().getMessage(), containsString("fake exception"));
+ responseCalled.set(true);
+ },
+ e -> {
+ assertThat(e, sameInstance(exception));
+ failureCalled.set(true);
+ }));
+
+ // check failure works, and passes through to the listener
+ assertFalse(action.isExecuted); // haven't executed yet
+ assertFalse(responseCalled.get());
+ assertFalse(failureCalled.get());
+ verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any());
+ assertEquals(indexRequest1.getPipeline(), "default_pipeline");
+ assertEquals(indexRequest2.getPipeline(), "default_pipeline");
+ assertEquals(indexRequest3.getPipeline(), "default_pipeline");
+ completionHandler.getValue().accept(exception);
+ assertTrue(failureCalled.get());
+
+ // now check success of the transport bulk action
+ indexRequest1.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
+ indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
+ indexRequest3.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
+ completionHandler.getValue().accept(null);
+ assertTrue(action.isExecuted);
+ assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one
+ verifyZeroInteractions(transportService);
+ }
+
public void testCreateIndexBeforeRunPipeline() throws Exception {
Exception exception = new Exception("fake exception");
IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id");
@@ -445,6 +497,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
indexRequest.source(Collections.emptyMap());
AtomicBoolean responseCalled = new AtomicBoolean(false);
AtomicBoolean failureCalled = new AtomicBoolean(false);
+ assertNull(indexRequest.getPipeline());
singleItemBulkWriteAction.execute(null, indexRequest, ActionListener.wrap(
response -> {
responseCalled.set(true);
@@ -459,6 +512,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
assertFalse(responseCalled.get());
assertFalse(failureCalled.get());
verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any());
+ assertEquals(indexRequest.getPipeline(), "default_pipeline");
completionHandler.getValue().accept(exception);
assertTrue(failureCalled.get());
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
index a058cf47741..162ef56553d 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
@@ -23,8 +23,10 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver;
import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
+import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -132,4 +134,23 @@ public class TransportBulkActionTests extends ESTestCase {
throw new AssertionError(exception);
}));
}
+
+ public void testGetIndexWriteRequest() throws Exception {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "id1").source(Collections.emptyMap());
+ UpdateRequest upsertRequest = new UpdateRequest("index", "type", "id1").upsert(indexRequest).script(mockScript("1"));
+ UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "type", "id2").doc(indexRequest).docAsUpsert(true);
+ UpdateRequest scriptedUpsert = new UpdateRequest("index", "type", "id2").upsert(indexRequest).script(mockScript("1"))
+ .scriptedUpsert(true);
+
+ assertEquals(TransportBulkAction.getIndexWriteRequest(indexRequest), indexRequest);
+ assertEquals(TransportBulkAction.getIndexWriteRequest(upsertRequest), indexRequest);
+ assertEquals(TransportBulkAction.getIndexWriteRequest(docAsUpsertRequest), indexRequest);
+ assertEquals(TransportBulkAction.getIndexWriteRequest(scriptedUpsert), indexRequest);
+
+ DeleteRequest deleteRequest = new DeleteRequest("index", "id");
+ assertNull(TransportBulkAction.getIndexWriteRequest(deleteRequest));
+
+ UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1");
+ assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest));
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java
index ca5c35ccab3..2a04a976677 100644
--- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java
@@ -65,7 +65,7 @@ public class ExplainResponseTests extends AbstractStreamableXContentTestCase<ExplainResponse> {
diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
--- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis);
Streamable action = result.action();
assertThat(action, instanceOf(IndexRequest.class));
@@ -372,7 +373,7 @@ public class UpdateRequestTests extends ESTestCase {
.script(mockInlineScript("ctx._timestamp = ctx._now"))
.scriptedUpsert(true);
// We simulate that the document is not existing yet
- GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null);
+ GetResult getResult = new GetResult("test", "type1", "2", 0, 1, 0, true, new BytesArray("{}"), null);
UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L);
Streamable action = result.action();
assertThat(action, instanceOf(IndexRequest.class));
@@ -381,7 +382,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testIndexTimeout() {
final GetResult getResult =
- new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null);
+ new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null);
final UpdateRequest updateRequest =
new UpdateRequest("test", "type", "1")
.script(mockInlineScript("return"))
@@ -391,7 +392,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testDeleteTimeout() {
final GetResult getResult =
- new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null);
+ new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null);
final UpdateRequest updateRequest =
new UpdateRequest("test", "type", "1")
.script(mockInlineScript("ctx.op = delete"))
@@ -402,7 +403,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testUpsertTimeout() throws IOException {
final boolean exists = randomBoolean();
final BytesReference source = exists ? new BytesArray("{\"f\":\"v\"}") : null;
- final GetResult getResult = new GetResult("test", "type", "1", 0, exists, source, null);
+ final GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, exists, source, null);
final XContentBuilder sourceBuilder = jsonBuilder();
sourceBuilder.startObject();
{
@@ -535,7 +536,7 @@ public class UpdateRequestTests extends ESTestCase {
}
public void testRoutingExtraction() throws Exception {
- GetResult getResult = new GetResult("test", "type", "1", 0, false, null, null);
+ GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, false, null, null);
IndexRequest indexRequest = new IndexRequest("test", "type", "1");
// There is no routing and parent because the document doesn't exist
@@ -545,7 +546,7 @@ public class UpdateRequestTests extends ESTestCase {
assertNull(UpdateHelper.calculateRouting(getResult, indexRequest));
// Doc exists but has no source or fields
- getResult = new GetResult("test", "type", "1", 0, true, null, null);
+ getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, null);
// There is no routing and parent on either request
assertNull(UpdateHelper.calculateRouting(getResult, indexRequest));
@@ -554,7 +555,7 @@ public class UpdateRequestTests extends ESTestCase {
fields.put("_routing", new DocumentField("_routing", Collections.singletonList("routing1")));
// Doc exists and has the parent and routing fields
- getResult = new GetResult("test", "type", "1", 0, true, null, fields);
+ getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, fields);
// Use the get result parent and routing
assertThat(UpdateHelper.calculateRouting(getResult, indexRequest), equalTo("routing1"));
@@ -563,7 +564,7 @@ public class UpdateRequestTests extends ESTestCase {
@SuppressWarnings("deprecated") // VersionType.FORCE is deprecated
public void testCalculateUpdateVersion() throws Exception {
long randomVersion = randomIntBetween(0, 100);
- GetResult getResult = new GetResult("test", "type", "1", randomVersion, true, new BytesArray("{}"), null);
+ GetResult getResult = new GetResult("test", "type", "1", 0, 1, randomVersion, true, new BytesArray("{}"), null);
UpdateRequest request = new UpdateRequest("test", "type1", "1");
long version = UpdateHelper.calculateUpdateVersion(request, getResult);
@@ -580,7 +581,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testNoopDetection() throws Exception {
ShardId shardId = new ShardId("test", "", 0);
- GetResult getResult = new GetResult("test", "type", "1", 0, true,
+ GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true,
new BytesArray("{\"body\": \"foo\"}"),
null);
@@ -611,7 +612,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testUpdateScript() throws Exception {
ShardId shardId = new ShardId("test", "", 0);
- GetResult getResult = new GetResult("test", "type", "1", 0, true,
+ GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true,
new BytesArray("{\"body\": \"bar\"}"),
null);
diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
index c8d63f73732..8ec0423b406 100644
--- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
@@ -74,11 +74,12 @@ public class UpdateResponseTests extends ESTestCase {
UpdateResponse updateResponse = new UpdateResponse(new ReplicationResponse.ShardInfo(3, 2),
new ShardId("books", "books_uuid", 2), "book", "1", 7, 17, 2, UPDATED);
- updateResponse.setGetResult(new GetResult("books", "book", "1", 2, true, source, fields));
+ updateResponse.setGetResult(new GetResult("books", "book", "1",0, 1, 2, true, source, fields));
String output = Strings.toString(updateResponse);
assertEquals("{\"_index\":\"books\",\"_type\":\"book\",\"_id\":\"1\",\"_version\":2,\"result\":\"updated\"," +
- "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{\"found\":true," +
+ "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{" +
+ "\"_seq_no\":0,\"_primary_term\":1,\"found\":true," +
"\"_source\":{\"title\":\"Book title\",\"isbn\":\"ABC-123\"},\"fields\":{\"isbn\":[\"ABC-123\"],\"title\":[\"Book " +
"title\"]}}}", output);
}
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
index 393f7f6b1d4..1fdea596afb 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
@@ -227,7 +227,7 @@ public class IndexMetaDataTests extends ESTestCase {
assertEquals("the number of target shards (0) must be greater than the shard id: 0",
expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 0)).getMessage());
- assertEquals("the number of source shards [2] must be a must be a factor of [3]",
+ assertEquals("the number of source shards [2] must be a factor of [3]",
expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 3)).getMessage());
assertEquals("the number of routing shards [4] must be a multiple of the target shards [8]",
@@ -285,6 +285,6 @@ public class IndexMetaDataTests extends ESTestCase {
Settings notAFactorySettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_routing_shards", 3).build();
iae = expectThrows(IllegalArgumentException.class,
() -> IndexMetaData.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING.get(notAFactorySettings));
- assertEquals("the number of source shards [2] must be a must be a factor of [3]", iae.getMessage());
+ assertEquals("the number of source shards [2] must be a factor of [3]", iae.getMessage());
}
}
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
index 6cbd83e5b24..ec89e085f07 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
@@ -154,7 +154,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
MetaDataCreateIndexService.validateShrinkIndex(state, "source", Collections.emptySet(), "target", targetSettings)
).getMessage());
- assertEquals("the number of source shards [8] must be a must be a multiple of [3]",
+ assertEquals("the number of source shards [8] must be a multiple of [3]",
expectThrows(IllegalArgumentException.class, () ->
MetaDataCreateIndexService.validateShrinkIndex(createClusterState("source", 8, randomIntBetween(0, 10),
Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target",
@@ -221,7 +221,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
).getMessage());
- assertEquals("the number of source shards [3] must be a must be a factor of [4]",
+ assertEquals("the number of source shards [3] must be a factor of [4]",
expectThrows(IllegalArgumentException.class, () ->
MetaDataCreateIndexService.validateSplitIndex(createClusterState("source", 3, randomIntBetween(0, 10),
Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target",
diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
index ea894a2edd0..1891be362b8 100644
--- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
+++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
@@ -531,24 +531,26 @@ public class LuceneTests extends ESTestCase {
}
public static Object randomSortValue() {
- switch(randomIntBetween(0, 8)) {
+ switch(randomIntBetween(0, 9)) {
case 0:
- return randomAlphaOfLengthBetween(3, 10);
+ return null;
case 1:
- return randomInt();
+ return randomAlphaOfLengthBetween(3, 10);
case 2:
- return randomLong();
+ return randomInt();
case 3:
- return randomFloat();
+ return randomLong();
case 4:
- return randomDouble();
+ return randomFloat();
case 5:
- return randomByte();
+ return randomDouble();
case 6:
- return randomShort();
+ return randomByte();
case 7:
- return randomBoolean();
+ return randomShort();
case 8:
+ return randomBoolean();
+ case 9:
return new BytesRef(randomAlphaOfLengthBetween(3, 10));
default:
throw new UnsupportedOperationException();
diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java
index 5ada31b6129..c87a896d318 100644
--- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java
+++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java
@@ -55,7 +55,8 @@ public class EnvironmentTests extends ESTestCase {
Environment environment = newEnvironment();
assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue());
- environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
+ environment = newEnvironment(Settings.builder()
+ .putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue());
diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
index 7a24ebaf048..63635f5cbe7 100644
--- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
+++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
@@ -352,7 +352,8 @@ public class NodeEnvironmentTests extends ESTestCase {
for (int i = 0; i < iters; i++) {
int shard = randomIntBetween(0, counts.length - 1);
try {
- try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), scaledRandomIntBetween(0, 10))) {
+ try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard),
+ scaledRandomIntBetween(0, 10))) {
counts[shard].value++;
countsAtomic[shard].incrementAndGet();
assertEquals(flipFlop[shard].incrementAndGet(), 1);
@@ -386,7 +387,9 @@ public class NodeEnvironmentTests extends ESTestCase {
final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "myindexUUID").build();
IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", indexSettings);
- IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build());
+ IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder()
+ .put(indexSettings)
+ .put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build());
Index index = new Index("myindex", "myindexUUID");
ShardId sid = new ShardId(index, 0);
diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java
index 1cc2612041f..0dc6b2573ea 100644
--- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java
+++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java
@@ -44,6 +44,7 @@ import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.index.get.DocumentFieldTests.randomDocumentField;
+import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
@@ -72,15 +73,16 @@ public class GetResultTests extends ESTestCase {
public void testToXContent() throws IOException {
{
- GetResult getResult = new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " +
+ GetResult getResult = new GetResult("index", "type", "id", 0, 1, 1, true, new BytesArray("{ \"field1\" : " +
"\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1",
singletonList("value1"))));
String output = Strings.toString(getResult);
- assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " +
- ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output);
+ assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," +
+ "\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}",
+ output);
}
{
- GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null);
+ GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null);
String output = Strings.toString(getResult);
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output);
}
@@ -92,7 +94,7 @@ public class GetResultTests extends ESTestCase {
GetResult getResult = tuple.v1();
// We don't expect to retrieve the index/type/id of the GetResult because they are not rendered
// by the toXContentEmbedded method.
- GetResult expectedGetResult = new GetResult(null, null, null, -1,
+ GetResult expectedGetResult = new GetResult(null, null, null, tuple.v2().getSeqNo(), tuple.v2().getPrimaryTerm(), -1,
tuple.v2().isExists(), tuple.v2().sourceRef(), tuple.v2().getFields());
boolean humanReadable = randomBoolean();
@@ -118,16 +120,16 @@ public class GetResultTests extends ESTestCase {
fields.put("foo", new DocumentField("foo", singletonList("bar")));
fields.put("baz", new DocumentField("baz", Arrays.asList("baz_0", "baz_1")));
- GetResult getResult = new GetResult("index", "type", "id", 2, true,
+ GetResult getResult = new GetResult("index", "type", "id", 0, 1, 2, true,
new BytesArray("{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}"), fields);
BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false);
- assertEquals("{\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," +
+ assertEquals("{\"_seq_no\":0,\"_primary_term\":1,\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," +
"\"fields\":{\"foo\":[\"bar\"],\"baz\":[\"baz_0\",\"baz_1\"]}}", originalBytes.utf8ToString());
}
public void testToXContentEmbeddedNotFound() throws IOException {
- GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null);
+ GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null);
BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false);
assertEquals("{\"found\":false}", originalBytes.utf8ToString());
@@ -149,25 +151,34 @@ public class GetResultTests extends ESTestCase {
}
public static GetResult copyGetResult(GetResult getResult) {
- return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
- getResult.isExists(), getResult.internalSourceRef(), getResult.getFields());
+ return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
+ getResult.isExists(), getResult.internalSourceRef(), getResult.getFields());
}
public static GetResult mutateGetResult(GetResult getResult) {
List<Supplier<GetResult>> mutations = new ArrayList<>();
- mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), getResult.getVersion(),
+ mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
- mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), getResult.getVersion(),
- getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
- mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), getResult.getVersion(),
- getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
- mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomNonNegativeLong(),
- getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
- mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
- getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields()));
- mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
- getResult.isExists(), RandomObjects.randomSource(random()), getResult.getFields()));
- mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
+ mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
+ getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
+ mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
+ getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
+ mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), randomNonNegativeLong(),
+ getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
+ mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
+ getResult.isExists() ? UNASSIGNED_SEQ_NO : getResult.getSeqNo(),
+ getResult.isExists() ? 0 : getResult.getPrimaryTerm(),
+ getResult.getVersion(), getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields()));
+ mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(),
+ RandomObjects.randomSource(random()), getResult.getFields()));
+ mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
+ getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON).v1()));
return randomFrom(mutations).get();
}
@@ -177,12 +188,16 @@ public class GetResultTests extends ESTestCase {
final String type = randomAlphaOfLengthBetween(3, 10);
final String id = randomAlphaOfLengthBetween(3, 10);
final long version;
+ final long seqNo;
+ final long primaryTerm;
final boolean exists;
BytesReference source = null;
Map<String, DocumentField> fields = null;
Map<String, DocumentField> expectedFields = null;
if (frequently()) {
version = randomNonNegativeLong();
+ seqNo = randomNonNegativeLong();
+ primaryTerm = randomLongBetween(1, 100);
exists = true;
if (frequently()) {
source = RandomObjects.randomSource(random());
@@ -193,11 +208,13 @@ public class GetResultTests extends ESTestCase {
expectedFields = tuple.v2();
}
} else {
+ seqNo = UNASSIGNED_SEQ_NO;
+ primaryTerm = 0;
version = -1;
exists = false;
}
- GetResult getResult = new GetResult(index, type, id, version, exists, source, fields);
- GetResult expectedGetResult = new GetResult(index, type, id, version, exists, source, expectedFields);
+ GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, fields);
+ GetResult expectedGetResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, expectedFields);
return Tuple.tuple(getResult, expectedGetResult);
}
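The GetResult constructor exercised by these tests now takes the sequence number and primary term right after the id. A short sketch of the two common shapes used above (values are illustrative; UNASSIGNED_SEQ_NO plus primary term 0 marks a document that does not exist):

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.index.get.GetResult;

    import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;

    final class GetResultExamples {
        // An existing document: real seqNo/primaryTerm assigned by the primary shard.
        static GetResult existingDoc() {
            return new GetResult("index", "type", "id", 0, 1, 1, true, new BytesArray("{\"field\":\"value\"}"), null);
        }

        // A missing document: no sequence number was ever assigned to it.
        static GetResult missingDoc() {
            return new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, -1, false, null, null);
        }

        private GetResultExamples() {}
    }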
diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
index ca9a21973aa..bcd2b4ef144 100644
--- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
@@ -131,7 +131,8 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQueryBuilder> {
diff --git a/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java
--- a/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java
+ TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
+ Processor processor2 = new Processor() {
+ @Override
+ public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
+ //Simulates the drop processor
+ return null;
+ }
+
+ @Override
+ public String getType() {
+ return "drop";
+ }
+
+ @Override
+ public String getTag() {
+ return null;
+ }
+ };
+
+ LongSupplier relativeTimeProvider = mock(LongSupplier.class);
+ when(relativeTimeProvider.getAsLong()).thenReturn(0L);
+ CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1),
+ Collections.singletonList(processor2), relativeTimeProvider);
+ assertNull(compoundProcessor.execute(ingestDocument));
+ assertThat(processor1.getInvokedCounter(), equalTo(1));
+ assertStats(compoundProcessor, 1, 1, 0);
+ }
+
public void testSingleProcessorWithNestedFailures() throws Exception {
TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> {
diff --git a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
index 6624d4eb8de..0fb5f7ac114 100644
--- a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
+++ b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
@@ -76,14 +76,16 @@ public class FullRollingRestartIT extends ESIntegTestCase {
internalCluster().startNode(settings);
// make sure the cluster state is green, and all has been recovered
- assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
+ assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
+ .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
logger.info("--> add two more nodes");
internalCluster().startNode(settings);
internalCluster().startNode(settings);
// make sure the cluster state is green, and all has been recovered
- assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5"));
+ assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
+ .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5"));
logger.info("--> refreshing and checking data");
refresh();
@@ -94,11 +96,13 @@ public class FullRollingRestartIT extends ESIntegTestCase {
// now start shutting nodes down
internalCluster().stopRandomDataNode();
// make sure the cluster state is green, and all has been recovered
- assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4"));
+ assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
+ .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4"));
internalCluster().stopRandomDataNode();
// make sure the cluster state is green, and all has been recovered
- assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
+ assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
+ .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
logger.info("--> stopped two nodes, verifying data");
refresh();
@@ -109,12 +113,14 @@ public class FullRollingRestartIT extends ESIntegTestCase {
// closing the 3rd node
internalCluster().stopRandomDataNode();
// make sure the cluster state is green, and all has been recovered
- assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2"));
+ assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
+ .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2"));
internalCluster().stopRandomDataNode();
// make sure the cluster state is yellow, and all has been recovered
- assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1"));
+ assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
+ .setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1"));
logger.info("--> one node left, verifying data");
refresh();
@@ -133,7 +139,9 @@ public class FullRollingRestartIT extends ESIntegTestCase {
* to relocating to the restarting node since all had 2 shards and now one node has nothing allocated.
* We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen.
*/
- prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get();
+ prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6")
+ .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
+ .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get();
for (int i = 0; i < 100; i++) {
client().prepareIndex("test", "type1", Long.toString(i))
@@ -152,7 +160,8 @@ public class FullRollingRestartIT extends ESIntegTestCase {
recoveryResponse = client().admin().indices().prepareRecoveries("test").get();
for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) {
- assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " + recoveryState.getTargetNode()+ "-- \nbefore: \n" + state,
+ assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " +
+ recoveryState.getTargetNode()+ "-- \nbefore: \n" + state,
recoveryState.getRecoverySource().getType() != RecoverySource.Type.PEER || recoveryState.getPrimary() == false);
}
}
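Editorial sketch (not part of the patch): the wrapped calls above all follow the same cluster-health idiom. Assuming the test-class context (client(), healthTimeout, and the assertTimeout helper invoked above), the idiom reads roughly as:

    // Illustrative only: the health-check idiom these reformatted lines use.
    ClusterHealthRequestBuilder health = client().admin().cluster().prepareHealth()
        .setWaitForEvents(Priority.LANGUID)    // wait for pending cluster-state tasks
        .setTimeout(healthTimeout)
        .setWaitForGreenStatus()
        .setWaitForNoRelocatingShards(true)
        .setWaitForNodes("3");                 // expected node count
    assertTimeout(health);                     // presumably asserts the request did not time out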
diff --git a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
index 0d2235c30a4..c0345be6fae 100644
--- a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
+++ b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
@@ -53,14 +53,18 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
-@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE")
+@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE," +
+ "org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE")
public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class);
public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception {
logger.info("--> creating test index ...");
int numberOfShards = numberOfShards();
- assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
+ assertAcked(prepareCreate("test", 1, Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, numberOfShards)
+ .put(SETTING_NUMBER_OF_REPLICAS, 1)
+ .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
final int totalNumDocs = scaledRandomIntBetween(200, 10000);
int waitFor = totalNumDocs / 10;
@@ -92,7 +96,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
logger.info("--> waiting for GREEN health status ...");
// make sure the cluster state is green, and all has been recovered
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
waitForDocs(totalNumDocs, indexer);
@@ -113,7 +118,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception {
logger.info("--> creating test index ...");
int numberOfShards = numberOfShards();
- assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
+ assertAcked(prepareCreate("test", 1, Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, numberOfShards)
+ .put(SETTING_NUMBER_OF_REPLICAS, 1)
+ .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
final int totalNumDocs = scaledRandomIntBetween(200, 10000);
int waitFor = totalNumDocs / 10;
@@ -142,7 +150,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
allowNodes("test", 4);
logger.info("--> waiting for GREEN health status ...");
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
@@ -164,7 +173,9 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception {
logger.info("--> creating test index ...");
int numberOfShards = numberOfShards();
- assertAcked(prepareCreate("test", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
+ assertAcked(prepareCreate("test", 2, Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1)
+ .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
final int totalNumDocs = scaledRandomIntBetween(200, 10000);
int waitFor = totalNumDocs / 10;
@@ -194,7 +205,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
allowNodes("test", 4);
logger.info("--> waiting for GREEN health status ...");
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus().setWaitForNoRelocatingShards(true));
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m")
+ .setWaitForGreenStatus()
+ .setWaitForNoRelocatingShards(true));
logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
waitForDocs(totalNumDocs, indexer);
@@ -205,23 +219,31 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
logger.info("--> allow 3 nodes for index [test] ...");
allowNodes("test", 3);
logger.info("--> waiting for relocations ...");
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m")
+ .setWaitForNoRelocatingShards(true));
logger.info("--> allow 2 nodes for index [test] ...");
allowNodes("test", 2);
logger.info("--> waiting for relocations ...");
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m")
+ .setWaitForNoRelocatingShards(true));
logger.info("--> allow 1 nodes for index [test] ...");
allowNodes("test", 1);
logger.info("--> waiting for relocations ...");
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m")
+ .setWaitForNoRelocatingShards(true));
logger.info("--> marking and waiting for indexing threads to stop ...");
indexer.stop();
logger.info("--> indexing threads stopped");
- assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
+ assertNoTimeout(client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID).setTimeout("5m")
+ .setWaitForNoRelocatingShards(true));
logger.info("--> refreshing the index");
refreshAndAssert();
@@ -235,7 +257,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
final int numReplicas = 0;
logger.info("--> creating test index ...");
int allowNodes = 2;
- assertAcked(prepareCreate("test", 3, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
+ assertAcked(prepareCreate("test", 3, Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, numShards)
+ .put(SETTING_NUMBER_OF_REPLICAS, numReplicas)
+ .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
final int numDocs = scaledRandomIntBetween(200, 9999);
@@ -258,7 +283,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
logger.info("--> indexing threads stopped");
logger.info("--> bump up number of replicas to 1 and allow all nodes to hold the index");
allowNodes("test", 3);
- assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("number_of_replicas", 1)).get());
+ assertAcked(client().admin().indices().prepareUpdateSettings("test")
+ .setSettings(Settings.builder().put("number_of_replicas", 1)).get());
ensureGreen(TimeValue.timeValueMinutes(5));
logger.info("--> refreshing the index");
@@ -273,7 +299,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
SearchResponse[] iterationResults = new SearchResponse[iterations];
boolean error = false;
for (int i = 0; i < iterations; i++) {
- SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).addSort("id", SortOrder.ASC).get();
+ SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery())
+ .addSort("id", SortOrder.ASC).get();
logSearchResponse(numberOfShards, numberOfDocs, i, searchResponse);
iterationResults[i] = searchResponse;
if (searchResponse.getHits().getTotalHits().value != numberOfDocs) {
@@ -286,7 +313,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().get();
for (ShardStats shardStats : indicesStatsResponse.getShards()) {
DocsStats docsStats = shardStats.getStats().docs;
- logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), shardStats.getShardRouting().primary());
+ logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(),
+ shardStats.getShardRouting().primary());
}
ClusterService clusterService = clusterService();
@@ -332,12 +360,14 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
}
private void logSearchResponse(int numberOfShards, long numberOfDocs, int iteration, SearchResponse searchResponse) {
- logger.info("iteration [{}] - successful shards: {} (expected {})", iteration, searchResponse.getSuccessfulShards(), numberOfShards);
+ logger.info("iteration [{}] - successful shards: {} (expected {})", iteration,
+ searchResponse.getSuccessfulShards(), numberOfShards);
logger.info("iteration [{}] - failed shards: {} (expected 0)", iteration, searchResponse.getFailedShards());
if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) {
logger.info("iteration [{}] - shard failures: {}", iteration, Arrays.toString(searchResponse.getShardFailures()));
}
- logger.info("iteration [{}] - returned documents: {} (expected {})", iteration, searchResponse.getHits().getTotalHits().value, numberOfDocs);
+ logger.info("iteration [{}] - returned documents: {} (expected {})", iteration,
+ searchResponse.getHits().getTotalHits().value, numberOfDocs);
}
private void refreshAndAssert() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java
index b27e4fd229a..62208a40488 100644
--- a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java
+++ b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java
@@ -133,7 +133,8 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> start another node");
final String node_2 = internalCluster().startNode();
- ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").execute().actionGet();
+ ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
+ .setWaitForNodes("2").execute().actionGet();
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
logger.info("--> relocate the shard from node1 to node2");
@@ -141,7 +142,8 @@ public class RelocationIT extends ESIntegTestCase {
.add(new MoveAllocationCommand("test", 0, node_1, node_2))
.execute().actionGet();
- clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
+ clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
+ .setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
logger.info("--> verifying count again...");
@@ -155,7 +157,8 @@ public class RelocationIT extends ESIntegTestCase {
int numberOfReplicas = randomBoolean() ? 0 : 1;
int numberOfNodes = numberOfReplicas == 0 ? 2 : 3;
- logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes);
+ logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})",
+ numberOfRelocations, numberOfReplicas, numberOfNodes);
String[] nodes = new String[numberOfNodes];
logger.info("--> starting [node1] ...");
@@ -172,8 +175,10 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> starting [node{}] ...", i);
nodes[i - 1] = internalCluster().startNode();
if (i != numberOfNodes) {
- ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
- .setWaitForNodes(Integer.toString(i)).setWaitForGreenStatus().execute().actionGet();
+ ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID)
+ .setWaitForNodes(Integer.toString(i))
+ .setWaitForGreenStatus().execute().actionGet();
assertThat(healthResponse.isTimedOut(), equalTo(false));
}
}
@@ -202,7 +207,10 @@ public class RelocationIT extends ESIntegTestCase {
logger.debug("--> flushing");
client().admin().indices().prepareFlush().get();
}
- ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
+ ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID)
+ .setWaitForNoRelocatingShards(true)
+ .setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
indexer.pauseIndexing();
logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode);
@@ -218,7 +226,8 @@ public class RelocationIT extends ESIntegTestCase {
boolean ranOnce = false;
for (int i = 0; i < 10; i++) {
logger.info("--> START search test round {}", i + 1);
- SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits();
+ SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery())
+ .setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits();
ranOnce = true;
if (hits.getTotalHits().value != indexer.totalIndexedDocs()) {
int[] hitIds = new int[(int) indexer.totalIndexedDocs()];
@@ -252,7 +261,8 @@ public class RelocationIT extends ESIntegTestCase {
int numberOfReplicas = randomBoolean() ? 0 : 1;
int numberOfNodes = numberOfReplicas == 0 ? 2 : 3;
- logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes);
+ logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})",
+ numberOfRelocations, numberOfReplicas, numberOfNodes);
String[] nodes = new String[numberOfNodes];
logger.info("--> starting [node_0] ...");
@@ -281,13 +291,15 @@ public class RelocationIT extends ESIntegTestCase {
final Semaphore postRecoveryShards = new Semaphore(0);
final IndexEventListener listener = new IndexEventListener() {
@Override
- public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) {
+ public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState,
+ IndexShardState currentState, @Nullable String reason) {
if (currentState == IndexShardState.POST_RECOVERY) {
postRecoveryShards.release();
}
}
};
- for (MockIndexEventListener.TestEventListener eventListener : internalCluster().getInstances(MockIndexEventListener.TestEventListener.class)) {
+ for (MockIndexEventListener.TestEventListener eventListener : internalCluster()
+ .getInstances(MockIndexEventListener.TestEventListener.class)) {
eventListener.setNewDelegate(listener);
}
@@ -327,7 +339,10 @@ public class RelocationIT extends ESIntegTestCase {
indexRandom(true, true, builders2);
// verify cluster was finished.
- assertFalse(client().admin().cluster().prepareHealth().setWaitForNoRelocatingShards(true).setWaitForEvents(Priority.LANGUID).setTimeout("30s").get().isTimedOut());
+ assertFalse(client().admin().cluster().prepareHealth()
+ .setWaitForNoRelocatingShards(true)
+ .setWaitForEvents(Priority.LANGUID)
+ .setTimeout("30s").get().isTimedOut());
logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode);
logger.debug("--> verifying all searches return the same number of docs");
@@ -374,17 +389,20 @@ public class RelocationIT extends ESIntegTestCase {
MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, p_node);
for (DiscoveryNode node : clusterService.state().nodes()) {
if (!node.equals(clusterService.localNode())) {
- mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()), new RecoveryCorruption(corruptionCount));
+ mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()),
+ new RecoveryCorruption(corruptionCount));
}
}
- client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
+ client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
+ .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
corruptionCount.await();
logger.info("--> stopping replica assignment");
assertAcked(client().admin().cluster().prepareUpdateSettings()
- .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")));
+ .setTransientSettings(Settings.builder()
+ .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")));
logger.info("--> wait for all replica shards to be removed, on all nodes");
assertBusy(() -> {
@@ -408,7 +426,8 @@ public class RelocationIT extends ESIntegTestCase {
Files.walkFileTree(shardLoc, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), not(startsWith("recovery.")));
+ assertThat("found a temporary recovery file: " + file, file.getFileName().toString(),
+ not(startsWith("recovery.")));
return FileVisitResult.CONTINUE;
}
});
@@ -496,13 +515,15 @@ public class RelocationIT extends ESIntegTestCase {
}
@Override
- public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException {
+ public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request,
+ TransportRequestOptions options) throws IOException {
if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
RecoveryFileChunkRequest chunkRequest = (RecoveryFileChunkRequest) request;
if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) {
// corrupting the segments_N files in order to make sure future recovery re-send files
logger.debug("corrupting [{}] to {}. file name: [{}]", action, connection.getNode(), chunkRequest.name());
- assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes : "no internal reference!!";
+ assert chunkRequest.content().toBytesRef().bytes ==
+ chunkRequest.content().toBytesRef().bytes : "no internal reference!!";
byte[] array = chunkRequest.content().toBytesRef().bytes;
array[0] = (byte) ~array[0]; // flip one byte in the content
corruptionCount.countDown();
diff --git a/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java
index ac8688c9847..973c687ebe8 100644
--- a/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java
+++ b/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java
@@ -89,7 +89,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
// we have no replicas so far and make sure that we allocate the primary on the lucky node
assertAcked(prepareCreate("test")
.addMapping("type1", "field1", "type=text", "the_id", "type=text")
- .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards())
+ .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+ .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards())
.put("index.routing.allocation.include._name", primariesNode.getNode().getName()))); // only allocate on the lucky node
// index some docs and check if they are coming back
@@ -112,7 +113,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
final CountDownLatch latch = new CountDownLatch(1);
final AtomicBoolean truncate = new AtomicBoolean(true);
for (NodeStats dataNode : dataNodeStats) {
- MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getNode().getName()));
+ MockTransportService mockTransportService = ((MockTransportService) internalCluster()
+ .getInstance(TransportService.class, dataNode.getNode().getName()));
mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()),
(connection, requestId, action, request, options) -> {
if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
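Condensed sketch (assumption: same MockTransportService API as in the hunk above) of the send-behavior hook being reformatted here. It intercepts FILE_CHUNK actions sent to the "unlucky" node and forwards everything else unchanged:

    // Sketch of the interception pattern used in this test.
    MockTransportService mockTransportService = (MockTransportService) internalCluster()
        .getInstance(TransportService.class, dataNode.getNode().getName());
    mockTransportService.addSendBehavior(
        internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()),
        (connection, requestId, action, request, options) -> {
            if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
                // the real test truncates the first chunk here to force a retried recovery
            }
            connection.sendRequest(requestId, action, request, options);
        });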
diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java
index b9fd724fb65..f012c1393c9 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java
@@ -23,25 +23,38 @@ import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestRequest.Method;
import org.elasticsearch.rest.RestResponse;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener;
import org.elasticsearch.test.rest.FakeRestChannel;
import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.test.rest.RestActionTestCase;
import org.junit.AfterClass;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
import static java.util.Collections.emptyMap;
+import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.elasticsearch.rest.RestStatus.OK;
-import static org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener;
import static org.hamcrest.Matchers.equalTo;
-public class RestGetSourceActionTests extends ESTestCase {
+public class RestGetSourceActionTests extends RestActionTestCase {
private static RestRequest request = new FakeRestRequest();
private static FakeRestChannel channel = new FakeRestChannel(request, true, 0);
private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request);
+ @Before
+ public void setUpAction() {
+ new RestGetSourceAction(Settings.EMPTY, controller());
+ }
+
@AfterClass
public static void cleanupReferences() {
request = null;
@@ -49,9 +62,41 @@ public class RestGetSourceActionTests extends ESTestCase {
listener = null;
}
+ /**
+ * test deprecation is logged if type is used in path
+ */
+ public void testTypeInPath() {
+ for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
+ RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+ .withMethod(method)
+ .withPath("/some_index/some_type/id/_source")
+ .build();
+ dispatchRequest(request);
+ assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+ }
+ }
+
+ /**
+ * test deprecation is logged if type is used as parameter
+ */
+ public void testTypeParameter() {
+ Map<String, String> params = new HashMap<>();
+ params.put("type", "some_type");
+ for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
+ RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+ .withMethod(method)
+ .withPath("/some_index/_source/id")
+ .withParams(params)
+ .build();
+ dispatchRequest(request);
+ assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+ }
+ }
+
public void testRestGetSourceAction() throws Exception {
final BytesReference source = new BytesArray("{\"foo\": \"bar\"}");
- final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, source, emptyMap()));
+ final GetResponse response =
+ new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, source, emptyMap()));
final RestResponse restResponse = listener.buildResponse(response);
@@ -61,7 +106,8 @@ public class RestGetSourceActionTests extends ESTestCase {
}
public void testRestGetSourceActionWithMissingDocument() {
- final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, false, null, emptyMap()));
+ final GetResponse response =
+ new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap()));
final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response));
@@ -69,7 +115,8 @@ public class RestGetSourceActionTests extends ESTestCase {
}
public void testRestGetSourceActionWithMissingDocumentSource() {
- final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, null, emptyMap()));
+ final GetResponse response =
+ new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, null, emptyMap()));
final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response));
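For orientation, the GetResult constructor calls rewritten above now carry a sequence number and primary term. A minimal sketch of the new shape, using only values that appear in this hunk:

    // Sketch of the expanded GetResult constructor used in the assertions above:
    // (index, type, id, seqNo, primaryTerm, version, exists, source, fields).
    GetResult result = new GetResult("index1", "_doc", "1",
        UNASSIGNED_SEQ_NO, 0, -1, true,
        new BytesArray("{\"foo\": \"bar\"}"), emptyMap());
    GetResponse response = new GetResponse(result);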
diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java
index f6b8dc828f4..797b5dd888e 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.search;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.LuceneTests;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -31,23 +32,36 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.test.RandomObjects;
+import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.Arrays;
+import java.util.Base64;
public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSortValues> {
public static SearchSortValues createTestItem(XContentType xContentType, boolean transportSerialization) {
int size = randomIntBetween(1, 20);
Object[] values = new Object[size];
- DocValueFormat[] sortValueFormats = new DocValueFormat[size];
- for (int i = 0; i < size; i++) {
- Object sortValue = randomSortValue(xContentType, transportSerialization);
- values[i] = sortValue;
- //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef)
- sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat();
+ if (transportSerialization) {
+ DocValueFormat[] sortValueFormats = new DocValueFormat[size];
+ for (int i = 0; i < size; i++) {
+ Object sortValue = randomSortValue(xContentType, transportSerialization);
+ values[i] = sortValue;
+ //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef)
+ sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat();
+ }
+ return new SearchSortValues(values, sortValueFormats);
+ } else {
+ //xcontent serialization doesn't write/parse the raw sort values, only the formatted ones
+ for (int i = 0; i < size; i++) {
+ Object sortValue = randomSortValue(xContentType, transportSerialization);
+ //make sure that BytesRef are not provided as formatted values
+ sortValue = sortValue instanceof BytesRef ? DocValueFormat.RAW.format((BytesRef)sortValue) : sortValue;
+ values[i] = sortValue;
+ }
+ return new SearchSortValues(values);
}
- return new SearchSortValues(values, sortValueFormats);
}
private static Object randomSortValue(XContentType xContentType, boolean transportSerialization) {
@@ -79,7 +93,7 @@ public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSortValues> {
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- updateSeedNodes(connection, Arrays.asList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
assertTrue(connectionManager.nodeConnected(seedNode));
assertTrue(connectionManager.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
@@ -206,9 +208,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- updateSeedNodes(connection, Arrays.asList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
assertTrue(connectionManager.nodeConnected(seedNode));
assertTrue(connectionManager.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
@@ -259,9 +261,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- updateSeedNodes(connection, Arrays.asList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
assertTrue(connectionManager.nodeConnected(seedNode));
assertTrue(connectionManager.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
@@ -282,7 +284,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(incompatibleTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
- List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> incompatibleSeedNode, () -> seedNode);
+ List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = Arrays.asList(
+ Tuple.tuple(incompatibleSeedNode.toString(), () -> incompatibleSeedNode),
+ Tuple.tuple(seedNode.toString(), () -> seedNode));
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
@@ -317,9 +321,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- updateSeedNodes(connection, Arrays.asList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
assertTrue(connectionManager.nodeConnected(seedNode));
assertTrue(connectionManager.nodeConnected(discoverableNode));
assertFalse(connectionManager.nodeConnected(spareNode));
@@ -367,9 +371,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- updateSeedNodes(connection, Arrays.asList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
if (rejectedNode.equals(seedNode)) {
assertFalse(connectionManager.nodeConnected(seedNode));
assertTrue(connectionManager.nodeConnected(discoverableNode));
@@ -382,11 +386,15 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
}
}
- private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes) throws Exception {
+ private void updateSeedNodes(
+ final RemoteClusterConnection connection, final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes) throws Exception {
updateSeedNodes(connection, seedNodes, null);
}
- private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes, String proxyAddress)
+ private void updateSeedNodes(
+ final RemoteClusterConnection connection,
+ final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
+ final String proxyAddress)
throws Exception {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
@@ -428,9 +436,11 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(() -> seedNode)));
+ expectThrows(
+ Exception.class,
+ () -> updateSeedNodes(connection, Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode))));
assertFalse(connectionManager.nodeConnected(seedNode));
assertTrue(connection.assertNoRunningConnections());
}
@@ -481,7 +491,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
connection.addConnectedNode(seedNode);
for (DiscoveryNode node : knownNodes) {
final Transport.Connection transportConnection = connection.getConnection(node);
@@ -524,7 +534,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
CountDownLatch listenerCalled = new CountDownLatch(1);
AtomicReference<Exception> exceptionReference = new AtomicReference<>();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
ActionListener<Void> listener = ActionListener.wrap(x -> {
listenerCalled.countDown();
fail("expected exception");
@@ -532,7 +542,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
exceptionReference.set(x);
listenerCalled.countDown();
});
- connection.updateSeedNodes(null, Arrays.asList(() -> seedNode), listener);
+ connection.updateSeedNodes(null, seedNodes(seedNode), listener);
acceptedLatch.await();
connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on
assertTrue(connection.assertNoRunningConnections());
@@ -548,6 +558,18 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
}
+ private List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes(final DiscoveryNode... seedNodes) {
+ if (seedNodes.length == 0) {
+ return Collections.emptyList();
+ } else if (seedNodes.length == 1) {
+ return Collections.singletonList(Tuple.tuple(seedNodes[0].toString(), () -> seedNodes[0]));
+ } else {
+ return Arrays.stream(seedNodes)
.map(s -> Tuple.tuple(s.toString(), (Supplier<DiscoveryNode>)() -> s))
+ .collect(Collectors.toList());
+ }
+ }
+
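A short usage note for the seedNodes(...) helper introduced above: each configured seed is represented as a Tuple of its address string and a lazy DiscoveryNode supplier. Illustrative usage (not part of the patch):

    // Illustrative usage of the helper above.
    List<Tuple<String, Supplier<DiscoveryNode>>> seeds = seedNodes(seedNode);
    assertEquals(seedNode.toString(), seeds.get(0).v1()); // configured address string
    assertSame(seedNode, seeds.get(0).v2().get());        // node resolved lazily via the supplier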
public void testFetchShards() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
@@ -559,11 +581,11 @@ public class RemoteClusterConnectionTests extends ESTestCase {
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
- List<Supplier<DiscoveryNode>> nodes = Collections.singletonList(() -> seedNode);
+ final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- nodes, service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes, service, Integer.MAX_VALUE, n -> true, null)) {
if (randomBoolean()) {
- updateSeedNodes(connection, nodes);
+ updateSeedNodes(connection, seedNodes);
}
if (randomBoolean()) {
connection.updateSkipUnavailable(randomBoolean());
@@ -599,9 +621,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
- List<Supplier<DiscoveryNode>> nodes = Collections.singletonList(() -> seedNode);
+ final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- nodes, service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes, service, Integer.MAX_VALUE, n -> true, null)) {
SearchRequest request = new SearchRequest("test-index");
Thread[] threads = new Thread[10];
for (int i = 0; i < threads.length; i++) {
@@ -655,7 +677,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Collections.singletonList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
SearchRequest request = new SearchRequest("test-index");
@@ -759,7 +781,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(seedTransport1.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
- List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode);
+ List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode1, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
@@ -839,7 +861,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
knownNodes.add(discoverableTransport.getLocalDiscoNode());
knownNodes.add(seedTransport1.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
- List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode);
+ List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode1, seedNode);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
@@ -926,7 +948,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
knownNodes.add(transport3.getLocalDiscoNode());
knownNodes.add(transport2.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
- List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> node3, () -> node1, () -> node2);
+ List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(node3, node1, node2);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
@@ -958,44 +980,32 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
public void testRemoteConnectionInfo() throws IOException {
- RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 3, TimeValue.timeValueMinutes(30), false);
+ RemoteConnectionInfo stats =
+ new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats);
- RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 4, TimeValue.timeValueMinutes(30), true);
+ RemoteConnectionInfo stats1 =
+ new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 4, TimeValue.timeValueMinutes(30), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
- stats1 = new RemoteConnectionInfo("test_cluster_1",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 3, TimeValue.timeValueMinutes(30), false);
+ stats1 = new RemoteConnectionInfo("test_cluster_1", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
- stats1 = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 15)),
- 4, 3, TimeValue.timeValueMinutes(30), false);
+ stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:15"), 4, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
- stats1 = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 3, TimeValue.timeValueMinutes(30), true);
+ stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
- stats1 = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 3, TimeValue.timeValueMinutes(325), true);
+ stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(325), true);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
- stats1 = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 5, 3, TimeValue.timeValueMinutes(30), false);
+ stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 5, 3, TimeValue.timeValueMinutes(30), false);
assertSerialization(stats1);
assertNotEquals(stats, stats1);
}
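For reference, the RemoteConnectionInfo constructor exercised above now takes the configured seed addresses as plain strings rather than resolved TransportAddress instances. A minimal sketch of one such instance, with argument meanings inferred from the assertions above:

    // Sketch only; argument order mirrors the calls in this test.
    RemoteConnectionInfo info = new RemoteConnectionInfo(
        "test_cluster",                  // cluster alias
        Arrays.asList("seed:1"),         // configured seed addresses, now plain strings
        4,                               // max connections per cluster
        3,                               // connected nodes
        TimeValue.timeValueMinutes(30),  // initial connect timeout
        false);                          // skip_unavailable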
@@ -1016,9 +1026,8 @@ public class RemoteClusterConnectionTests extends ESTestCase {
public void testRemoteConnectionInfoBwComp() throws IOException {
final Version version = VersionUtils.randomVersionBetween(random(),
Version.V_6_1_0, VersionUtils.getPreviousVersion(Version.V_7_0_0));
- RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster",
- Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
+ RemoteConnectionInfo expected =
+ new RemoteConnectionInfo("test_cluster", Arrays.asList("0.0.0.0:1"), 4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
// This version was created using the serialization code in use from 6.1 but before 7.0
String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIA";
@@ -1042,27 +1051,25 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
public void testRenderConnectionInfoXContent() throws IOException {
- RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
- 4, 3, TimeValue.timeValueMinutes(30), true);
+ RemoteConnectionInfo stats =
+ new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true);
stats = assertSerialization(stats);
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
stats.toXContent(builder, null);
builder.endObject();
- assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"connected\":true," +
+ assertEquals("{\"test_cluster\":{\"seeds\":[\"seed:1\"],\"connected\":true," +
"\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," +
"\"skip_unavailable\":true}}", Strings.toString(builder));
- stats = new RemoteConnectionInfo("some_other_cluster",
- Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1), new TransportAddress(TransportAddress.META_ADDRESS, 2)),
- 2, 0, TimeValue.timeValueSeconds(30), false);
+ stats = new RemoteConnectionInfo(
+ "some_other_cluster", Arrays.asList("seed:1", "seed:2"), 2, 0, TimeValue.timeValueSeconds(30), false);
stats = assertSerialization(stats);
builder = XContentFactory.jsonBuilder();
builder.startObject();
stats.toXContent(builder, null);
builder.endObject();
- assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"],"
+ assertEquals("{\"some_other_cluster\":{\"seeds\":[\"seed:1\",\"seed:2\"],"
+ "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," +
"\"skip_unavailable\":false}}", Strings.toString(builder));
}
@@ -1081,7 +1088,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
assertFalse(connectionManager.nodeConnected(seedNode));
assertFalse(connectionManager.nodeConnected(discoverableNode));
@@ -1131,9 +1138,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
if (randomBoolean()) {
- updateSeedNodes(connection, Arrays.asList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
}
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<Function<String, DiscoveryNode>> reference = new AtomicReference<>();
@@ -1165,14 +1172,14 @@ public class RemoteClusterConnectionTests extends ESTestCase {
List<MockTransportService> discoverableTransports = new CopyOnWriteArrayList<>();
try {
final int numDiscoverableNodes = randomIntBetween(5, 20);
- List<Supplier<DiscoveryNode>> discoverableNodes = new ArrayList<>(numDiscoverableNodes);
+ List<Tuple<String, Supplier<DiscoveryNode>>> discoverableNodes = new ArrayList<>(numDiscoverableNodes);
for (int i = 0; i < numDiscoverableNodes; i++ ) {
MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT);
- discoverableNodes.add(transportService::getLocalDiscoNode);
+ discoverableNodes.add(Tuple.tuple("discoverable_node" + i, transportService::getLocalDiscoNode));
discoverableTransports.add(transportService);
}
- List<Supplier<DiscoveryNode>> seedNodes = randomSubsetOf(discoverableNodes);
+ List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = randomSubsetOf(discoverableNodes);
Collections.shuffle(seedNodes, random());
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
@@ -1221,7 +1228,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
discoverableTransports.add(transportService);
connection.addConnectedNode(transportService.getLocalDiscoNode());
} else {
- DiscoveryNode node = randomFrom(discoverableNodes).get();
+ DiscoveryNode node = randomFrom(discoverableNodes).v2().get();
connection.onNodeDisconnected(node);
}
}
@@ -1269,14 +1276,16 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Arrays.asList( () -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
+ seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
ConnectionManager connectionManager = connection.getConnectionManager();
- updateSeedNodes(connection, Collections.singletonList(() -> seedNode));
+ updateSeedNodes(connection, seedNodes(seedNode));
assertTrue(connectionManager.nodeConnected(seedNode));
assertTrue(connectionManager.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
- List<Supplier<DiscoveryNode>> discoveryNodes =
- Arrays.asList(otherClusterTransport::getLocalDiscoNode, () -> seedNode);
+ List<Tuple<String, Supplier<DiscoveryNode>>> discoveryNodes =
+ Arrays.asList(
+ Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode),
+ Tuple.tuple(seedNode.toString(), () -> seedNode));
Collections.shuffle(discoveryNodes, random());
updateSeedNodes(connection, discoveryNodes);
assertTrue(connectionManager.nodeConnected(seedNode));
@@ -1287,7 +1296,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
assertTrue(connectionManager.nodeConnected(discoverableNode));
assertTrue(connection.assertNoRunningConnections());
IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () ->
- updateSeedNodes(connection, Arrays.asList(() -> otherClusterTransport.getLocalDiscoNode())));
+ updateSeedNodes(connection, Arrays.asList(Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode))));
assertThat(illegalStateException.getMessage(),
startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" +
" - {other_cluster_discoverable_node}"));
@@ -1339,7 +1348,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
- Collections.singletonList(() -> connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
+ seedNodes(connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
connection.addConnectedNode(connectedNode);
for (int i = 0; i < 10; i++) {
//always a direct connection as the remote node is already connected
@@ -1376,10 +1385,10 @@ public class RemoteClusterConnectionTests extends ESTestCase {
service.start();
service.acceptIncomingRequests();
CountDownLatch multipleResolveLatch = new CountDownLatch(2);
- Supplier<DiscoveryNode> seedSupplier = () -> {
+ Tuple<String, Supplier<DiscoveryNode>> seedSupplier = Tuple.tuple(seedNode.toString(), () -> {
multipleResolveLatch.countDown();
return seedNode;
- };
+ });
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, null)) {
updateSeedNodes(connection, Arrays.asList(seedSupplier));
@@ -1409,9 +1418,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
threadPool, null, Collections.emptySet())) {
service.start();
service.acceptIncomingRequests();
- Supplier<DiscoveryNode> seedSupplier = () ->
- RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true);
- assertEquals("node_0", seedSupplier.get().getAttributes().get("server_name"));
+ Tuple<String, Supplier<DiscoveryNode>> seedSupplier = Tuple.tuple("node_0", () ->
+ RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true));
+ assertEquals("node_0", seedSupplier.v2().get().getAttributes().get("server_name"));
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, proxyAddress)) {
updateSeedNodes(connection, Arrays.asList(seedSupplier), proxyAddress);
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
index dfc5d4367b4..34dfc420133 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
@@ -125,41 +125,42 @@ public class RemoteClusterServiceTests extends ESTestCase {
}
public void testBuildRemoteClustersDynamicConfig() throws Exception {
- Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig(
- Settings.builder()
- .put("cluster.remote.foo.seeds", "192.168.0.1:8080")
- .put("cluster.remote.bar.seeds", "[::1]:9090")
- .put("cluster.remote.boom.seeds", "boom-node1.internal:1000")
- .put("cluster.remote.boom.proxy", "foo.bar.com:1234")
- .put("search.remote.quux.seeds", "quux:9300")
- .put("search.remote.quux.proxy", "quux-proxy:19300")
- .build());
+ Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> map =
+ RemoteClusterService.buildRemoteClustersDynamicConfig(
+ Settings.builder()
+ .put("cluster.remote.foo.seeds", "192.168.0.1:8080")
+ .put("cluster.remote.bar.seeds", "[::1]:9090")
+ .put("cluster.remote.boom.seeds", "boom-node1.internal:1000")
+ .put("cluster.remote.boom.proxy", "foo.bar.com:1234")
+ .put("search.remote.quux.seeds", "quux:9300")
+ .put("search.remote.quux.proxy", "quux-proxy:19300")
+ .build());
assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux")));
assertThat(map.get("foo").v2(), hasSize(1));
assertThat(map.get("bar").v2(), hasSize(1));
assertThat(map.get("boom").v2(), hasSize(1));
assertThat(map.get("quux").v2(), hasSize(1));
- DiscoveryNode foo = map.get("foo").v2().get(0).get();
+ DiscoveryNode foo = map.get("foo").v2().get(0).v2().get();
assertEquals("", map.get("foo").v1());
assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080)));
assertEquals(foo.getId(), "foo#192.168.0.1:8080");
assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
- DiscoveryNode bar = map.get("bar").v2().get(0).get();
+ DiscoveryNode bar = map.get("bar").v2().get(0).v2().get();
assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090)));
assertEquals(bar.getId(), "bar#[::1]:9090");
assertEquals("", map.get("bar").v1());
assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
- DiscoveryNode boom = map.get("boom").v2().get(0).get();
+ DiscoveryNode boom = map.get("boom").v2().get(0).v2().get();
assertEquals(boom.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0));
assertEquals("boom-node1.internal", boom.getHostName());
assertEquals(boom.getId(), "boom#boom-node1.internal:1000");
assertEquals("foo.bar.com:1234", map.get("boom").v1());
assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
- DiscoveryNode quux = map.get("quux").v2().get(0).get();
+ DiscoveryNode quux = map.get("quux").v2().get(0).v2().get();
assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0));
assertEquals("quux", quux.getHostName());
assertEquals(quux.getId(), "quux#quux:9300");
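A minimal sketch (shape inferred from the assertions above, not code from this change) of consuming the parsed config: the outer tuple's `v1()` is the configured proxy address (empty string when unset) and `v2()` lists the `(seed address, node supplier)` pairs:

import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.collect.Tuple;

final class RemoteConfigSketch {
    static void dump(Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> config) {
        for (Map.Entry<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> cluster : config.entrySet()) {
            String proxy = cluster.getValue().v1(); // empty string when no proxy is configured
            for (Tuple<String, Supplier<DiscoveryNode>> seed : cluster.getValue().v2()) {
                // seed.v1() is the configured address; seed.v2().get() resolves the DiscoveryNode lazily
                System.out.println(cluster.getKey() + " proxy=[" + proxy + "] seed=[" + seed.v1() + "]");
            }
        }
    }
}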
diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
index 9de70f4339f..0c253f1446c 100644
--- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
+++ b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
@@ -24,6 +24,7 @@ import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -311,12 +312,11 @@ public class SimpleVersioningIT extends ESIntegTestCase {
assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class);
client().admin().indices().prepareRefresh().execute().actionGet();
- // TODO: Enable once get response returns seqNo
-// for (int i = 0; i < 10; i++) {
-// final GetResponse response = client().prepareGet("test", "type", "1").get();
-// assertThat(response.getSeqNo(), equalTo(1L));
-// assertThat(response.getPrimaryTerm(), equalTo(1L));
-// }
+ for (int i = 0; i < 10; i++) {
+ final GetResponse response = client().prepareGet("test", "type", "1").get();
+ assertThat(response.getSeqNo(), equalTo(1L));
+ assertThat(response.getPrimaryTerm(), equalTo(1L));
+ }
// search with versioning
for (int i = 0; i < 10; i++) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java
index 3478c14cfda..cb7f5ff4a22 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java
@@ -27,6 +27,6 @@ public abstract class AbstractWireSerializingTestCase exten
@Override
protected T copyInstance(T instance, Version version) throws IOException {
- return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader());
+ return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), version);
}
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java
index c81d0810f08..4669284685c 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java
@@ -135,13 +135,16 @@ public final class RandomObjects {
}
}
if (value instanceof Float) {
+ if (xContentType == XContentType.CBOR) {
+ //with CBOR we get back a float
+ return value;
+ }
if (xContentType == XContentType.SMILE) {
//with SMILE we get back a double (this will change in Jackson 2.9 where it will return a Float)
return ((Float)value).doubleValue();
- } else {
- //with JSON AND YAML we get back a double, but with float precision.
- return Double.parseDouble(value.toString());
}
+ //with JSON AND YAML we get back a double, but with float precision.
+ return Double.parseDouble(value.toString());
}
if (value instanceof Byte) {
return ((Byte)value).intValue();
diff --git a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc b/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc
index 540f5866825..18c88f7addd 100644
--- a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc
+++ b/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc
@@ -2,7 +2,7 @@
[[security-api-invalidate-token]]
=== Invalidate token API
-Invalidates an access token or a refresh token.
+Invalidates one or more access tokens or refresh tokens.
==== Request
@@ -19,21 +19,31 @@ can no longer be used. That time period is defined by the
The refresh tokens returned by the <> are
only valid for 24 hours. They can also be used exactly once.
-If you want to invalidate an access or refresh token immediately, use this invalidate token API.
+If you want to invalidate one or more access or refresh tokens immediately, use this invalidate token API.
==== Request Body
The following parameters can be specified in the body of a DELETE request and
-pertain to invalidating a token:
+pertain to invalidating tokens:
`token` (optional)::
-(string) An access token. This parameter cannot be used when `refresh_token` is used.
+(string) An access token. This parameter cannot be used when any of `refresh_token`, `realm_name` or
+ `username` are used.
`refresh_token` (optional)::
-(string) A refresh token. This parameter cannot be used when `token` is used.
+(string) A refresh token. This parameter cannot be used when any of `token`, `realm_name` or
+ `username` are used.
-NOTE: One of `token` or `refresh_token` parameters is required.
+`realm_name` (optional)::
+(string) The name of an authentication realm. This parameter cannot be used with either `refresh_token` or `token`.
+
+`username` (optional)::
+(string) The username of a user. This parameter cannot be used with either `refresh_token` or `token`.
+
+NOTE: While all parameters are optional, at least one of them is required. More specifically, either `token`
+or `refresh_token` must be provided. If neither of these two is specified, then `realm_name` and/or `username`
+need to be specified.
==== Examples
@@ -59,15 +69,75 @@ DELETE /_security/oauth2/token
--------------------------------------------------
// NOTCONSOLE
-A successful call returns a JSON structure that indicates whether the token
-has already been invalidated.
+The following example invalidates all access tokens and refresh tokens for the `saml1` realm immediately:
[source,js]
--------------------------------------------------
+DELETE /_xpack/security/oauth2/token
{
- "created" : true <1>
+ "realm_name" : "saml1"
}
--------------------------------------------------
// NOTCONSOLE
-<1> When a token has already been invalidated, `created` is set to false.
+The following example invalidates all access tokens and refresh tokens for the user `myuser` in all realms immediately:
+
+[source,js]
+--------------------------------------------------
+DELETE /_xpack/security/oauth2/token
+{
+ "username" : "myuser"
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+Finally, the following example invalidates all access tokens and refresh tokens for the user `myuser` in
+ the `saml1` realm immediately:
+
+[source,js]
+--------------------------------------------------
+DELETE /_xpack/security/oauth2/token
+{
+ "username" : "myuser",
+ "realm_name" : "saml1"
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+A successful call returns a JSON structure that contains the number of tokens that were invalidated, the number
+of tokens that had already been invalidated, and potentially a list of errors encountered while invalidating
+specific tokens.
+
+[source,js]
+--------------------------------------------------
+{
+ "invalidated_tokens":9, <1>
+ "previously_invalidated_tokens":15, <2>
+ "error_count":2, <3>
+ "error_details":[ <4>
+ {
+ "type":"exception",
+ "reason":"Elasticsearch exception [type=exception, reason=foo]",
+ "caused_by":{
+ "type":"exception",
+ "reason":"Elasticsearch exception [type=illegal_argument_exception, reason=bar]"
+ }
+ },
+ {
+ "type":"exception",
+ "reason":"Elasticsearch exception [type=exception, reason=boo]",
+ "caused_by":{
+ "type":"exception",
+ "reason":"Elasticsearch exception [type=illegal_argument_exception, reason=far]"
+ }
+ }
+ ]
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+<1> The number of tokens that were invalidated as part of this request.
+<2> The number of tokens that were already invalidated.
+<3> The number of errors that were encountered when invalidating the tokens.
+<4> Details about these errors. This field is not present in the response when
+ `error_count` is 0.
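As a quick illustration (not part of this change; the endpoint and values mirror the `saml1` example above), the same realm-wide invalidation can be issued from Java through the low-level REST client:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class InvalidateRealmTokens {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // DELETE with a realm_name body invalidates every token issued by that realm.
            Request request = new Request("DELETE", "/_xpack/security/oauth2/token");
            request.setJsonEntity("{\"realm_name\":\"saml1\"}");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}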
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
index b25bd71c67f..58ba11e4d04 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.ccr;
-import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.Client;
@@ -111,7 +110,6 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E
private final boolean enabled;
private final Settings settings;
private final CcrLicenseChecker ccrLicenseChecker;
- private final SetOnce repositoryManager = new SetOnce<>();
private Client client;
/**
@@ -152,11 +150,10 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E
return emptyList();
}
- this.repositoryManager.set(new CcrRepositoryManager(settings, clusterService, client));
-
return Arrays.asList(
ccrLicenseChecker,
- new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker)
+ new CcrRepositoryManager(settings, clusterService, client),
+ new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker, threadPool::relativeTimeInMillis)
);
}
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java
index a1504ff2f8a..54403df3678 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java
@@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xpack.ccr.action.repositories.DeleteInternalCcrRepositoryAction;
@@ -18,31 +19,70 @@ import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryA
import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryRequest;
import org.elasticsearch.xpack.ccr.repository.CcrRepository;
+import java.io.IOException;
import java.util.List;
+import java.util.Set;
-class CcrRepositoryManager extends RemoteClusterAware {
+class CcrRepositoryManager extends AbstractLifecycleComponent {
private final Client client;
+ private final RemoteSettingsUpdateListener updateListener;
CcrRepositoryManager(Settings settings, ClusterService clusterService, Client client) {
super(settings);
this.client = client;
- listenForUpdates(clusterService.getClusterSettings());
+ updateListener = new RemoteSettingsUpdateListener(settings);
+ updateListener.listenForUpdates(clusterService.getClusterSettings());
}
@Override
- protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxyAddress) {
- String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias;
- if (addresses.isEmpty()) {
- DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName);
- PlainActionFuture f = PlainActionFuture.newFuture();
- client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f);
- assert f.isDone() : "Should be completed as it is executed synchronously";
- } else {
- ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE);
- PlainActionFuture f = PlainActionFuture.newFuture();
- client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f);
- assert f.isDone() : "Should be completed as it is executed synchronously";
+ protected void doStart() {
+ updateListener.init();
+ }
+
+ @Override
+ protected void doStop() {
+ }
+
+ @Override
+ protected void doClose() throws IOException {
+ }
+
+ private void putRepository(String repositoryName) {
+ ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE);
+ PlainActionFuture f = PlainActionFuture.newFuture();
+ client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f);
+ assert f.isDone() : "Should be completed as it is executed synchronously";
+ }
+
+ private void deleteRepository(String repositoryName) {
+ DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName);
+ PlainActionFuture f = PlainActionFuture.newFuture();
+ client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f);
+ assert f.isDone() : "Should be completed as it is executed synchronously";
+ }
+
+ private class RemoteSettingsUpdateListener extends RemoteClusterAware {
+
+ private RemoteSettingsUpdateListener(Settings settings) {
+ super(settings);
+ }
+
+ void init() {
+ Set<String> clusterAliases = buildRemoteClustersDynamicConfig(settings).keySet();
+ for (String clusterAlias : clusterAliases) {
+ putRepository(CcrRepository.NAME_PREFIX + clusterAlias);
+ }
+ }
+
+ @Override
+ protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxy) {
+ String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias;
+ if (addresses.isEmpty()) {
+ deleteRepository(repositoryName);
+ } else {
+ putRepository(repositoryName);
+ }
}
}
}
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java
index 7900351105c..4888b0367fd 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java
@@ -52,9 +52,12 @@ import java.util.TreeMap;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
+import java.util.function.LongSupplier;
import java.util.function.Supplier;
import java.util.stream.Collectors;
+import static org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster;
+
/**
* A component that runs only on the elected master node and follows leader indices automatically
* if they match with a auto follow pattern that is defined in {@link AutoFollowMetadata}.
@@ -67,6 +70,7 @@ public class AutoFollowCoordinator implements ClusterStateListener {
private final Client client;
private final ClusterService clusterService;
private final CcrLicenseChecker ccrLicenseChecker;
+ private final LongSupplier relativeMillisTimeProvider;
private volatile Map<String, AutoFollower> autoFollowers = Collections.emptyMap();
@@ -79,10 +83,13 @@ public class AutoFollowCoordinator implements ClusterStateListener {
public AutoFollowCoordinator(
Client client,
ClusterService clusterService,
- CcrLicenseChecker ccrLicenseChecker) {
+ CcrLicenseChecker ccrLicenseChecker,
+ LongSupplier relativeMillisTimeProvider) {
+
this.client = client;
this.clusterService = clusterService;
this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker, "ccrLicenseChecker");
+ this.relativeMillisTimeProvider = relativeMillisTimeProvider;
clusterService.addListener(this);
this.recentAutoFollowErrors = new LinkedHashMap<String, ElasticsearchException>() {
@Override
@@ -93,11 +100,26 @@ public class AutoFollowCoordinator implements ClusterStateListener {
}
public synchronized AutoFollowStats getStats() {
+ final Map<String, AutoFollower> autoFollowers = this.autoFollowers;
+ final TreeMap<String, AutoFollowedCluster> timesSinceLastAutoFollowPerRemoteCluster = new TreeMap<>();
+ for (Map.Entry<String, AutoFollower> entry : autoFollowers.entrySet()) {
+ long lastAutoFollowTimeInMillis = entry.getValue().lastAutoFollowTimeInMillis;
+ long lastSeenMetadataVersion = entry.getValue().metadataVersion;
+ if (lastAutoFollowTimeInMillis != -1) {
+ long timeSinceLastCheckInMillis = relativeMillisTimeProvider.getAsLong() - lastAutoFollowTimeInMillis;
+ timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(),
+ new AutoFollowedCluster(timeSinceLastCheckInMillis, lastSeenMetadataVersion));
+ } else {
+ timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(), new AutoFollowedCluster(-1L, lastSeenMetadataVersion));
+ }
+ }
+
return new AutoFollowStats(
numberOfFailedIndicesAutoFollowed,
numberOfFailedRemoteClusterStateRequests,
numberOfSuccessfulIndicesAutoFollowed,
- new TreeMap<>(recentAutoFollowErrors)
+ new TreeMap<>(recentAutoFollowErrors),
+ timesSinceLastAutoFollowPerRemoteCluster
);
}
@@ -146,7 +168,8 @@ public class AutoFollowCoordinator implements ClusterStateListener {
Map<String, AutoFollower> newAutoFollowers = new HashMap<>(newRemoteClusters.size());
for (String remoteCluster : newRemoteClusters) {
- AutoFollower autoFollower = new AutoFollower(remoteCluster, this::updateStats, clusterService::state) {
+ AutoFollower autoFollower =
+ new AutoFollower(remoteCluster, this::updateStats, clusterService::state, relativeMillisTimeProvider) {
@Override
void getRemoteClusterState(final String remoteCluster,
@@ -239,20 +262,25 @@ public class AutoFollowCoordinator implements ClusterStateListener {
private final String remoteCluster;
private final Consumer<List<AutoFollowResult>> statsUpdater;
private final Supplier<ClusterState> followerClusterStateSupplier;
+ private final LongSupplier relativeTimeProvider;
+ private volatile long lastAutoFollowTimeInMillis = -1;
private volatile long metadataVersion = 0;
private volatile CountDown autoFollowPatternsCountDown;
private volatile AtomicArray<AutoFollowResult> autoFollowResults;
AutoFollower(final String remoteCluster,
final Consumer<List<AutoFollowResult>> statsUpdater,
- final Supplier<ClusterState> followerClusterStateSupplier) {
+ final Supplier<ClusterState> followerClusterStateSupplier,
+ LongSupplier relativeTimeProvider) {
this.remoteCluster = remoteCluster;
this.statsUpdater = statsUpdater;
this.followerClusterStateSupplier = followerClusterStateSupplier;
+ this.relativeTimeProvider = relativeTimeProvider;
}
void start() {
+ lastAutoFollowTimeInMillis = relativeTimeProvider.getAsLong();
final ClusterState clusterState = followerClusterStateSupplier.get();
final AutoFollowMetadata autoFollowMetadata = clusterState.metaData().custom(AutoFollowMetadata.TYPE);
if (autoFollowMetadata == null) {
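The coordinator now samples `relativeMillisTimeProvider` when an auto-follow run starts and reports the delta in `getStats()`. A minimal, test-style sketch (class and method names are invented here) of a controllable clock that could stand in for `threadPool::relativeTimeInMillis` and make `time_since_last_check_millis` deterministic:

import java.util.concurrent.atomic.AtomicLong;
import java.util.function.LongSupplier;

final class ControllableRelativeClock {
    private final AtomicLong millis = new AtomicLong();

    // Hand this to the AutoFollowCoordinator constructor instead of threadPool::relativeTimeInMillis.
    LongSupplier supplier() {
        return millis::get;
    }

    // Move the clock forward between an auto-follow run and the getStats() call.
    void advance(long byMillis) {
        millis.addAndGet(byMillis);
    }
}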
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java
index 5abe852ca5f..8865c536917 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java
@@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
-import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
@@ -117,27 +116,23 @@ public abstract class CcrIntegTestCase extends ESTestCase {
}
stopClusters();
- NodeConfigurationSource nodeConfigurationSource = createNodeConfigurationSource();
Collection<Class<? extends Plugin>> mockPlugins = Arrays.asList(ESIntegTestCase.TestSeedPlugin.class,
TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class, getTestTransportPlugin());
InternalTestCluster leaderCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(),
- numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "leader", mockPlugins,
+ numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(null), 0, "leader", mockPlugins,
Function.identity());
- InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(),
- numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "follower", mockPlugins,
- Function.identity());
- clusterGroup = new ClusterGroup(leaderCluster, followerCluster);
-
leaderCluster.beforeTest(random(), 0.0D);
leaderCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster());
+
+ String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString();
+ InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(),
+ numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(address), 0, "follower",
+ mockPlugins, Function.identity());
+ clusterGroup = new ClusterGroup(leaderCluster, followerCluster);
+
followerCluster.beforeTest(random(), 0.0D);
followerCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster());
-
- ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
- String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString();
- updateSettingsRequest.persistentSettings(Settings.builder().put("cluster.remote.leader_cluster.seeds", address));
- assertAcked(followerClient().admin().cluster().updateSettings(updateSettingsRequest).actionGet());
}
/**
@@ -175,7 +170,7 @@ public abstract class CcrIntegTestCase extends ESTestCase {
}
}
- private NodeConfigurationSource createNodeConfigurationSource() {
+ private NodeConfigurationSource createNodeConfigurationSource(String leaderSeedAddress) {
Settings.Builder builder = Settings.builder();
builder.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE);
// Default the watermarks to absurdly low to prevent the tests
@@ -195,6 +190,9 @@ public abstract class CcrIntegTestCase extends ESTestCase {
builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false);
builder.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false);
builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
+ if (leaderSeedAddress != null) {
+ builder.put("cluster.remote.leader_cluster.seeds", leaderSeedAddress);
+ }
return new NodeConfigurationSource() {
@Override
public Settings nodeSettings(int nodeOrdinal) {
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
index 534397a0a9a..7228acaacf1 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
@@ -89,7 +89,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101"));
assertThat(entries.get(0).getValue(), nullValue());
};
- AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState)) {
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState), () -> 1L) {
@Override
void getRemoteClusterState(String remoteCluster,
long metadataVersion,
@@ -154,7 +154,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
assertThat(results.get(0).clusterStateFetchException, sameInstance(failure));
assertThat(results.get(0).autoFollowExecutionResults.entrySet().size(), equalTo(0));
};
- AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) {
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) {
@Override
void getRemoteClusterState(String remoteCluster,
long metadataVersion,
@@ -209,7 +209,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101"));
assertThat(entries.get(0).getValue(), sameInstance(failure));
};
- AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) {
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) {
@Override
void getRemoteClusterState(String remoteCluster,
long metadataVersion,
@@ -266,7 +266,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101"));
assertThat(entries.get(0).getValue(), sameInstance(failure));
};
- AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) {
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) {
@Override
void getRemoteClusterState(String remoteCluster,
long metadataVersion,
@@ -532,8 +532,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
null,
mock(ClusterService.class),
- new CcrLicenseChecker(() -> true, () -> false)
- );
+ new CcrLicenseChecker(() -> true, () -> false),
+ () -> 1L);
autoFollowCoordinator.updateStats(Collections.singletonList(
new AutoFollowCoordinator.AutoFollowResult("_alias1"))
@@ -585,6 +585,92 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias2:index2").getCause().getMessage(), equalTo("error"));
}
+ public void testUpdateAutoFollowers() {
+ ClusterService clusterService = mock(ClusterService.class);
+ // Return a cluster state with no patterns so that the auto followers never really execute:
+ ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
+ .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
+ new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap())))
+ .build();
+ when(clusterService.state()).thenReturn(followerState);
+ AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
+ null,
+ clusterService,
+ new CcrLicenseChecker(() -> true, () -> false),
+ () -> 1L);
+ // Add 3 patterns:
+ Map<String, AutoFollowPattern> patterns = new HashMap<>();
+ patterns.put("pattern1", new AutoFollowPattern("remote1", Collections.singletonList("logs-*"), null, null, null,
+ null, null, null, null, null, null, null, null));
+ patterns.put("pattern2", new AutoFollowPattern("remote2", Collections.singletonList("logs-*"), null, null, null,
+ null, null, null, null, null, null, null, null));
+ patterns.put("pattern3", new AutoFollowPattern("remote2", Collections.singletonList("metrics-*"), null, null, null,
+ null, null, null, null, null, null, null, null));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("remote"))
+ .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
+ new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
+ .build();
+ autoFollowCoordinator.updateAutoFollowers(clusterState);
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2));
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue());
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue());
+ // Remove patterns 1 and 3:
+ patterns.remove("pattern1");
+ patterns.remove("pattern3");
+ clusterState = ClusterState.builder(new ClusterName("remote"))
+ .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
+ new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
+ .build();
+ autoFollowCoordinator.updateAutoFollowers(clusterState);
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(1));
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue());
+ // Add pattern 4:
+ patterns.put("pattern4", new AutoFollowPattern("remote1", Collections.singletonList("metrics-*"), null, null, null,
+ null, null, null, null, null, null, null, null));
+ clusterState = ClusterState.builder(new ClusterName("remote"))
+ .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
+ new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
+ .build();
+ autoFollowCoordinator.updateAutoFollowers(clusterState);
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2));
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue());
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue());
+ // Remove patterns 2 and 4:
+ patterns.remove("pattern2");
+ patterns.remove("pattern4");
+ clusterState = ClusterState.builder(new ClusterName("remote"))
+ .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
+ new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
+ .build();
+ autoFollowCoordinator.updateAutoFollowers(clusterState);
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0));
+ }
+
+ public void testUpdateAutoFollowersNoPatterns() {
+ AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
+ null,
+ mock(ClusterService.class),
+ new CcrLicenseChecker(() -> true, () -> false),
+ () -> 1L);
+ ClusterState clusterState = ClusterState.builder(new ClusterName("remote"))
+ .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
+ new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap())))
+ .build();
+ autoFollowCoordinator.updateAutoFollowers(clusterState);
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0));
+ }
+
+ public void testUpdateAutoFollowersNoAutoFollowMetadata() {
+ AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
+ null,
+ mock(ClusterService.class),
+ new CcrLicenseChecker(() -> true, () -> false),
+ () -> 1L);
+ ClusterState clusterState = ClusterState.builder(new ClusterName("remote")).build();
+ autoFollowCoordinator.updateAutoFollowers(clusterState);
+ assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0));
+ }
+
public void testWaitForMetadataVersion() {
Client client = mock(Client.class);
when(client.getRemoteClusterClient(anyString())).thenReturn(client);
@@ -611,7 +697,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
List<AutoFollowCoordinator.AutoFollowResult> allResults = new ArrayList<>();
Consumer<List<AutoFollowCoordinator.AutoFollowResult>> handler = allResults::addAll;
- AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) {
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) {
long previousRequestedMetadataVersion = 0;
@@ -669,7 +755,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
fail("should not be invoked");
};
AtomicInteger counter = new AtomicInteger();
- AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) {
+ AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) {
long previousRequestedMetadataVersion = 0;
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java
index c651cca5b6a..41e771ac97e 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java
@@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomReadExceptions;
+import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomTrackingClusters;
import static org.elasticsearch.xpack.ccr.action.StatsResponsesTests.createStatsResponse;
public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCase<CcrStatsAction.Response> {
@@ -27,7 +28,8 @@ public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCas
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
- randomReadExceptions()
+ randomReadExceptions(),
+ randomTrackingClusters()
);
FollowStatsAction.StatsResponses statsResponse = createStatsResponse();
return new CcrStatsAction.Response(autoFollowStats, statsResponse);
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java
index c4a61529f49..61b92b485c1 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java
@@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
+import org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster;
import java.io.IOException;
import java.util.Map;
@@ -34,7 +35,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase randomTrackingClusters() {
+ final int count = randomIntBetween(0, 16);
+ final NavigableMap<String, AutoFollowedCluster> readExceptions = new TreeMap<>();
+ for (int i = 0; i < count; i++) {
+ readExceptions.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong()));
+ }
+ return readExceptions;
+ }
+
@Override
protected Writeable.Reader instanceReader() {
return AutoFollowStats::new;
@@ -56,6 +67,11 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase entry : newInstance.getRecentAutoFollowErrors().entrySet()) {
@@ -68,6 +84,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase(Collections.singletonMap(
randomAlphaOfLength(4),
new ElasticsearchException("cannot follow index")));
+
+ final NavigableMap<String, AutoFollowedCluster> trackingClusters =
+ new TreeMap<>(Collections.singletonMap(
+ randomAlphaOfLength(4),
+ new AutoFollowedCluster(1L, 1L)));
final AutoFollowStats autoFollowStats =
- new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions);
+ new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions,
+ trackingClusters);
final AutoFollowStatsMonitoringDoc document =
new AutoFollowStatsMonitoringDoc("_cluster", timestamp, intervalMillis, node, autoFollowStats);
@@ -99,7 +107,7 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase
+ "\"ccr_auto_follow_stats\":{"
+ "\"number_of_failed_follow_indices\":" + autoFollowStats.getNumberOfFailedFollowIndices() + ","
+ "\"number_of_failed_remote_cluster_state_requests\":" +
- autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + ","
+ autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + ","
+ "\"number_of_successful_follow_indices\":" + autoFollowStats.getNumberOfSuccessfulFollowIndices() + ","
+ "\"recent_auto_follow_errors\":["
+ "{"
@@ -109,6 +117,15 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase
+ "\"reason\":\"cannot follow index\""
+ "}"
+ "}"
+ + "],"
+ + "\"auto_followed_clusters\":["
+ + "{"
+ + "\"cluster_name\":\"" + trackingClusters.keySet().iterator().next() + "\","
+ + "\"time_since_last_check_millis\":" +
+ trackingClusters.values().iterator().next().getTimeSinceLastCheckMillis() + ","
+ + "\"last_seen_metadata_version\":" +
+ trackingClusters.values().iterator().next().getLastSeenMetadataVersion()
+ + "}"
+ "]"
+ "}"
+ "}"));
@@ -117,7 +134,11 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase
public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException {
final NavigableMap<String, ElasticsearchException> fetchExceptions =
new TreeMap<>(Collections.singletonMap("leader_index", new ElasticsearchException("cannot follow index")));
- final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions);
+ final NavigableMap<String, AutoFollowedCluster> trackingClusters =
+ new TreeMap<>(Collections.singletonMap(
+ randomAlphaOfLength(4),
+ new AutoFollowedCluster(1L, 1L)));
+ final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions, trackingClusters);
XContentBuilder builder = jsonBuilder();
builder.value(status);
Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false);
@@ -142,18 +163,28 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase
assertThat("expected keyword field type for field [" + fieldName + "]", fieldType,
anyOf(equalTo("keyword"), equalTo("text")));
} else {
+ Map<?, ?> innerFieldValue = (Map<?, ?>) ((List) fieldValue).get(0);
// Manual test specific object fields and if not just fail:
if (fieldName.equals("recent_auto_follow_errors")) {
assertThat(fieldType, equalTo("nested"));
- assertThat(((Map<?, ?>) fieldMapping.get("properties")).size(), equalTo(2));
+ assertThat(((Map<?, ?>) fieldMapping.get("properties")).size(), equalTo(innerFieldValue.size()));
assertThat(XContentMapValues.extractValue("properties.leader_index.type", fieldMapping), equalTo("keyword"));
assertThat(XContentMapValues.extractValue("properties.auto_follow_exception.type", fieldMapping), equalTo("object"));
+ innerFieldValue = (Map<?, ?>) innerFieldValue.get("auto_follow_exception");
Map<?, ?> exceptionFieldMapping =
(Map<?, ?>) XContentMapValues.extractValue("properties.auto_follow_exception.properties", fieldMapping);
- assertThat(exceptionFieldMapping.size(), equalTo(2));
+ assertThat(exceptionFieldMapping.size(), equalTo(innerFieldValue.size()));
assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword"));
assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text"));
+ } else if (fieldName.equals("auto_followed_clusters")) {
+ assertThat(fieldType, equalTo("nested"));
+ Map<?, ?> innerFieldMapping = ((Map<?, ?>) fieldMapping.get("properties"));
+ assertThat(innerFieldMapping.size(), equalTo(innerFieldValue.size()));
+
+ assertThat(XContentMapValues.extractValue("cluster_name.type", innerFieldMapping), equalTo("keyword"));
+ assertThat(XContentMapValues.extractValue("time_since_last_check_millis.type", innerFieldMapping), equalTo("long"));
+ assertThat(XContentMapValues.extractValue("last_seen_metadata_version.type", innerFieldMapping), equalTo("long"));
} else {
fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]");
}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java
index 6f28c450f04..032cedbdcdf 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.ccr;
import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -17,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.AbstractMap;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
@@ -33,6 +35,10 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
private static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors");
private static final ParseField LEADER_INDEX = new ParseField("leader_index");
private static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception");
+ private static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters");
+ private static final ParseField CLUSTER_NAME = new ParseField("cluster_name");
+ private static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis");
+ private static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<AutoFollowStats, Void> STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats",
@@ -43,26 +49,39 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
new TreeMap<>(
((List<Map.Entry<String, ElasticsearchException>>) args[3])
.stream()
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
- ));
+ .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
+ new TreeMap<>(
+ ((List<Map.Entry<String, AutoFollowedCluster>>) args[4])
+ .stream()
+ .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))));
private static final ConstructingObjectParser<Map.Entry<String, ElasticsearchException>, Void> AUTO_FOLLOW_EXCEPTIONS_PARSER =
new ConstructingObjectParser<>(
"auto_follow_stats_errors",
args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1]));
+ private static final ConstructingObjectParser<Map.Entry<String, AutoFollowedCluster>, Void> AUTO_FOLLOWED_CLUSTERS_PARSER =
+ new ConstructingObjectParser<>(
+ "auto_followed_clusters",
+ args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2])));
+
static {
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX);
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject(
ConstructingObjectParser.constructorArg(),
(p, c) -> ElasticsearchException.fromXContent(p),
AUTO_FOLLOW_EXCEPTION);
+ AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME);
+ AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS);
+ AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION);
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED);
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS);
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED);
STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER,
RECENT_AUTO_FOLLOW_ERRORS);
+ STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER,
+ AUTO_FOLLOWED_CLUSTERS);
}
public static AutoFollowStats fromXContent(final XContentParser parser) {
@@ -73,24 +92,32 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
private final long numberOfFailedRemoteClusterStateRequests;
private final long numberOfSuccessfulFollowIndices;
private final NavigableMap<String, ElasticsearchException> recentAutoFollowErrors;
+ private final NavigableMap<String, AutoFollowedCluster> autoFollowedClusters;
public AutoFollowStats(
- long numberOfFailedFollowIndices,
- long numberOfFailedRemoteClusterStateRequests,
- long numberOfSuccessfulFollowIndices,
- NavigableMap<String, ElasticsearchException> recentAutoFollowErrors
+ long numberOfFailedFollowIndices,
+ long numberOfFailedRemoteClusterStateRequests,
+ long numberOfSuccessfulFollowIndices,
+ NavigableMap<String, ElasticsearchException> recentAutoFollowErrors,
+ NavigableMap<String, AutoFollowedCluster> autoFollowedClusters
) {
this.numberOfFailedFollowIndices = numberOfFailedFollowIndices;
this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests;
this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices;
this.recentAutoFollowErrors = recentAutoFollowErrors;
+ this.autoFollowedClusters = autoFollowedClusters;
}
public AutoFollowStats(StreamInput in) throws IOException {
numberOfFailedFollowIndices = in.readVLong();
numberOfFailedRemoteClusterStateRequests = in.readVLong();
numberOfSuccessfulFollowIndices = in.readVLong();
- recentAutoFollowErrors= new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException));
+ recentAutoFollowErrors = new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException));
+ if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
+ autoFollowedClusters = new TreeMap<>(in.readMap(StreamInput::readString, AutoFollowedCluster::new));
+ } else {
+ autoFollowedClusters = Collections.emptyNavigableMap();
+ }
}
@Override
@@ -99,6 +126,9 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
out.writeVLong(numberOfFailedRemoteClusterStateRequests);
out.writeVLong(numberOfSuccessfulFollowIndices);
out.writeMap(recentAutoFollowErrors, StreamOutput::writeString, StreamOutput::writeException);
+ if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
+ out.writeMap(autoFollowedClusters, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
+ }
}
public long getNumberOfFailedFollowIndices() {
@@ -117,6 +147,10 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
return recentAutoFollowErrors;
}
+ public NavigableMap<String, AutoFollowedCluster> getAutoFollowedClusters() {
+ return autoFollowedClusters;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@@ -148,6 +182,19 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
}
}
builder.endArray();
+ builder.startArray(AUTO_FOLLOWED_CLUSTERS.getPreferredName());
+ {
+ for (final Map.Entry<String, AutoFollowedCluster> entry : autoFollowedClusters.entrySet()) {
+ builder.startObject();
+ {
+ builder.field(CLUSTER_NAME.getPreferredName(), entry.getKey());
+ builder.field(TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(), entry.getValue().getTimeSinceLastCheckMillis());
+ builder.field(LAST_SEEN_METADATA_VERSION.getPreferredName(), entry.getValue().getLastSeenMetadataVersion());
+ }
+ builder.endObject();
+ }
+ }
+ builder.endArray();
return builder;
}
@@ -165,7 +212,8 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
* keys.
*/
recentAutoFollowErrors.keySet().equals(that.recentAutoFollowErrors.keySet()) &&
- getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that));
+ getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that)) &&
+ Objects.equals(autoFollowedClusters, that.autoFollowedClusters);
}
@Override
@@ -179,7 +227,8 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
* messages. Note that we are relying on the fact that the auto follow exceptions are ordered by keys.
*/
recentAutoFollowErrors.keySet(),
- getFetchExceptionMessages(this)
+ getFetchExceptionMessages(this),
+ autoFollowedClusters
);
}
@@ -194,6 +243,58 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
", numberOfFailedRemoteClusterStateRequests=" + numberOfFailedRemoteClusterStateRequests +
", numberOfSuccessfulFollowIndices=" + numberOfSuccessfulFollowIndices +
", recentAutoFollowErrors=" + recentAutoFollowErrors +
+ ", autoFollowedClusters=" + autoFollowedClusters +
'}';
}
+
+ public static class AutoFollowedCluster implements Writeable {
+
+ private final long timeSinceLastCheckMillis;
+ private final long lastSeenMetadataVersion;
+
+ public AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) {
+ this.timeSinceLastCheckMillis = timeSinceLastCheckMillis;
+ this.lastSeenMetadataVersion = lastSeenMetadataVersion;
+ }
+
+ public AutoFollowedCluster(StreamInput in) throws IOException {
+ this(in.readZLong(), in.readVLong());
+ }
+
+ public long getTimeSinceLastCheckMillis() {
+ return timeSinceLastCheckMillis;
+ }
+
+ public long getLastSeenMetadataVersion() {
+ return lastSeenMetadataVersion;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeZLong(timeSinceLastCheckMillis);
+ out.writeVLong(lastSeenMetadataVersion);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AutoFollowedCluster that = (AutoFollowedCluster) o;
+ return timeSinceLastCheckMillis == that.timeSinceLastCheckMillis &&
+ lastSeenMetadataVersion == that.lastSeenMetadataVersion;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(timeSinceLastCheckMillis, lastSeenMetadataVersion);
+ }
+
+ @Override
+ public String toString() {
+ return "AutoFollowedCluster{" +
+ "timeSinceLastCheckMillis=" + timeSinceLastCheckMillis +
+ ", lastSeenMetadataVersion=" + lastSeenMetadataVersion +
+ '}';
+ }
+ }
}
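A minimal, test-style sketch (class name and sample values invented here; the versions are the ones referenced in this change set) of the wire behaviour encoded above: streams before 6.6.0 skip the new map entirely, so it reads back empty, while 6.6.0+ streams round-trip it intact.

import java.util.Collections;
import java.util.TreeMap;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
import org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster;

final class AutoFollowStatsBwcSketch {

    // Serialize and deserialize the stats at the given wire version.
    static AutoFollowStats roundTrip(AutoFollowStats stats, Version version) throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.setVersion(version);
            stats.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                in.setVersion(version);
                return new AutoFollowStats(in);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        AutoFollowStats stats = new AutoFollowStats(1, 0, 2,
            new TreeMap<String, ElasticsearchException>(),
            new TreeMap<String, AutoFollowedCluster>(
                Collections.singletonMap("remote", new AutoFollowedCluster(5L, 3L))));
        // Pre-6.6.0 wire format: the new map is not written and reads back empty.
        System.out.println(roundTrip(stats, Version.V_6_2_0).getAutoFollowedClusters());
        // 6.6.0+ wire format: the map survives the round trip.
        System.out.println(roundTrip(stats, Version.V_6_6_0).getAutoFollowedClusters());
    }
}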
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java
index 679ee0756f6..57bd5bd35dd 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java
@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.core.security.action.token;
import org.elasticsearch.action.Action;
/**
- * Action for invalidating a given token
+ * Action for invalidating one or more tokens
*/
public final class InvalidateTokenAction extends Action {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java
index 7a8372fe456..de3b73ec4af 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.security.action.token;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -22,31 +23,81 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
public final class InvalidateTokenRequest extends ActionRequest {
public enum Type {
- ACCESS_TOKEN,
- REFRESH_TOKEN
+ ACCESS_TOKEN("token"),
+ REFRESH_TOKEN("refresh_token");
+
+ private final String value;
+
+ Type(String value) {
+ this.value = value;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public static Type fromString(String tokenType) {
+ if (tokenType != null) {
+ for (Type type : values()) {
+ if (type.getValue().equals(tokenType)) {
+ return type;
+ }
+ }
+ }
+ return null;
+ }
}
private String tokenString;
private Type tokenType;
+ private String realmName;
+ private String userName;
public InvalidateTokenRequest() {}
/**
- * @param tokenString the string representation of the token
+ * @param tokenString the string representation of the token to be invalidated
+ * @param tokenType the type of the token to be invalidated
+ * @param realmName the name of the realm for which all tokens will be invalidated
+ * @param userName the principal of the user for which all tokens will be invalidated
*/
- public InvalidateTokenRequest(String tokenString, Type type) {
+ public InvalidateTokenRequest(@Nullable String tokenString, @Nullable String tokenType,
+ @Nullable String realmName, @Nullable String userName) {
this.tokenString = tokenString;
- this.tokenType = type;
+ this.tokenType = Type.fromString(tokenType);
+ this.realmName = realmName;
+ this.userName = userName;
+ }
+
+ /**
+ * @param tokenString the string representation of the token to be invalidated
+ * @param tokenType the type of the token to be invalidated
+ */
+ public InvalidateTokenRequest(String tokenString, String tokenType) {
+ this.tokenString = tokenString;
+ this.tokenType = Type.fromString(tokenType);
+ this.realmName = null;
+ this.userName = null;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
- if (Strings.isNullOrEmpty(tokenString)) {
- validationException = addValidationError("token string must be provided", null);
- }
- if (tokenType == null) {
- validationException = addValidationError("token type must be provided", validationException);
+ if (Strings.hasText(realmName) || Strings.hasText(userName)) {
+ if (Strings.hasText(tokenString)) {
+ validationException =
+ addValidationError("token string must not be provided when realm name or username is specified", null);
+ }
+ if (tokenType != null) {
+ validationException =
+ addValidationError("token type must not be provided when realm name or username is specified", validationException);
+ }
+ } else if (Strings.isNullOrEmpty(tokenString)) {
+ validationException =
+ addValidationError("token string must be provided when not specifying a realm name or a username", null);
+ } else if (tokenType == null) {
+ validationException =
+ addValidationError("token type must be provided when a token string is specified", null);
}
return validationException;
}
@@ -67,26 +118,76 @@ public final class InvalidateTokenRequest extends ActionRequest {
this.tokenType = tokenType;
}
+ public String getRealmName() {
+ return realmName;
+ }
+
+ public void setRealmName(String realmName) {
+ this.realmName = realmName;
+ }
+
+ public String getUserName() {
+ return userName;
+ }
+
+ public void setUserName(String userName) {
+ this.userName = userName;
+ }
+
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(tokenString);
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ if (Strings.isNullOrEmpty(tokenString)) {
+ throw new IllegalArgumentException("token is required for versions < v6.6.0");
+ }
+ out.writeString(tokenString);
+ } else {
+ out.writeOptionalString(tokenString);
+ }
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
- out.writeVInt(tokenType.ordinal());
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ if (tokenType == null) {
+ throw new IllegalArgumentException("token type is not optional for versions > v6.2.0 and < v6.6.0");
+ }
+ out.writeVInt(tokenType.ordinal());
+ } else {
+ out.writeOptionalVInt(tokenType == null ? null : tokenType.ordinal());
+ }
} else if (tokenType == Type.REFRESH_TOKEN) {
- throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() +
- "]");
+ throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + "]");
+ }
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeOptionalString(realmName);
+ out.writeOptionalString(userName);
+ } else if (realmName != null || userName != null) {
+ throw new IllegalArgumentException(
+ "realm or user token invalidation cannot be serialized with version [" + out.getVersion() + "]");
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- tokenString = in.readString();
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ tokenString = in.readString();
+ } else {
+ tokenString = in.readOptionalString();
+ }
if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
- tokenType = Type.values()[in.readVInt()];
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ int type = in.readVInt();
+ tokenType = Type.values()[type];
+ } else {
+ Integer type = in.readOptionalVInt();
+ tokenType = type == null ? null : Type.values()[type];
+ }
} else {
tokenType = Type.ACCESS_TOKEN;
}
+ if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+ realmName = in.readOptionalString();
+ userName = in.readOptionalString();
+ }
}
}
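
The net effect of the request changes above: a caller must target either a single token (token string plus type) or a whole realm and/or user, never both. The sketch below is illustrative only; the token value, realm name, and username are made-up placeholders, and it assumes the x-pack core classes from this change are on the classpath.

    import org.elasticsearch.action.ActionRequestValidationException;
    import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest;

    public class InvalidateTokenRequestSketch {
        public static void main(String[] args) {
            // Invalidate a single access token: token string and token type are both required.
            InvalidateTokenRequest byToken = new InvalidateTokenRequest("not-a-real-token", "token");
            assert byToken.validate() == null;

            // Invalidate every token owned by a user in a realm: token string and type must be omitted.
            InvalidateTokenRequest byOwner = new InvalidateTokenRequest(null, null, "native1", "jsmith");
            assert byOwner.validate() == null;

            // Mixing the two styles is rejected, with one validation error per offending field.
            InvalidateTokenRequest mixed = new InvalidateTokenRequest("not-a-real-token", "refresh_token", "native1", null);
            ActionRequestValidationException e = mixed.validate();
            assert e != null && e.validationErrors().size() == 2;
        }
    }
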
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java
index f77f6c65332..0b454905cfa 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java
@@ -34,4 +34,20 @@ public final class InvalidateTokenRequestBuilder
request.setTokenType(type);
return this;
}
+
+ /**
+ * Sets the name of the realm for which all tokens should be invalidated
+ */
+ public InvalidateTokenRequestBuilder setRealmName(String realmName) {
+ request.setRealmName(realmName);
+ return this;
+ }
+
+ /**
+ * Sets the username for which all tokens should be invalidated
+ */
+ public InvalidateTokenRequestBuilder setUserName(String username) {
+ request.setUserName(username);
+ return this;
+ }
}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java
index cebb005b272..886caeac370 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java
@@ -5,41 +5,83 @@
*/
package org.elasticsearch.xpack.core.security.action.token;
+import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Objects;
/**
- * Response for a invalidation of a token.
+ * Response for the invalidation of one or more tokens.
*/
-public final class InvalidateTokenResponse extends ActionResponse {
+public final class InvalidateTokenResponse extends ActionResponse implements ToXContent {
- private boolean created;
+ private TokensInvalidationResult result;
public InvalidateTokenResponse() {}
- public InvalidateTokenResponse(boolean created) {
- this.created = created;
+ public InvalidateTokenResponse(TokensInvalidationResult result) {
+ this.result = result;
}
- /**
- * If the token is already invalidated then created will be false
- */
- public boolean isCreated() {
- return created;
+ public TokensInvalidationResult getResult() {
+ return result;
+ }
+
+ private boolean isCreated() {
+ return result.getInvalidatedTokens().size() > 0
+ && result.getPreviouslyInvalidatedTokens().isEmpty()
+ && result.getErrors().isEmpty();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeBoolean(created);
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ out.writeBoolean(isCreated());
+ } else {
+ result.writeTo(out);
+ }
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- created = in.readBoolean();
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ final boolean created = in.readBoolean();
+ if (created) {
+ result = new TokensInvalidationResult(Arrays.asList(""), Collections.emptyList(), Collections.emptyList(), 0);
+ } else {
+ result = new TokensInvalidationResult(Collections.emptyList(), Arrays.asList(""), Collections.emptyList(), 0);
+ }
+ } else {
+ result = new TokensInvalidationResult(in);
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+ result.toXContent(builder, params);
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ InvalidateTokenResponse that = (InvalidateTokenResponse) o;
+ return Objects.equals(result, that.result);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(result);
}
}
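
On the wire, the response stays compatible with older nodes: writeTo collapses the result to the legacy created boolean before 7.0.0, and readFrom rebuilds a placeholder TokensInvalidationResult from that boolean when reading an older stream. A minimal sketch of the read side, using Version.V_6_5_1 purely as an example of a pre-7.0 wire version:

    import org.elasticsearch.Version;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenResponse;

    public class LegacyInvalidateTokenResponseSketch {
        public static void main(String[] args) throws Exception {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.setVersion(Version.V_6_5_1);
                out.writeBoolean(true); // all a pre-7.0 node sends: created == true
                try (StreamInput in = out.bytes().streamInput()) {
                    in.setVersion(Version.V_6_5_1);
                    InvalidateTokenResponse response = new InvalidateTokenResponse();
                    response.readFrom(in);
                    // the placeholder result reports one invalidated token and no errors
                    assert response.getResult().getInvalidatedTokens().size() == 1;
                    assert response.getResult().getPreviouslyInvalidatedTokens().isEmpty();
                    assert response.getResult().getErrors().isEmpty();
                }
            }
        }
    }
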
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java
new file mode 100644
index 00000000000..cfa83b63ed5
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.security.authc.support;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * The result of attempting to invalidate one or more tokens. The result contains information about:
+ *
+ * - how many of the tokens were actually invalidated
+ * - how many tokens were not invalidated by this request because they had already been invalidated
+ * - how many errors were encountered while invalidating tokens, and the error details
+ *
+ */
+public class TokensInvalidationResult implements ToXContentObject, Writeable {
+
+ private final List<String> invalidatedTokens;
+ private final List<String> previouslyInvalidatedTokens;
+ private final List<ElasticsearchException> errors;
+ private final int attemptCount;
+
+ public TokensInvalidationResult(List<String> invalidatedTokens, List<String> previouslyInvalidatedTokens,
+ @Nullable List<ElasticsearchException> errors, int attemptCount) {
+ Objects.requireNonNull(invalidatedTokens, "invalidated_tokens must be provided");
+ this.invalidatedTokens = invalidatedTokens;
+ Objects.requireNonNull(previouslyInvalidatedTokens, "previously_invalidated_tokens must be provided");
+ this.previouslyInvalidatedTokens = previouslyInvalidatedTokens;
+ if (null != errors) {
+ this.errors = errors;
+ } else {
+ this.errors = Collections.emptyList();
+ }
+ this.attemptCount = attemptCount;
+ }
+
+ public TokensInvalidationResult(StreamInput in) throws IOException {
+ this.invalidatedTokens = in.readList(StreamInput::readString);
+ this.previouslyInvalidatedTokens = in.readList(StreamInput::readString);
+ this.errors = in.readList(StreamInput::readException);
+ this.attemptCount = in.readVInt();
+ }
+
+ public static TokensInvalidationResult emptyResult() {
+ return new TokensInvalidationResult(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0);
+ }
+
+
+ public List<String> getInvalidatedTokens() {
+ return invalidatedTokens;
+ }
+
+ public List<String> getPreviouslyInvalidatedTokens() {
+ return previouslyInvalidatedTokens;
+ }
+
+ public List<ElasticsearchException> getErrors() {
+ return errors;
+ }
+
+ public int getAttemptCount() {
+ return attemptCount;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject()
+ // Remove created after PR is backported to 6.x
+ .field("created", isCreated())
+ .field("invalidated_tokens", invalidatedTokens.size())
+ .field("previously_invalidated_tokens", previouslyInvalidatedTokens.size())
+ .field("error_count", errors.size());
+ if (errors.isEmpty() == false) {
+ builder.field("error_details");
+ builder.startArray();
+ for (ElasticsearchException e : errors) {
+ builder.startObject();
+ ElasticsearchException.generateThrowableXContent(builder, params, e);
+ builder.endObject();
+ }
+ builder.endArray();
+ }
+ return builder.endObject();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeStringList(invalidatedTokens);
+ out.writeStringList(previouslyInvalidatedTokens);
+ out.writeCollection(errors, StreamOutput::writeException);
+ out.writeVInt(attemptCount);
+ }
+
+ private boolean isCreated() {
+ return this.getInvalidatedTokens().size() > 0
+ && this.getPreviouslyInvalidatedTokens().isEmpty()
+ && this.getErrors().isEmpty();
+ }
+}
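
A note on construction semantics: the errors list may be passed as null and is normalized to an empty list, while the legacy created flag rendered by toXContent is only true when at least one token was invalidated and nothing was skipped or failed. A small sketch with made-up token ids:

    import java.util.Arrays;
    import java.util.Collections;
    import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult;

    public class TokensInvalidationResultSketch {
        public static void main(String[] args) {
            TokensInvalidationResult partial = new TokensInvalidationResult(
                Arrays.asList("token-id-1", "token-id-2"),   // invalidated by this request
                Collections.singletonList("token-id-3"),     // skipped: already invalidated earlier
                null,                                        // null errors become an empty list
                1);
            assert partial.getErrors().isEmpty();
            // "created" would be rendered as false here, because token-id-3 was already invalid
        }
    }
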
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java
index ef59f870c68..a7faf4d2231 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java
@@ -326,6 +326,10 @@ public class SecurityClient {
return new InvalidateTokenRequestBuilder(client).setTokenString(token);
}
+ public InvalidateTokenRequestBuilder prepareInvalidateToken() {
+ return new InvalidateTokenRequestBuilder(client);
+ }
+
public void invalidateToken(InvalidateTokenRequest request, ActionListener<InvalidateTokenResponse> listener) {
client.execute(InvalidateTokenAction.INSTANCE, request, listener);
}
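
With prepareInvalidateToken() exposed, a caller no longer needs a token string up front and can target a realm and/or user through the builder. A hypothetical usage sketch; it assumes a SecurityClient wired to an ElasticsearchClient and relies on the standard ActionRequestBuilder#execute(ActionListener) plumbing:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenResponse;
    import org.elasticsearch.xpack.core.security.client.SecurityClient;

    public class InvalidateAllUserTokensSketch {
        public static void invalidateFor(SecurityClient securityClient, String realm, String user) {
            securityClient.prepareInvalidateToken()
                .setRealmName(realm)
                .setUserName(user)
                .execute(ActionListener.wrap(
                    response -> System.out.printf("invalidated %d tokens, %d already invalid, %d errors%n",
                        response.getResult().getInvalidatedTokens().size(),
                        response.getResult().getPreviouslyInvalidatedTokens().size(),
                        response.getResult().getErrors().size()),
                    e -> { throw new RuntimeException("token invalidation failed", e); }));
        }
    }
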
diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json
index 1e6d3ec892a..c34fed37516 100644
--- a/x-pack/plugin/core/src/main/resources/monitoring-es.json
+++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json
@@ -1060,6 +1060,20 @@
}
}
}
+ },
+ "auto_followed_clusters": {
+ "type": "nested",
+ "properties": {
+ "cluster_name": {
+ "type": "keyword"
+ },
+ "time_since_last_check_millis": {
+ "type": "long"
+ },
+ "last_seen_metadata_version": {
+ "type": "long"
+ }
+ }
}
}
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java
index bd23198e8ea..2d8782f0111 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java
@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.token;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem;
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java
new file mode 100644
index 00000000000..3fd7eb7da46
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.action.token;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.Matchers.containsString;
+
+public class InvalidateTokenRequestTests extends ESTestCase {
+
+ public void testValidation() {
+ InvalidateTokenRequest request = new InvalidateTokenRequest();
+ ActionRequestValidationException ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(1, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0), containsString("token string must be provided when not specifying a realm"));
+
+ request = new InvalidateTokenRequest(randomAlphaOfLength(12), randomFrom("", null));
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(1, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0), containsString("token type must be provided when a token string is specified"));
+
+ request = new InvalidateTokenRequest(randomFrom("", null), "access_token");
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(1, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0), containsString("token string must be provided when not specifying a realm"));
+
+ request = new InvalidateTokenRequest(randomFrom("", null), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8));
+ ve = request.validate();
+ assertNull(ve);
+
+ request =
+ new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8));
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(1, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0),
+ containsString("token string must not be provided when realm name or username is specified"));
+
+ request = new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("token", "refresh_token"),
+ randomAlphaOfLength(4), randomAlphaOfLength(8));
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(2, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0),
+ containsString("token string must not be provided when realm name or username is specified"));
+ assertThat(ve.validationErrors().get(1),
+ containsString("token type must not be provided when realm name or username is specified"));
+
+ request =
+ new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8));
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(1, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0),
+ containsString("token string must not be provided when realm name or username is specified"));
+
+ request =
+ new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("token", "refresh_token"), randomFrom("", null),
+ randomAlphaOfLength(8));
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(2, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0),
+ containsString("token string must not be provided when realm name or username is specified"));
+ assertThat(ve.validationErrors().get(1),
+ containsString("token type must not be provided when realm name or username is specified"));
+
+ request = new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomFrom("", null), randomAlphaOfLength(8));
+ ve = request.validate();
+ assertNotNull(ve);
+ assertEquals(1, ve.validationErrors().size());
+ assertThat(ve.validationErrors().get(0),
+ containsString("token string must not be provided when realm name or username is specified"));
+ }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java
new file mode 100644
index 00000000000..1a59971ff9c
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.action.token;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
+import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class InvalidateTokenResponseTests extends ESTestCase {
+
+ public void testSerialization() throws IOException {
+ TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false)),
+ Arrays.asList(generateRandomStringArray(20, 15, false)),
+ Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")),
+ new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))),
+ randomIntBetween(0, 5));
+ InvalidateTokenResponse response = new InvalidateTokenResponse(result);
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ response.writeTo(output);
+ try (StreamInput input = output.bytes().streamInput()) {
+ InvalidateTokenResponse serialized = new InvalidateTokenResponse();
+ serialized.readFrom(input);
+ assertThat(serialized.getResult().getInvalidatedTokens(), equalTo(response.getResult().getInvalidatedTokens()));
+ assertThat(serialized.getResult().getPreviouslyInvalidatedTokens(),
+ equalTo(response.getResult().getPreviouslyInvalidatedTokens()));
+ assertThat(serialized.getResult().getErrors().size(), equalTo(response.getResult().getErrors().size()));
+ assertThat(serialized.getResult().getErrors().get(0).toString(), containsString("this is an error message"));
+ assertThat(serialized.getResult().getErrors().get(1).toString(), containsString("this is an error message2"));
+ }
+ }
+
+ result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false)),
+ Arrays.asList(generateRandomStringArray(20, 15, false)),
+ Collections.emptyList(), randomIntBetween(0, 5));
+ response = new InvalidateTokenResponse(result);
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ response.writeTo(output);
+ try (StreamInput input = output.bytes().streamInput()) {
+ InvalidateTokenResponse serialized = new InvalidateTokenResponse();
+ serialized.readFrom(input);
+ assertThat(serialized.getResult().getInvalidatedTokens(), equalTo(response.getResult().getInvalidatedTokens()));
+ assertThat(serialized.getResult().getPreviouslyInvalidatedTokens(),
+ equalTo(response.getResult().getPreviouslyInvalidatedTokens()));
+ assertThat(serialized.getResult().getErrors().size(), equalTo(response.getResult().getErrors().size()));
+ }
+ }
+ }
+
+ public void testSerializationToPre66Version() throws IOException{
+ final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_2_0, Version.V_6_5_1);
+ TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)),
+ Arrays.asList(generateRandomStringArray(20, 15, false, false)),
+ Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")),
+ new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))),
+ randomIntBetween(0, 5));
+ InvalidateTokenResponse response = new InvalidateTokenResponse(result);
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ output.setVersion(version);
+ response.writeTo(output);
+ try (StreamInput input = output.bytes().streamInput()) {
+ // False as we have errors and previously invalidated tokens
+ assertThat(input.readBoolean(), equalTo(false));
+ }
+ }
+
+ result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)),
+ Arrays.asList(generateRandomStringArray(20, 15, false, false)),
+ Collections.emptyList(), randomIntBetween(0, 5));
+ response = new InvalidateTokenResponse(result);
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ output.setVersion(version);
+ response.writeTo(output);
+ try (StreamInput input = output.bytes().streamInput()) {
+ // False as we have previously invalidated tokens
+ assertThat(input.readBoolean(), equalTo(false));
+ }
+ }
+
+ result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)),
+ Collections.emptyList(), Collections.emptyList(), randomIntBetween(0, 5));
+ response = new InvalidateTokenResponse(result);
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ output.setVersion(version);
+ response.writeTo(output);
+ try (StreamInput input = output.bytes().streamInput()) {
+ assertThat(input.readBoolean(), equalTo(true));
+ }
+ }
+ }
+
+ public void testToXContent() throws IOException {
+ List<String> invalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false));
+ List<String> previouslyInvalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false));
+ TokensInvalidationResult result = new TokensInvalidationResult(invalidatedTokens, previouslyInvalidatedTokens,
+ Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")),
+ new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))),
+ randomIntBetween(0, 5));
+ InvalidateTokenResponse response = new InvalidateTokenResponse(result);
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ response.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ assertThat(Strings.toString(builder),
+ equalTo("{\"created\":false," +
+ "\"invalidated_tokens\":" + invalidatedTokens.size() + "," +
+ "\"previously_invalidated_tokens\":" + previouslyInvalidatedTokens.size() + "," +
+ "\"error_count\":2," +
+ "\"error_details\":[" +
+ "{\"type\":\"exception\"," +
+ "\"reason\":\"foo\"," +
+ "\"caused_by\":{" +
+ "\"type\":\"illegal_argument_exception\"," +
+ "\"reason\":\"this is an error message\"}" +
+ "}," +
+ "{\"type\":\"exception\"," +
+ "\"reason\":\"bar\"," +
+ "\"caused_by\":" +
+ "{\"type\":\"illegal_argument_exception\"," +
+ "\"reason\":\"this is an error message2\"}" +
+ "}" +
+ "]" +
+ "}"));
+ }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java
index f0e6bf2c990..8c35df01ed9 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java
@@ -18,6 +18,7 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction;
import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest;
import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionResponse;
+import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.authc.TokenService;
import org.elasticsearch.xpack.security.authc.UserToken;
@@ -27,12 +28,11 @@ import org.elasticsearch.xpack.security.authc.saml.SamlRedirect;
import org.elasticsearch.xpack.security.authc.saml.SamlUtils;
import org.opensaml.saml.saml2.core.LogoutResponse;
-import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
-import java.util.stream.Collectors;
+import java.util.function.Predicate;
import static org.elasticsearch.xpack.security.authc.saml.SamlRealm.findSamlRealms;
@@ -85,7 +85,7 @@ public final class TransportSamlInvalidateSessionAction
private void findAndInvalidateTokens(SamlRealm realm, SamlLogoutRequestHandler.Result result, ActionListener<Integer> listener) {
final Map<String, Object> tokenMetadata = realm.createTokenMetadata(result.getNameId(), result.getSession());
- if (Strings.hasText((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE)) == false) {
+ if (Strings.isNullOrEmpty((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE))) {
// If we don't have a valid name-id to match against, don't do anything
logger.debug("Logout request [{}] has no NameID value, so cannot invalidate any sessions", result);
listener.onResponse(0);
@@ -93,22 +93,21 @@ public final class TransportSamlInvalidateSessionAction
}
tokenService.findActiveTokensForRealm(realm.name(), ActionListener.wrap(tokens -> {
- List<Tuple<UserToken, String>> sessionTokens = filterTokens(tokens, tokenMetadata);
- logger.debug("Found [{}] token pairs to invalidate for SAML metadata [{}]", sessionTokens.size(), tokenMetadata);
- if (sessionTokens.isEmpty()) {
- listener.onResponse(0);
- } else {
- GroupedActionListener<Boolean> groupedListener = new GroupedActionListener<>(
- ActionListener.wrap(collection -> listener.onResponse(collection.size()), listener::onFailure),
- sessionTokens.size(), Collections.emptyList()
- );
- sessionTokens.forEach(tuple -> invalidateTokenPair(tuple, groupedListener));
- }
- }, e -> listener.onFailure(e)
- ));
+ logger.debug("Found [{}] token pairs to invalidate for SAML metadata [{}]", tokens.size(), tokenMetadata);
+ if (tokens.isEmpty()) {
+ listener.onResponse(0);
+ } else {
+ GroupedActionListener<TokensInvalidationResult> groupedListener = new GroupedActionListener<>(
+ ActionListener.wrap(collection -> listener.onResponse(collection.size()), listener::onFailure),
+ tokens.size(), Collections.emptyList()
+ );
+ tokens.forEach(tuple -> invalidateTokenPair(tuple, groupedListener));
+ }
+ }, listener::onFailure
+ ), containsMetadata(tokenMetadata));
}
- private void invalidateTokenPair(Tuple<UserToken, String> tokenPair, ActionListener<Boolean> listener) {
+ private void invalidateTokenPair(Tuple<UserToken, String> tokenPair, ActionListener<TokensInvalidationResult> listener) {
// Invalidate the refresh token first, so the client doesn't trigger a refresh once the access token is invalidated
tokenService.invalidateRefreshToken(tokenPair.v2(), ActionListener.wrap(ignore -> tokenService.invalidateAccessToken(
tokenPair.v1(),
@@ -118,13 +117,12 @@ public final class TransportSamlInvalidateSessionAction
})), listener::onFailure));
}
- private List<Tuple<UserToken, String>> filterTokens(Collection<Tuple<UserToken, String>> tokens, Map<String, Object> requiredMetadata) {
- return tokens.stream()
- .filter(tup -> {
- Map<String, Object> actualMetadata = tup.v1().getMetadata();
- return requiredMetadata.entrySet().stream().allMatch(e -> Objects.equals(actualMetadata.get(e.getKey()), e.getValue()));
- })
- .collect(Collectors.toList());
+
+ private Predicate