Merge branch 'master' into close-index-api-refactoring
This commit is contained in:
commit
0a0c969517
|
@ -64,17 +64,11 @@
|
|||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]deps[/\\]joda[/\\]SimpleJodaTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]EnvironmentTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]NodeEnvironmentTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]get[/\\]GetActionIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indexing[/\\]IndexActionIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]JvmGcMonitorServiceSettingsTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsServiceTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]recovery[/\\]FullRollingRestartIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]recovery[/\\]RecoveryWhileUnderLoadIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]recovery[/\\]RelocationIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]recovery[/\\]TruncatedRecoveryIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponseTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]AliasRoutingIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]SimpleRoutingIT.java" checks="LineLength" />
|
||||
|
|
|
@ -268,8 +268,14 @@ final class RequestConverters {
|
|||
}
|
||||
|
||||
static Request sourceExists(GetRequest getRequest) {
|
||||
Request request = new Request(HttpHead.METHOD_NAME, endpoint(getRequest.index(), getRequest.type(), getRequest.id(), "_source"));
|
||||
|
||||
String optionalType = getRequest.type();
|
||||
String endpoint;
|
||||
if (optionalType.equals(MapperService.SINGLE_MAPPING_NAME)) {
|
||||
endpoint = endpoint(getRequest.index(), "_source", getRequest.id());
|
||||
} else {
|
||||
endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source");
|
||||
}
|
||||
Request request = new Request(HttpHead.METHOD_NAME, endpoint);
|
||||
Params parameters = new Params(request);
|
||||
parameters.withPreference(getRequest.preference());
|
||||
parameters.withRouting(getRequest.routing());
|
||||
|
|
|
@ -39,6 +39,10 @@ public final class AutoFollowStats {
|
|||
static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors");
|
||||
static final ParseField LEADER_INDEX = new ParseField("leader_index");
|
||||
static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception");
|
||||
static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters");
|
||||
static final ParseField CLUSTER_NAME = new ParseField("cluster_name");
|
||||
static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis");
|
||||
static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<AutoFollowStats, Void> STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats",
|
||||
|
@ -48,6 +52,10 @@ public final class AutoFollowStats {
|
|||
(Long) args[2],
|
||||
new TreeMap<>(
|
||||
((List<Map.Entry<String, ElasticsearchException>>) args[3])
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
|
||||
new TreeMap<>(
|
||||
((List<Map.Entry<String, AutoFollowedCluster>>) args[4])
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
|
||||
));
|
||||
|
@ -57,6 +65,11 @@ public final class AutoFollowStats {
|
|||
"auto_follow_stats_errors",
|
||||
args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1]));
|
||||
|
||||
private static final ConstructingObjectParser<Map.Entry<String, AutoFollowedCluster>, Void> AUTO_FOLLOWED_CLUSTERS_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"auto_followed_clusters",
|
||||
args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2])));
|
||||
|
||||
static {
|
||||
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX);
|
||||
AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject(
|
||||
|
@ -64,26 +77,35 @@ public final class AutoFollowStats {
|
|||
(p, c) -> ElasticsearchException.fromXContent(p),
|
||||
AUTO_FOLLOW_EXCEPTION);
|
||||
|
||||
AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME);
|
||||
AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS);
|
||||
AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION);
|
||||
|
||||
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED);
|
||||
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS);
|
||||
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED);
|
||||
STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER,
|
||||
RECENT_AUTO_FOLLOW_ERRORS);
|
||||
STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER,
|
||||
AUTO_FOLLOWED_CLUSTERS);
|
||||
}
|
||||
|
||||
private final long numberOfFailedFollowIndices;
|
||||
private final long numberOfFailedRemoteClusterStateRequests;
|
||||
private final long numberOfSuccessfulFollowIndices;
|
||||
private final NavigableMap<String, ElasticsearchException> recentAutoFollowErrors;
|
||||
private final NavigableMap<String, AutoFollowedCluster> autoFollowedClusters;
|
||||
|
||||
AutoFollowStats(long numberOfFailedFollowIndices,
|
||||
long numberOfFailedRemoteClusterStateRequests,
|
||||
long numberOfSuccessfulFollowIndices,
|
||||
NavigableMap<String, ElasticsearchException> recentAutoFollowErrors) {
|
||||
NavigableMap<String, ElasticsearchException> recentAutoFollowErrors,
|
||||
NavigableMap<String, AutoFollowedCluster> autoFollowedClusters) {
|
||||
this.numberOfFailedFollowIndices = numberOfFailedFollowIndices;
|
||||
this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests;
|
||||
this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices;
|
||||
this.recentAutoFollowErrors = recentAutoFollowErrors;
|
||||
this.autoFollowedClusters = autoFollowedClusters;
|
||||
}
|
||||
|
||||
public long getNumberOfFailedFollowIndices() {
|
||||
|
@ -102,4 +124,27 @@ public final class AutoFollowStats {
|
|||
return recentAutoFollowErrors;
|
||||
}
|
||||
|
||||
public NavigableMap<String, AutoFollowedCluster> getAutoFollowedClusters() {
|
||||
return autoFollowedClusters;
|
||||
}
|
||||
|
||||
public static class AutoFollowedCluster {
|
||||
|
||||
private final long timeSinceLastCheckMillis;
|
||||
private final long lastSeenMetadataVersion;
|
||||
|
||||
public AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) {
|
||||
this.timeSinceLastCheckMillis = timeSinceLastCheckMillis;
|
||||
this.lastSeenMetadataVersion = lastSeenMetadataVersion;
|
||||
}
|
||||
|
||||
public long getTimeSinceLastCheckMillis() {
|
||||
return timeSinceLastCheckMillis;
|
||||
}
|
||||
|
||||
public long getLastSeenMetadataVersion() {
|
||||
return lastSeenMetadataVersion;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -73,6 +73,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.index.rankeval.PrecisionAtK;
|
||||
|
@ -115,6 +116,7 @@ import java.util.List;
|
|||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
@ -156,6 +158,58 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
getAndExistsWithTypeTest(RequestConverters::get, HttpGet.METHOD_NAME);
|
||||
}
|
||||
|
||||
public void testSourceExists() throws IOException {
|
||||
doTestSourceExists((index, id) -> new GetRequest(index, id));
|
||||
}
|
||||
|
||||
public void testSourceExistsWithType() throws IOException {
|
||||
String type = frequently() ? randomAlphaOfLengthBetween(3, 10) : MapperService.SINGLE_MAPPING_NAME;
|
||||
doTestSourceExists((index, id) -> new GetRequest(index, type, id));
|
||||
}
|
||||
|
||||
private static void doTestSourceExists(BiFunction<String, String, GetRequest> requestFunction) throws IOException {
|
||||
String index = randomAlphaOfLengthBetween(3, 10);
|
||||
String id = randomAlphaOfLengthBetween(3, 10);
|
||||
final GetRequest getRequest = requestFunction.apply(index, id);
|
||||
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
if (randomBoolean()) {
|
||||
String preference = randomAlphaOfLengthBetween(3, 10);
|
||||
getRequest.preference(preference);
|
||||
expectedParams.put("preference", preference);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
String routing = randomAlphaOfLengthBetween(3, 10);
|
||||
getRequest.routing(routing);
|
||||
expectedParams.put("routing", routing);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
boolean realtime = randomBoolean();
|
||||
getRequest.realtime(realtime);
|
||||
if (realtime == false) {
|
||||
expectedParams.put("realtime", "false");
|
||||
}
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
boolean refresh = randomBoolean();
|
||||
getRequest.refresh(refresh);
|
||||
if (refresh) {
|
||||
expectedParams.put("refresh", "true");
|
||||
}
|
||||
}
|
||||
Request request = RequestConverters.sourceExists(getRequest);
|
||||
assertEquals(HttpHead.METHOD_NAME, request.getMethod());
|
||||
String type = getRequest.type();
|
||||
if (type.equals(MapperService.SINGLE_MAPPING_NAME)) {
|
||||
assertEquals("/" + index + "/_source/" + id, request.getEndpoint());
|
||||
} else {
|
||||
assertEquals("/" + index + "/" + type + "/" + id + "/_source", request.getEndpoint());
|
||||
}
|
||||
|
||||
assertEquals(expectedParams, request.getParameters());
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
|
||||
public void testMultiGet() throws IOException {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
MultiGetRequest multiGetRequest = new MultiGetRequest();
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.client.ccr;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.client.ccr.AutoFollowStats.AutoFollowedCluster;
|
||||
import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||
|
@ -185,6 +186,19 @@ public class CcrStatsResponseTests extends ESTestCase {
|
|||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
builder.startArray(AutoFollowStats.AUTO_FOLLOWED_CLUSTERS.getPreferredName());
|
||||
for (Map.Entry<String, AutoFollowedCluster> entry : autoFollowStats.getAutoFollowedClusters().entrySet()) {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(AutoFollowStats.CLUSTER_NAME.getPreferredName(), entry.getKey());
|
||||
builder.field(AutoFollowStats.TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(),
|
||||
entry.getValue().getTimeSinceLastCheckMillis());
|
||||
builder.field(AutoFollowStats.LAST_SEEN_METADATA_VERSION.getPreferredName(),
|
||||
entry.getValue().getLastSeenMetadataVersion());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
builder.endObject();
|
||||
|
||||
|
@ -315,11 +329,16 @@ public class CcrStatsResponseTests extends ESTestCase {
|
|||
for (int i = 0; i < count; i++) {
|
||||
readExceptions.put("" + i, new ElasticsearchException(new IllegalStateException("index [" + i + "]")));
|
||||
}
|
||||
final NavigableMap<String, AutoFollowedCluster> autoFollowClusters = new TreeMap<>();
|
||||
for (int i = 0; i < count; i++) {
|
||||
autoFollowClusters.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong()));
|
||||
}
|
||||
return new AutoFollowStats(
|
||||
randomNonNegativeLong(),
|
||||
randomNonNegativeLong(),
|
||||
randomNonNegativeLong(),
|
||||
readExceptions
|
||||
readExceptions,
|
||||
autoFollowClusters
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -1265,7 +1265,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
assertEquals(3, getResponse.getSourceAsMap().size());
|
||||
//tag::get-response
|
||||
String index = getResponse.getIndex();
|
||||
String type = getResponse.getType();
|
||||
String id = getResponse.getId();
|
||||
if (getResponse.isExists()) {
|
||||
long version = getResponse.getVersion();
|
||||
|
|
|
@ -1317,6 +1317,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/36362")
|
||||
public void testInvalidateToken() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
|
|
@ -36,4 +36,4 @@ The returned +{response}+ contains a single property:
|
|||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
--------------------------------------------------
|
|
@ -63,6 +63,8 @@ Returns this:
|
|||
"_type": "_doc",
|
||||
"_id": "my_id",
|
||||
"_version": 1,
|
||||
"_seq_no": 22,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
"attachment": {
|
||||
|
@ -74,7 +76,7 @@ Returns this:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
To specify only some fields to be extracted:
|
||||
|
@ -146,6 +148,8 @@ Returns this:
|
|||
"_type": "_doc",
|
||||
"_id": "my_id",
|
||||
"_version": 1,
|
||||
"_seq_no": 35,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
"attachment": {
|
||||
|
@ -157,7 +161,7 @@ Returns this:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
[source,js]
|
||||
|
@ -194,6 +198,8 @@ Returns this:
|
|||
"_type": "_doc",
|
||||
"_id": "my_id_2",
|
||||
"_version": 1,
|
||||
"_seq_no": 40,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
"max_size": 5,
|
||||
|
@ -206,7 +212,7 @@ Returns this:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
[[ingest-attachment-with-arrays]]
|
||||
|
@ -285,6 +291,8 @@ Returns this:
|
|||
"_type" : "_doc",
|
||||
"_id" : "my_id",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 50,
|
||||
"_primary_term" : 1,
|
||||
"found" : true,
|
||||
"_source" : {
|
||||
"attachments" : [
|
||||
|
@ -312,7 +320,7 @@ Returns this:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
Note that the `target_field` needs to be set, otherwise the
|
||||
|
|
|
@ -75,6 +75,8 @@ Which returns:
|
|||
"_type": "_doc",
|
||||
"_id": "my_id",
|
||||
"_version": 1,
|
||||
"_seq_no": 55,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"ip": "8.8.8.8",
|
||||
"geoip": {
|
||||
|
@ -85,7 +87,7 @@ Which returns:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
Here is an example that uses the default country database and adds the
|
||||
geographical information to the `geo` field based on the `ip` field`. Note that
|
||||
|
@ -124,6 +126,8 @@ returns this:
|
|||
"_type": "_doc",
|
||||
"_id": "my_id",
|
||||
"_version": 1,
|
||||
"_seq_no": 65,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"ip": "8.8.8.8",
|
||||
"geo": {
|
||||
|
@ -133,7 +137,7 @@ returns this:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
Not all IP addresses find geo information from the database, When this
|
||||
|
@ -174,13 +178,15 @@ Which returns:
|
|||
"_type" : "_doc",
|
||||
"_id" : "my_id",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 71,
|
||||
"_primary_term": 1,
|
||||
"found" : true,
|
||||
"_source" : {
|
||||
"ip" : "80.231.5.0"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
[[ingest-geoip-mappings-note]]
|
||||
===== Recognizing Location as a Geopoint
|
||||
|
|
|
@ -57,6 +57,8 @@ Which returns
|
|||
"_type": "_doc",
|
||||
"_id": "my_id",
|
||||
"_version": 1,
|
||||
"_seq_no": 22,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
|
||||
"user_agent": {
|
||||
|
@ -73,7 +75,7 @@ Which returns
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
===== Using a custom regex file
|
||||
To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-user-agent` directory and
|
||||
|
|
|
@ -105,7 +105,8 @@ The API returns the following results:
|
|||
"number_of_failed_follow_indices" : 0,
|
||||
"number_of_failed_remote_cluster_state_requests" : 0,
|
||||
"number_of_successful_follow_indices" : 1,
|
||||
"recent_auto_follow_errors" : []
|
||||
"recent_auto_follow_errors" : [],
|
||||
"auto_followed_clusters" : []
|
||||
},
|
||||
"follow_stats" : {
|
||||
"indices" : [
|
||||
|
@ -151,6 +152,7 @@ The API returns the following results:
|
|||
// TESTRESPONSE[s/"number_of_failed_remote_cluster_state_requests" : 0/"number_of_failed_remote_cluster_state_requests" : $body.auto_follow_stats.number_of_failed_remote_cluster_state_requests/]
|
||||
// TESTRESPONSE[s/"number_of_successful_follow_indices" : 1/"number_of_successful_follow_indices" : $body.auto_follow_stats.number_of_successful_follow_indices/]
|
||||
// TESTRESPONSE[s/"recent_auto_follow_errors" : \[\]/"recent_auto_follow_errors" : $body.auto_follow_stats.recent_auto_follow_errors/]
|
||||
// TESTRESPONSE[s/"auto_followed_clusters" : \[\]/"auto_followed_clusters" : $body.auto_follow_stats.auto_followed_clusters/]
|
||||
// TESTRESPONSE[s/"leader_global_checkpoint" : 1024/"leader_global_checkpoint" : $body.follow_stats.indices.0.shards.0.leader_global_checkpoint/]
|
||||
// TESTRESPONSE[s/"leader_max_seq_no" : 1536/"leader_max_seq_no" : $body.follow_stats.indices.0.shards.0.leader_max_seq_no/]
|
||||
// TESTRESPONSE[s/"follower_global_checkpoint" : 768/"follower_global_checkpoint" : $body.follow_stats.indices.0.shards.0.follower_global_checkpoint/]
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
[[docs-get]]
|
||||
== Get API
|
||||
|
||||
The get API allows to get a typed JSON document from the index based on
|
||||
The get API allows to get a JSON document from the index based on
|
||||
its id. The following example gets a JSON document from an index called
|
||||
twitter, under a type called `_doc`, with id valued 0:
|
||||
twitter with id valued 0:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -21,6 +21,8 @@ The result of the above get operation is:
|
|||
"_type" : "_doc",
|
||||
"_id" : "0",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 10,
|
||||
"_primary_term" : 1,
|
||||
"found": true,
|
||||
"_source" : {
|
||||
"user" : "kimchy",
|
||||
|
@ -30,9 +32,9 @@ The result of the above get operation is:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
The above result includes the `_index`, `_type`, `_id` and `_version`
|
||||
The above result includes the `_index`, `_id` and `_version`
|
||||
of the document we wish to retrieve, including the actual `_source`
|
||||
of the document if it could be found (as indicated by the `found`
|
||||
field in the response).
|
||||
|
@ -156,6 +158,8 @@ The result of the above get operation is:
|
|||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"_version": 1,
|
||||
"_seq_no" : 22,
|
||||
"_primary_term" : 1,
|
||||
"found": true,
|
||||
"fields": {
|
||||
"tags": [
|
||||
|
@ -164,7 +168,7 @@ The result of the above get operation is:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
Field values fetched from the document itself are always returned as an array.
|
||||
|
@ -199,6 +203,8 @@ The result of the above get operation is:
|
|||
"_type": "_doc",
|
||||
"_id": "2",
|
||||
"_version": 1,
|
||||
"_seq_no" : 13,
|
||||
"_primary_term" : 1,
|
||||
"_routing": "user1",
|
||||
"found": true,
|
||||
"fields": {
|
||||
|
@ -208,7 +214,7 @@ The result of the above get operation is:
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
Also only leaf fields can be returned via the `stored_field` option. So object fields can't be returned and such requests
|
||||
will fail.
|
||||
|
@ -217,13 +223,13 @@ will fail.
|
|||
[[_source]]
|
||||
=== Getting the +_source+ directly
|
||||
|
||||
Use the `/{index}/{type}/{id}/_source` endpoint to get
|
||||
Use the `/{index}/_source/{id}` endpoint to get
|
||||
just the `_source` field of the document,
|
||||
without any additional content around it. For example:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET twitter/_doc/1/_source
|
||||
GET twitter/_source/1
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
@ -232,7 +238,7 @@ You can also use the same source filtering parameters to control which parts of
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET twitter/_doc/1/_source?_source_includes=*.id&_source_excludes=entities'
|
||||
GET twitter/_source/1/?_source_includes=*.id&_source_excludes=entities'
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
@ -242,7 +248,7 @@ An existing document will not have a _source if it is disabled in the <<mapping-
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
HEAD twitter/_doc/1/_source
|
||||
HEAD twitter/_source/1
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
|
|
@ -870,13 +870,15 @@ which will return:
|
|||
"_index": "test2",
|
||||
"_type": "_doc",
|
||||
"_version": 1,
|
||||
"_seq_no": 44,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"text": "words words",
|
||||
"tag": "foo"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
[float]
|
||||
[[docs-reindex-slice]]
|
||||
|
|
|
@ -421,7 +421,7 @@ And the response:
|
|||
"_primary_term" : 1
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"_seq_no" : 0/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
From the above, we can see that a new customer document was successfully created inside the customer index. The document also has an internal id of 1 which we specified at index time.
|
||||
|
||||
|
@ -445,11 +445,13 @@ And the response:
|
|||
"_type" : "_doc",
|
||||
"_id" : "1",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 25,
|
||||
"_primary_term" : 1,
|
||||
"found" : true,
|
||||
"_source" : { "name": "John Doe" }
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
Nothing out of the ordinary here other than a field, `found`, stating that we found a document with the requested ID 1 and another field, `_source`, which returns the full JSON document that we indexed from the previous step.
|
||||
|
||||
|
|
|
@ -730,13 +730,15 @@ GET test/_doc/2
|
|||
"_type": "_doc",
|
||||
"_id": "2",
|
||||
"_version": 1,
|
||||
"_seq_no": 22,
|
||||
"_primary_term": 1,
|
||||
"found": true,
|
||||
"_source": {
|
||||
"foo": "bar"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/]
|
||||
////
|
||||
|
||||
The source document can also use dot delimited fields to represent nested fields.
|
||||
|
@ -967,6 +969,8 @@ GET test/_doc/2
|
|||
"_type": "_doc",
|
||||
"_id": "2",
|
||||
"_version": 1,
|
||||
"_seq_no": 34,
|
||||
"_primary_term": 1,
|
||||
"found": true,
|
||||
"_source": {
|
||||
"tags": [
|
||||
|
@ -976,7 +980,7 @@ GET test/_doc/2
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
////
|
||||
|
||||
|
||||
|
@ -1088,6 +1092,8 @@ GET test/_doc/1
|
|||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"_version": 1,
|
||||
"_seq_no": 60,
|
||||
"_primary_term": 1,
|
||||
"found": true,
|
||||
"_source": {
|
||||
"href": {
|
||||
|
@ -1097,7 +1103,7 @@ GET test/_doc/1
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
Regular expressions can be expensive and should be avoided if viable
|
||||
|
@ -1548,11 +1554,11 @@ PUT /myindex/_doc/1?pipeline=monthlyindex
|
|||
"successful" : 1,
|
||||
"failed" : 0
|
||||
},
|
||||
"_seq_no" : 0,
|
||||
"_seq_no" : 55,
|
||||
"_primary_term" : 1
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
The above request will not index this document into the `myindex` index, but into the `myindex-2016-04-01` index because
|
||||
|
@ -2787,11 +2793,11 @@ Response from the index request:
|
|||
"successful": 1,
|
||||
"failed": 0
|
||||
},
|
||||
"_seq_no": 0,
|
||||
"_seq_no": 66,
|
||||
"_primary_term": 1,
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
Indexed document:
|
||||
|
||||
|
@ -2963,11 +2969,11 @@ The response from the above index request:
|
|||
"successful": 1,
|
||||
"failed": 0
|
||||
},
|
||||
"_seq_no": 0,
|
||||
"_seq_no": 89,
|
||||
"_primary_term": 1,
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
In the above response, you can see that our document was actually indexed into `my_index` instead of
|
||||
`any_index`. This type of manipulation is often convenient in pipelines that have various branches of transformation,
|
||||
|
|
|
@ -101,11 +101,13 @@ the `xpack.monitoring.collection.interval` setting 10 seconds. See
|
|||
+
|
||||
--
|
||||
By default, the data is stored on the same cluster by using a
|
||||
<<local-exporter,`local` exporter>>.
|
||||
|
||||
Alternatively, you can use an <<http-exporter,`http` exporter>> to send data to
|
||||
<<local-exporter,`local` exporter>>. Alternatively, you can use an <<http-exporter,`http` exporter>> to send data to
|
||||
a separate _monitoring cluster_.
|
||||
|
||||
IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the
|
||||
cluster that stores the monitoring data must have at least one
|
||||
<<ingest,ingest node>>.
|
||||
|
||||
For more information about typical monitoring architectures,
|
||||
see {stack-ov}/how-monitoring-works.html[How Monitoring Works].
|
||||
--
|
||||
|
|
|
@ -164,6 +164,10 @@ output.elasticsearch:
|
|||
<1> In this example, the data is stored on a monitoring cluster with nodes
|
||||
`es-mon-1` and `es-mon-2`.
|
||||
|
||||
IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the
|
||||
cluster that stores the monitoring data must have at least one
|
||||
<<ingest,ingest node>>.
|
||||
|
||||
For more information about these configuration options, see
|
||||
{metricbeat-ref}/elasticsearch-output.html[Configure the {es} output].
|
||||
--
|
||||
|
|
|
@ -23,7 +23,7 @@ teardown:
|
|||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
# default pipeline via index
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
|
@ -48,7 +48,7 @@ teardown:
|
|||
id: 1
|
||||
- match: { _source.bytes_source_field: "1kb" }
|
||||
- match: { _source.bytes_target_field: 1024 }
|
||||
|
||||
# default pipeline via alias
|
||||
- do:
|
||||
index:
|
||||
index: test_alias
|
||||
|
@ -63,12 +63,101 @@ teardown:
|
|||
id: 2
|
||||
- match: { _source.bytes_source_field: "1kb" }
|
||||
- match: { _source.bytes_target_field: 1024 }
|
||||
# default pipeline via upsert
|
||||
- do:
|
||||
update:
|
||||
index: test
|
||||
type: test
|
||||
id: 3
|
||||
body:
|
||||
script:
|
||||
source: "ctx._source.ran_script = true"
|
||||
lang: "painless"
|
||||
upsert: { "bytes_source_field":"1kb" }
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 3
|
||||
- match: { _source.bytes_source_field: "1kb" }
|
||||
- match: { _source.bytes_target_field: 1024 }
|
||||
# default pipeline via scripted upsert
|
||||
- do:
|
||||
update:
|
||||
index: test
|
||||
type: test
|
||||
id: 4
|
||||
body:
|
||||
script:
|
||||
source: "ctx._source.bytes_source_field = '1kb'"
|
||||
lang: "painless"
|
||||
upsert : {}
|
||||
scripted_upsert: true
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 4
|
||||
- match: { _source.bytes_source_field: "1kb" }
|
||||
- match: { _source.bytes_target_field: 1024 }
|
||||
# default pipeline via doc_as_upsert
|
||||
- do:
|
||||
update:
|
||||
index: test
|
||||
type: test
|
||||
id: 5
|
||||
body:
|
||||
doc: { "bytes_source_field":"1kb" }
|
||||
doc_as_upsert: true
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 5
|
||||
- match: { _source.bytes_source_field: "1kb" }
|
||||
- match: { _source.bytes_target_field: 1024 }
|
||||
# default pipeline via bulk upsert
|
||||
# note - bulk scripted upsert's execute the pipeline before the script, so any data referenced by the pipeline
|
||||
# needs to be in the upsert, not the script
|
||||
- do:
|
||||
bulk:
|
||||
refresh: true
|
||||
body: |
|
||||
{"update":{"_id":"6","_index":"test","_type":"test"}}
|
||||
{"script":"ctx._source.ran_script = true","upsert":{"bytes_source_field":"1kb"}}
|
||||
{"update":{"_id":"7","_index":"test","_type":"test"}}
|
||||
{"doc":{"bytes_source_field":"2kb"}, "doc_as_upsert":true}
|
||||
{"update":{"_id":"8","_index":"test","_type":"test"}}
|
||||
{"script": "ctx._source.ran_script = true","upsert":{"bytes_source_field":"3kb"}, "scripted_upsert" : true}
|
||||
|
||||
- do:
|
||||
mget:
|
||||
body:
|
||||
docs:
|
||||
- { _index: "test", _type: "_doc", _id: "6" }
|
||||
- { _index: "test", _type: "_doc", _id: "7" }
|
||||
- { _index: "test", _type: "_doc", _id: "8" }
|
||||
- match: { docs.0._index: "test" }
|
||||
- match: { docs.0._id: "6" }
|
||||
- match: { docs.0._source.bytes_source_field: "1kb" }
|
||||
- match: { docs.0._source.bytes_target_field: 1024 }
|
||||
- is_false: docs.0._source.ran_script
|
||||
- match: { docs.1._index: "test" }
|
||||
- match: { docs.1._id: "7" }
|
||||
- match: { docs.1._source.bytes_source_field: "2kb" }
|
||||
- match: { docs.1._source.bytes_target_field: 2048 }
|
||||
- match: { docs.2._index: "test" }
|
||||
- match: { docs.2._id: "8" }
|
||||
- match: { docs.2._source.bytes_source_field: "3kb" }
|
||||
- match: { docs.2._source.bytes_target_field: 3072 }
|
||||
- match: { docs.2._source.ran_script: true }
|
||||
|
||||
# explicit no default pipeline
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 3
|
||||
id: 9
|
||||
pipeline: "_none"
|
||||
body: {bytes_source_field: "1kb"}
|
||||
|
||||
|
@ -76,15 +165,15 @@ teardown:
|
|||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 3
|
||||
id: 9
|
||||
- match: { _source.bytes_source_field: "1kb" }
|
||||
- is_false: _source.bytes_target_field
|
||||
|
||||
# bad request
|
||||
- do:
|
||||
catch: bad_request
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 4
|
||||
id: 10
|
||||
pipeline: ""
|
||||
body: {bytes_source_field: "1kb"}
|
||||
|
|
|
@ -57,3 +57,44 @@ teardown:
|
|||
type: test
|
||||
id: 2
|
||||
- match: { _source.foo: "blub" }
|
||||
|
||||
---
|
||||
"Test Drop Processor On Failure":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline_with_failure"
|
||||
body: >
|
||||
{
|
||||
"description" : "pipeline with on failure drop",
|
||||
"processors": [
|
||||
{
|
||||
"fail": {
|
||||
"message": "failed",
|
||||
"on_failure": [
|
||||
{
|
||||
"drop": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 3
|
||||
pipeline: "my_pipeline_with_failure"
|
||||
body: {
|
||||
foo: "bar"
|
||||
}
|
||||
|
||||
- do:
|
||||
catch: missing
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 3
|
||||
- match: { found: false }
|
||||
|
|
|
@ -73,38 +73,6 @@ public final class AnalyzerCaster {
|
|||
} else if (expected == Double.class) {
|
||||
return PainlessCast.originalTypetoTargetType(def.class, Double.class, explicit);
|
||||
}
|
||||
} else if (actual == Object.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Byte.class, true, byte.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Short.class, true, short.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Character.class, true, char.class);
|
||||
} else if (expected == int.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Integer.class, true, int.class);
|
||||
} else if (expected == long.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Long.class, true, long.class);
|
||||
} else if (expected == float.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Float.class, true, float.class);
|
||||
} else if (expected == double.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Object.class, Double.class, true, double.class);
|
||||
}
|
||||
} else if (actual == Number.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Byte.class, true, byte.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Short.class, true, short.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Character.class, true, char.class);
|
||||
} else if (expected == int.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Integer.class, true, int.class);
|
||||
} else if (expected == long.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Long.class, true, long.class);
|
||||
} else if (expected == float.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Float.class, true, float.class);
|
||||
} else if (expected == double.class && explicit && internal) {
|
||||
return PainlessCast.unboxTargetType(Number.class, Double.class, true, double.class);
|
||||
}
|
||||
} else if (actual == String.class) {
|
||||
if (expected == char.class && explicit) {
|
||||
return PainlessCast.originalTypetoTargetType(String.class, char.class, true);
|
||||
|
@ -140,8 +108,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.boxTargetType(byte.class, byte.class, explicit, byte.class);
|
||||
} else if (expected == Short.class && internal) {
|
||||
return PainlessCast.boxTargetType(byte.class, short.class, explicit, short.class);
|
||||
} else if (expected == Character.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(byte.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
return PainlessCast.boxTargetType(byte.class, int.class, explicit, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
|
@ -170,12 +136,8 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.originalTypetoTargetType(short.class, float.class, explicit);
|
||||
} else if (expected == double.class) {
|
||||
return PainlessCast.originalTypetoTargetType(short.class, double.class, explicit);
|
||||
} else if (expected == Byte.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(short.class, byte.class, true, byte.class);
|
||||
} else if (expected == Short.class && internal) {
|
||||
return PainlessCast.boxTargetType(short.class, short.class, explicit, short.class);
|
||||
} else if (expected == Character.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(short.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
return PainlessCast.boxTargetType(short.class, int.class, explicit, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
|
@ -206,10 +168,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.originalTypetoTargetType(char.class, float.class, explicit);
|
||||
} else if (expected == double.class) {
|
||||
return PainlessCast.originalTypetoTargetType(char.class, double.class, explicit);
|
||||
} else if (expected == Byte.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(char.class, byte.class, true, byte.class);
|
||||
} else if (expected == Short.class && internal) {
|
||||
return PainlessCast.boxTargetType(char.class, short.class, explicit, short.class);
|
||||
} else if (expected == Character.class && internal) {
|
||||
return PainlessCast.boxTargetType(char.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
|
@ -240,12 +198,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.originalTypetoTargetType(int.class, float.class, explicit);
|
||||
} else if (expected == double.class) {
|
||||
return PainlessCast.originalTypetoTargetType(int.class, double.class, explicit);
|
||||
} else if (expected == Byte.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(int.class, byte.class, true, byte.class);
|
||||
} else if (expected == Short.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(int.class, short.class, true, short.class);
|
||||
} else if (expected == Character.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(int.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
return PainlessCast.boxTargetType(int.class, int.class, explicit, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
|
@ -274,14 +226,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.originalTypetoTargetType(long.class, float.class, explicit);
|
||||
} else if (expected == double.class) {
|
||||
return PainlessCast.originalTypetoTargetType(long.class, double.class, explicit);
|
||||
} else if (expected == Byte.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(long.class, byte.class, true, byte.class);
|
||||
} else if (expected == Short.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(long.class, short.class, true, short.class);
|
||||
} else if (expected == Character.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(long.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(long.class, int.class, true, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
return PainlessCast.boxTargetType(long.class, long.class, explicit, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
|
@ -308,16 +252,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.originalTypetoTargetType(float.class, long.class, true);
|
||||
} else if (expected == double.class) {
|
||||
return PainlessCast.originalTypetoTargetType(float.class, double.class, explicit);
|
||||
} else if (expected == Byte.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(float.class, byte.class, true, byte.class);
|
||||
} else if (expected == Short.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(float.class, short.class, true, short.class);
|
||||
} else if (expected == Character.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(float.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(float.class, int.class, true, int.class);
|
||||
} else if (expected == Long.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(float.class, long.class, true, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
return PainlessCast.boxTargetType(float.class, float.class, explicit, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
|
@ -342,18 +276,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.originalTypetoTargetType(double.class, long.class, true);
|
||||
} else if (expected == float.class && explicit) {
|
||||
return PainlessCast.originalTypetoTargetType(double.class, float.class, true);
|
||||
} else if (expected == Byte.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, byte.class, true, byte.class);
|
||||
} else if (expected == Short.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, short.class, true, short.class);
|
||||
} else if (expected == Character.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, char.class, true, char.class);
|
||||
} else if (expected == Integer.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, int.class, true, int.class);
|
||||
} else if (expected == Long.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, long.class, true, long.class);
|
||||
} else if (expected == Float.class && explicit && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, float.class, true, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.boxTargetType(double.class, double.class, explicit, double.class);
|
||||
}
|
||||
|
@ -366,8 +288,6 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.unboxOriginalType(byte.class, byte.class, explicit, byte.class);
|
||||
} else if (expected == short.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(byte.class, short.class, explicit, byte.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(byte.class, char.class, true, byte.class);
|
||||
} else if (expected == int.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(byte.class, int.class, explicit, byte.class);
|
||||
} else if (expected == long.class && internal) {
|
||||
|
@ -376,14 +296,20 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.unboxOriginalType(byte.class, float.class, explicit, byte.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(byte.class, double.class, explicit, byte.class);
|
||||
} else if (expected == Short.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, short.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, double.class);
|
||||
}
|
||||
} else if (actual == Short.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(short.class, byte.class, true, short.class);
|
||||
} else if (expected == short.class && internal) {
|
||||
if (expected == short.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(short.class, short.class, explicit, short.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(short.class, char.class, true, short.class);
|
||||
} else if (expected == int.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(short.class, int.class, explicit, short.class);
|
||||
} else if (expected == long.class && internal) {
|
||||
|
@ -392,13 +318,17 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.unboxOriginalType(short.class, float.class, explicit, short.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(short.class, double.class, explicit, short.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, double.class);
|
||||
}
|
||||
} else if (actual == Character.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(char.class, byte.class, true, char.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(char.class, short.class, true, char.class);
|
||||
} else if (expected == char.class && internal) {
|
||||
if (expected == char.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(char.class, char.class, explicit, char.class);
|
||||
} else if (expected == int.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(char.class, int.class, explicit, char.class);
|
||||
|
@ -408,15 +338,17 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.unboxOriginalType(char.class, float.class, explicit, char.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(char.class, double.class, explicit, char.class);
|
||||
} else if (expected == Integer.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, double.class);
|
||||
}
|
||||
} else if (actual == Integer.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(int.class, byte.class, true, int.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(int.class, short.class, true, int.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(int.class, char.class, true, int.class);
|
||||
} else if (expected == int.class && internal) {
|
||||
if (expected == int.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(int.class, int.class, explicit, int.class);
|
||||
} else if (expected == long.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(int.class, long.class, explicit, int.class);
|
||||
|
@ -424,61 +356,45 @@ public final class AnalyzerCaster {
|
|||
return PainlessCast.unboxOriginalType(int.class, float.class, explicit, int.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(int.class, double.class, explicit, int.class);
|
||||
} else if (expected == Long.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, double.class);
|
||||
}
|
||||
} else if (actual == Long.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, byte.class, true, long.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, short.class, true, long.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, char.class, true, long.class);
|
||||
} else if (expected == int.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, int.class, true, long.class);
|
||||
} else if (expected == long.class && internal) {
|
||||
if (expected == long.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, long.class, explicit, long.class);
|
||||
} else if (expected == float.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, float.class, explicit, long.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(long.class, double.class, explicit, long.class);
|
||||
} else if (expected == Float.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, double.class);
|
||||
}
|
||||
} else if (actual == Float.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, byte.class, true, float.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, short.class, true, float.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, char.class, true, float.class);
|
||||
} else if (expected == int.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, int.class, true, float.class);
|
||||
} else if (expected == long.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, long.class, true, float.class);
|
||||
} else if (expected == float.class && internal) {
|
||||
if (expected == float.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, float.class, explicit, float.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(float.class, double.class, explicit, float.class);
|
||||
} else if (expected == Double.class && internal) {
|
||||
return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, float.class, double.class);
|
||||
}
|
||||
} else if (actual == Double.class) {
|
||||
if (expected == byte.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, byte.class, true, double.class);
|
||||
} else if (expected == short.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, short.class, true, double.class);
|
||||
} else if (expected == char.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, char.class, true, double.class);
|
||||
} else if (expected == int.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, int.class, true, double.class);
|
||||
} else if (expected == long.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, long.class, true, double.class);
|
||||
} else if (expected == float.class && explicit && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, float.class, true, double.class);
|
||||
} else if (expected == double.class && internal) {
|
||||
if (expected == double.class && internal) {
|
||||
return PainlessCast.unboxOriginalType(double.class, double.class, explicit, double.class);
|
||||
}
|
||||
}
|
||||
|
||||
if ( actual == def.class ||
|
||||
if (
|
||||
actual == def.class ||
|
||||
(actual != void.class && expected == def.class) ||
|
||||
expected.isAssignableFrom(actual) ||
|
||||
(actual.isAssignableFrom(expected) && explicit)) {
|
||||
expected.isAssignableFrom(actual) ||
|
||||
(actual.isAssignableFrom(expected) && explicit)
|
||||
) {
|
||||
return PainlessCast.originalTypetoTargetType(actual, expected, explicit);
|
||||
} else {
|
||||
throw location.createError(new ClassCastException("Cannot cast from " +
|
||||
|
|
|
@ -154,6 +154,10 @@ public final class MethodWriter extends GeneratorAdapter {
|
|||
invokeStatic(UTILITY_TYPE, CHAR_TO_STRING);
|
||||
} else if (cast.originalType == String.class && cast.targetType == char.class) {
|
||||
invokeStatic(UTILITY_TYPE, STRING_TO_CHAR);
|
||||
} else if (cast.unboxOriginalType != null && cast.boxTargetType != null) {
|
||||
unbox(getType(cast.unboxOriginalType));
|
||||
writeCast(cast.unboxOriginalType, cast.boxTargetType);
|
||||
box(getType(cast.boxTargetType));
|
||||
} else if (cast.unboxOriginalType != null) {
|
||||
unbox(getType(cast.unboxOriginalType));
|
||||
writeCast(cast.originalType, cast.targetType);
|
||||
|
|
|
@ -75,6 +75,15 @@ public class PainlessCast {
|
|||
return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType);
|
||||
}
|
||||
|
||||
/** Create a cast where the original type is unboxed, cast to a target type, and the target type is boxed. */
|
||||
public static PainlessCast unboxOriginalTypeToBoxTargetType(boolean explicitCast, Class<?> unboxOriginalType, Class<?> boxTargetType) {
|
||||
|
||||
Objects.requireNonNull(unboxOriginalType);
|
||||
Objects.requireNonNull(boxTargetType);
|
||||
|
||||
return new PainlessCast(null, null, explicitCast, unboxOriginalType, null, null, boxTargetType);
|
||||
}
|
||||
|
||||
public final Class<?> originalType;
|
||||
public final Class<?> targetType;
|
||||
public final boolean explicitCast;
|
||||
|
|
|
@ -0,0 +1,511 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.painless;
|
||||
|
||||
public class BoxedCastTests extends ScriptTestCase {
|
||||
|
||||
public void testMethodCallByteToBoxedCasts() {
|
||||
assertEquals(0, exec("byte u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("byte u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Byte u = Byte.valueOf((byte)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
assertEquals(0, exec("byte u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("byte u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("byte u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Byte u = Byte.valueOf((byte)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
assertEquals(0, exec("def u = (byte)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (byte)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
assertEquals(0, exec("def u = (byte)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (byte)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (byte)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
}
|
||||
|
||||
public void testMethodCallShortToBoxedCasts() {
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("short u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("short u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Short u = Short.valueOf((short)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Short u = Short.valueOf((short)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("short u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("short u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("short u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Short u = Short.valueOf((short)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Short u = Short.valueOf((short)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (short)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (short)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (short)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (short)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (short)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
}
|
||||
|
||||
public void testMethodCallCharacterToBoxedCasts() {
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("char u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("char u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Character u = Character.valueOf((char)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Character u = Character.valueOf((char)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("char u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("char u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("char u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Character u = Character.valueOf((char)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Character u = Character.valueOf((char)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (char)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (char)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (char)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (char)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (char)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
}
|
||||
|
||||
public void testMethodCallIntegerToBoxedCasts() {
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("int u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("int u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("int u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Integer u = Integer.valueOf((int)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Integer u = Integer.valueOf((int)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Integer u = Integer.valueOf((int)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("int u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("int u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("int u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("int u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Integer u = Integer.valueOf((int)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Integer u = Integer.valueOf((int)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Integer u = Integer.valueOf((int)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (int)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (int)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (int)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (int)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (int)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (int)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (int)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
}
|
||||
|
||||
public void testMethodCallLongToBoxedCasts() {
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("long u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("long u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("long u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Long u = Long.valueOf((long)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Long u = Long.valueOf((long)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Long u = Long.valueOf((long)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("long u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("long u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("long u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("long u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Long u = Long.valueOf((long)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (long)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (long)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (long)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (long)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (long)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (long)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (long)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
}
|
||||
|
||||
public void testMethodCallFloatToBoxedCasts() {
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("float u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("float u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Float u = Float.valueOf((float)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Float u = Float.valueOf((float)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("float u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("float u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("float u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("Float u = Float.valueOf((float)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (float)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (float)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
|
||||
expectScriptThrows(ClassCastException.class,
|
||||
() -> exec("def u = (float)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (float)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
|
||||
assertEquals(0, exec("def u = (float)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
|
||||
}
|
||||
|
||||
public void testMethodCallDoubleToBoxedCasts() {
    // A double argument passed to compareTo on each boxed receiver: the asserts show
    // only Double accepts it; every narrower receiver throws ClassCastException.
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
    assertEquals(0, exec("double u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);"));

    // Same checks with a boxed Double argument.
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); Short b = Short.valueOf((short)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); Character b = Character.valueOf((char)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); Long b = Long.valueOf((long)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); Float b = Float.valueOf((float)1); b.compareTo(u);"));
    assertEquals(0, exec("Double u = Double.valueOf((double)1); Double b = Double.valueOf((double)1); b.compareTo(u);"));

    // Receiver resolved dynamically (def), primitive double argument.
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; def b = Short.valueOf((short)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; def b = Character.valueOf((char)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; def b = Long.valueOf((long)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("double u = 1; def b = Float.valueOf((float)1); b.compareTo(u);"));
    assertEquals(0, exec("double u = 1; def b = Double.valueOf((double)1); b.compareTo(u);"));

    // def receiver with a boxed Double argument.
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); def b = Byte.valueOf((byte)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); def b = Short.valueOf((short)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); def b = Character.valueOf((char)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); def b = Integer.valueOf((int)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); def b = Long.valueOf((long)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("Double u = Double.valueOf((double)1); def b = Float.valueOf((float)1); b.compareTo(u);"));
    assertEquals(0, exec("Double u = Double.valueOf((double)1); def b = Double.valueOf((double)1); b.compareTo(u);"));

    // def argument holding a double, statically typed boxed receiver.
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; Short b = Short.valueOf((short)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; Character b = Character.valueOf((char)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; Long b = Long.valueOf((long)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; Float b = Float.valueOf((float)1); b.compareTo(u);"));
    assertEquals(0, exec("def u = (double)1; Double b = Double.valueOf((double)1); b.compareTo(u);"));

    // Both receiver and argument dynamic.
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; def b = Byte.valueOf((byte)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; def b = Short.valueOf((short)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; def b = Character.valueOf((char)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; def b = Integer.valueOf((int)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; def b = Long.valueOf((long)1); b.compareTo(u);"));
    expectScriptThrows(ClassCastException.class,
            () -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
    assertEquals(0, exec("def u = (double)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
}
|
||||
}
|
|
@ -54,6 +54,7 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQueryBuilder> {
|
||||
|
@ -152,12 +153,13 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
|
|||
assertThat(getRequest.version(), Matchers.equalTo(indexedDocumentVersion));
|
||||
if (indexedDocumentExists) {
|
||||
return new GetResponse(
|
||||
new GetResult(indexedDocumentIndex, indexedDocumentType, indexedDocumentId, 0L, true,
|
||||
new GetResult(indexedDocumentIndex, indexedDocumentType, indexedDocumentId, 0, 1, 0L, true,
|
||||
documentSource.iterator().next(), Collections.emptyMap())
|
||||
);
|
||||
} else {
|
||||
return new GetResponse(
|
||||
new GetResult(indexedDocumentIndex, indexedDocumentType, indexedDocumentId, -1, false, null, Collections.emptyMap())
|
||||
new GetResult(indexedDocumentIndex, indexedDocumentType, indexedDocumentId, UNASSIGNED_SEQ_NO, 0, -1,
|
||||
false, null, Collections.emptyMap())
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -149,8 +149,8 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
|
|||
|
||||
public void testGetSourceAction() throws IOException {
|
||||
createTestDoc();
|
||||
headTestCase("/test/test/1/_source", emptyMap(), greaterThan(0));
|
||||
headTestCase("/test/test/2/_source", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0));
|
||||
headTestCase("/test/_source/1", emptyMap(), greaterThan(0));
|
||||
headTestCase("/test/_source/2", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0));
|
||||
|
||||
try (XContentBuilder builder = jsonBuilder()) {
|
||||
builder.startObject();
|
||||
|
@ -175,7 +175,7 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
|
|||
request.setJsonEntity(Strings.toString(builder));
|
||||
client().performRequest(request);
|
||||
createTestDoc("test-no-source", "test-no-source");
|
||||
headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0));
|
||||
headTestCase("/test-no-source/_source/1", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html",
|
||||
"methods": ["HEAD"],
|
||||
"url": {
|
||||
"path": "/{index}/{type}/{id}/_source",
|
||||
"paths": ["/{index}/{type}/{id}/_source"],
|
||||
"path": "/{index}/_source/{id}",
|
||||
"paths": ["/{index}/_source/{id}", "/{index}/{type}/{id}/_source"],
|
||||
"parts": {
|
||||
"id": {
|
||||
"type" : "string",
|
||||
|
@ -18,8 +18,8 @@
|
|||
},
|
||||
"type": {
|
||||
"type" : "string",
|
||||
"required" : true,
|
||||
"description" : "The type of the document; use `_all` to fetch the first document matching the ID across all types"
|
||||
"required" : false,
|
||||
"description" : "The type of the document; deprecated and optional starting with 7.0"
|
||||
}
|
||||
},
|
||||
"params": {
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html",
|
||||
"methods": ["GET"],
|
||||
"url": {
|
||||
"path": "/{index}/{type}/{id}/_source",
|
||||
"paths": ["/{index}/{type}/{id}/_source"],
|
||||
"path": "/{index}/_source/{id}",
|
||||
"paths": ["/{index}/_source/{id}", "/{index}/{type}/{id}/_source"],
|
||||
"parts": {
|
||||
"id": {
|
||||
"type" : "string",
|
||||
|
@ -18,8 +18,8 @@
|
|||
},
|
||||
"type": {
|
||||
"type" : "string",
|
||||
"required" : true,
|
||||
"description" : "The type of the document; use `_all` to fetch the first document matching the ID across all types"
|
||||
"required" : false,
|
||||
"description" : "The type of the document; deprecated and optional starting with 7.0"
|
||||
}
|
||||
},
|
||||
"params": {
|
||||
|
|
|
@ -1,16 +1,19 @@
|
|||
---
|
||||
"Basic":
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { "foo": "bar" }
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
- match: { '': { foo: bar } }
|
||||
|
@ -18,7 +21,6 @@
|
|||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: _all
|
||||
id: 1
|
||||
|
||||
- match: { '': { foo: bar } }
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
---
|
||||
"Basic with types":
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { "foo": "bar" }
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
- match: { '': { foo: bar } }
|
|
@ -1,5 +1,11 @@
|
|||
---
|
||||
"Default values":
|
||||
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
|
@ -10,7 +16,6 @@
|
|||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: _all
|
||||
id: 1
|
||||
|
||||
- match: { '': { foo: bar } }
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
"Default values":
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { "foo": "bar" }
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
- match: { '': { foo: bar } }
|
|
@ -1,6 +1,11 @@
|
|||
---
|
||||
"Routing":
|
||||
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_1
|
||||
|
@ -26,7 +31,6 @@
|
|||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
routing: 5
|
||||
|
||||
|
@ -36,6 +40,5 @@
|
|||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
---
|
||||
"Routing":
|
||||
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_1
|
||||
body:
|
||||
settings:
|
||||
index:
|
||||
number_of_shards: 5
|
||||
number_of_routing_shards: 5
|
||||
number_of_replicas: 0
|
||||
|
||||
- do:
|
||||
cluster.health:
|
||||
wait_for_status: green
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
routing: 5
|
||||
body: { foo: bar }
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
routing: 5
|
||||
|
||||
- match: { '': {foo: bar}}
|
||||
|
||||
- do:
|
||||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
|
@ -1,6 +1,9 @@
|
|||
---
|
||||
"Realtime":
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
|
@ -25,14 +28,12 @@
|
|||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
realtime: false
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
realtime: true
|
||||
|
||||
|
@ -41,7 +42,6 @@
|
|||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
realtime: false
|
||||
refresh: true
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
---
|
||||
"Realtime":
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_1
|
||||
body:
|
||||
settings:
|
||||
refresh_interval: -1
|
||||
number_of_replicas: 0
|
||||
|
||||
- do:
|
||||
cluster.health:
|
||||
wait_for_status: green
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { foo: bar }
|
||||
|
||||
- do:
|
||||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
realtime: false
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
realtime: true
|
||||
|
||||
- match: { '': {foo: bar}}
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
realtime: false
|
||||
refresh: true
|
||||
|
||||
- match: { '': {foo: bar}}
|
||||
|
|
@ -1,6 +1,11 @@
|
|||
---
|
||||
"Source filtering":
|
||||
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
|
@ -9,18 +14,18 @@
|
|||
body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
|
||||
|
||||
- do:
|
||||
get_source: { index: test_1, type: test, id: 1, _source_includes: include.field1 }
|
||||
get_source: { index: test_1, id: 1, _source_includes: include.field1 }
|
||||
- match: { include.field1: v1 }
|
||||
- is_false: include.field2
|
||||
|
||||
- do:
|
||||
get_source: { index: test_1, type: test, id: 1, _source_includes: "include.field1,include.field2" }
|
||||
get_source: { index: test_1, id: 1, _source_includes: "include.field1,include.field2" }
|
||||
- match: { include.field1: v1 }
|
||||
- match: { include.field2: v2 }
|
||||
- is_false: count
|
||||
|
||||
- do:
|
||||
get_source: { index: test_1, type: test, id: 1, _source_includes: include, _source_excludes: "*.field2" }
|
||||
get_source: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2" }
|
||||
- match: { include.field1: v1 }
|
||||
- is_false: include.field2
|
||||
- is_false: count
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
---
|
||||
"Source filtering":
|
||||
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
|
||||
|
||||
- do:
|
||||
get_source: { index: test_1, type: test, id: 1, _source_includes: include.field1 }
|
||||
- match: { include.field1: v1 }
|
||||
- is_false: include.field2
|
||||
|
||||
- do:
|
||||
get_source: { index: test_1, type: test, id: 1, _source_includes: "include.field1,include.field2" }
|
||||
- match: { include.field1: v1 }
|
||||
- match: { include.field2: v2 }
|
||||
- is_false: count
|
||||
|
||||
- do:
|
||||
get_source: { index: test_1, type: test, id: 1, _source_includes: include, _source_excludes: "*.field2" }
|
||||
- match: { include.field1: v1 }
|
||||
- is_false: include.field2
|
||||
- is_false: count
|
|
@ -1,19 +1,27 @@
|
|||
---
|
||||
"Missing document with catch":
|
||||
|
||||
- skip:
|
||||
features: warnings
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
---
|
||||
"Missing document with ignore":
|
||||
|
||||
- skip:
|
||||
features: warnings
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
ignore: 404
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
"Missing document with catch":
|
||||
|
||||
- do:
|
||||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
---
|
||||
"Missing document with ignore":
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
ignore: 404
|
|
@ -1,5 +1,10 @@
|
|||
---
|
||||
setup:
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: types are required in requests before 7.0.0
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_1
|
||||
|
@ -23,7 +28,6 @@ setup:
|
|||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
---
|
||||
|
@ -32,6 +36,5 @@ setup:
|
|||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
ignore: 404
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
---
|
||||
setup:
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_1
|
||||
body:
|
||||
mappings:
|
||||
test:
|
||||
_source: { enabled: false }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { foo: bar }
|
||||
|
||||
|
||||
---
|
||||
"Missing document source with catch":
|
||||
|
||||
- do:
|
||||
catch: missing
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
|
||||
---
|
||||
"Missing document source with ignore":
|
||||
|
||||
- do:
|
||||
get_source:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
ignore: 404
|
|
@ -127,6 +127,24 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
|
|||
clusterService.addStateApplier(this.ingestForwarder);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the {@link IndexRequest} from the provided {@link DocWriteRequest} for index or upsert actions. Upserts are
|
||||
* modeled as {@link IndexRequest} inside the {@link UpdateRequest}. Ignores {@link org.elasticsearch.action.delete.DeleteRequest}'s
|
||||
*
|
||||
* @param docWriteRequest The request to find the {@link IndexRequest}
|
||||
* @return the found {@link IndexRequest} or {@code null} if one can not be found.
|
||||
*/
|
||||
public static IndexRequest getIndexWriteRequest(DocWriteRequest docWriteRequest) {
|
||||
IndexRequest indexRequest = null;
|
||||
if (docWriteRequest instanceof IndexRequest) {
|
||||
indexRequest = (IndexRequest) docWriteRequest;
|
||||
} else if (docWriteRequest instanceof UpdateRequest) {
|
||||
UpdateRequest updateRequest = (UpdateRequest) docWriteRequest;
|
||||
indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest();
|
||||
}
|
||||
return indexRequest;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener<BulkResponse> listener) {
|
||||
final long startTime = relativeTime();
|
||||
|
@ -207,12 +225,12 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
|
|||
final MetaData metaData = clusterService.state().getMetaData();
|
||||
ImmutableOpenMap<String, IndexMetaData> indicesMetaData = metaData.indices();
|
||||
for (DocWriteRequest<?> actionRequest : bulkRequest.requests) {
|
||||
if (actionRequest instanceof IndexRequest) {
|
||||
IndexRequest indexRequest = (IndexRequest) actionRequest;
|
||||
IndexRequest indexRequest = getIndexWriteRequest(actionRequest);
|
||||
if(indexRequest != null){
|
||||
String pipeline = indexRequest.getPipeline();
|
||||
if (pipeline == null) {
|
||||
IndexMetaData indexMetaData = indicesMetaData.get(indexRequest.index());
|
||||
if (indexMetaData == null) {
|
||||
IndexMetaData indexMetaData = indicesMetaData.get(actionRequest.index());
|
||||
if (indexMetaData == null && indexRequest.index() != null) {
|
||||
//check the alias
|
||||
AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index());
|
||||
if (indexOrAlias != null && indexOrAlias.isAlias()) {
|
||||
|
@ -626,7 +644,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
|
|||
}
|
||||
|
||||
void markCurrentItemAsDropped() {
|
||||
IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(currentSlot);
|
||||
IndexRequest indexRequest = getIndexWriteRequest(bulkRequest.requests().get(currentSlot));
|
||||
failedSlots.set(currentSlot);
|
||||
itemResponses.add(
|
||||
new BulkItemResponse(currentSlot, indexRequest.opType(),
|
||||
|
@ -639,7 +657,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
|
|||
}
|
||||
|
||||
void markCurrentItemAsFailed(Exception e) {
|
||||
IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(currentSlot);
|
||||
IndexRequest indexRequest = getIndexWriteRequest(bulkRequest.requests().get(currentSlot));
|
||||
// We hit a error during preprocessing a request, so we:
|
||||
// 1) Remember the request item slot from the bulk, so that we're done processing all requests we know what failed
|
||||
// 2) Add a bulk item failure for this request
|
||||
|
|
|
@ -307,6 +307,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
|
|||
final Tuple<XContentType, Map<String, Object>> sourceAndContent =
|
||||
XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType());
|
||||
updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex,
|
||||
indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(),
|
||||
indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
|
||||
}
|
||||
} else if (translatedResult == DocWriteResponse.Result.DELETED) {
|
||||
|
@ -315,7 +316,8 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
|
|||
deleteResponse.getType(), deleteResponse.getId(), deleteResponse.getSeqNo(), deleteResponse.getPrimaryTerm(),
|
||||
deleteResponse.getVersion(), deleteResponse.getResult());
|
||||
|
||||
final GetResult getResult = UpdateHelper.extractGetResult(updateRequest, concreteIndex, deleteResponse.getVersion(),
|
||||
final GetResult getResult = UpdateHelper.extractGetResult(updateRequest, concreteIndex,
|
||||
deleteResponse.getSeqNo(), deleteResponse.getPrimaryTerm(), deleteResponse.getVersion(),
|
||||
translate.updatedSourceAsMap(), translate.updateSourceContentType(), null);
|
||||
|
||||
updateResponse.setGetResult(getResult);
|
||||
|
|
|
@ -38,7 +38,7 @@ import java.io.IOException;
|
|||
import static org.elasticsearch.action.ValidateActions.addValidationError;
|
||||
|
||||
/**
|
||||
* A request to get a document (its source) from an index based on its type (optional) and id. Best created using
|
||||
* A request to get a document (its source) from an index based on its id. Best created using
|
||||
* {@link org.elasticsearch.client.Requests#getRequest(String)}.
|
||||
* <p>
|
||||
* The operation requires the {@link #index()}, {@link #type(String)} and {@link #id(String)}
|
||||
|
@ -84,7 +84,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
|||
* @param index The index to get the document from
|
||||
* @param type The type of the document
|
||||
* @param id The id of the document
|
||||
*
|
||||
* @deprecated Types are in the process of being removed, use {@link GetRequest(String, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
|
@ -127,7 +126,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
|||
|
||||
/**
|
||||
* Sets the type of the document to fetch.
|
||||
*
|
||||
* @deprecated Types are in the process of being removed.
|
||||
*/
|
||||
@Deprecated
|
||||
|
|
|
@ -90,6 +90,20 @@ public class GetResponse extends ActionResponse implements Iterable<DocumentFiel
|
|||
return getResult.getVersion();
|
||||
}
|
||||
|
||||
/**
|
||||
* The sequence number assigned to the last operation to have changed this document, if found.
|
||||
*/
|
||||
public long getSeqNo() {
|
||||
return getResult.getSeqNo();
|
||||
}
|
||||
|
||||
/**
|
||||
* The primary term of the last primary that has changed this document, if found.
|
||||
*/
|
||||
public long getPrimaryTerm() {
|
||||
return getResult.getPrimaryTerm();
|
||||
}
|
||||
|
||||
/**
|
||||
* The source of the document if exists.
|
||||
*/
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.action.update;
|
||||
|
||||
import org.elasticsearch.ResourceAlreadyExistsException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRunnable;
|
||||
import org.elasticsearch.action.RoutingMissingException;
|
||||
|
@ -50,7 +51,6 @@ import org.elasticsearch.index.IndexService;
|
|||
import org.elasticsearch.index.engine.VersionConflictEngineException;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.ResourceAlreadyExistsException;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
@ -185,8 +185,9 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
|
|||
if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
|
||||
Tuple<XContentType, Map<String, Object>> sourceAndContent =
|
||||
XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType());
|
||||
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(),
|
||||
sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
|
||||
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(),
|
||||
response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), sourceAndContent.v2(),
|
||||
sourceAndContent.v1(), upsertSourceBytes));
|
||||
} else {
|
||||
update.setGetResult(null);
|
||||
}
|
||||
|
@ -205,7 +206,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
|
|||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(),
|
||||
response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(),
|
||||
response.getVersion(), response.getResult());
|
||||
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(),
|
||||
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(),
|
||||
response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(),
|
||||
result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
|
||||
update.setForcedRefresh(response.forcedRefresh());
|
||||
listener.onResponse(update);
|
||||
|
@ -216,10 +218,11 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
|
|||
DeleteRequest deleteRequest = result.action();
|
||||
client.bulk(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse(
|
||||
ActionListener.<DeleteResponse>wrap(response -> {
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(),
|
||||
response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(),
|
||||
response.getVersion(), response.getResult());
|
||||
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(),
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(),
|
||||
response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(),
|
||||
response.getResult());
|
||||
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(),
|
||||
response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(),
|
||||
result.updatedSourceAsMap(), result.updateSourceContentType(), null));
|
||||
update.setForcedRefresh(response.forcedRefresh());
|
||||
listener.onResponse(update);
|
||||
|
|
|
@ -209,8 +209,8 @@ public class UpdateHelper {
|
|||
if (detectNoop && noop) {
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(),
|
||||
getResult.getVersion(), DocWriteResponse.Result.NOOP);
|
||||
update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap,
|
||||
updateSourceContentType, getResult.internalSourceRef()));
|
||||
update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(),
|
||||
getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
|
||||
return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
|
||||
} else {
|
||||
final IndexRequest finalIndexRequest = Requests.indexRequest(request.index())
|
||||
|
@ -270,10 +270,9 @@ public class UpdateHelper {
|
|||
// If it was neither an INDEX or DELETE operation, treat it as a noop
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(),
|
||||
getResult.getVersion(), DocWriteResponse.Result.NOOP);
|
||||
update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap,
|
||||
updateSourceContentType, getResult.internalSourceRef()));
|
||||
update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(),
|
||||
getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
|
||||
return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -293,7 +292,7 @@ public class UpdateHelper {
|
|||
/**
|
||||
* Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response.
|
||||
*/
|
||||
public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version,
|
||||
public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long seqNo, long primaryTerm, long version,
|
||||
final Map<String, Object> source, XContentType sourceContentType,
|
||||
@Nullable final BytesReference sourceAsBytes) {
|
||||
if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) {
|
||||
|
@ -318,7 +317,8 @@ public class UpdateHelper {
|
|||
}
|
||||
|
||||
// TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
|
||||
return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap());
|
||||
return new GetResult(concreteIndex, request.type(), request.id(), seqNo, primaryTerm, version, true, sourceFilteredAsBytes,
|
||||
Collections.emptyMap());
|
||||
}
|
||||
|
||||
public static class Result {
|
||||
|
|
|
@ -162,8 +162,9 @@ public class UpdateResponse extends DocWriteResponse {
|
|||
update = new UpdateResponse(shardId, type, id, version, result);
|
||||
}
|
||||
if (getResult != null) {
|
||||
update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(), update.getVersion(),
|
||||
getResult.isExists(),getResult.internalSourceRef(), getResult.getFields()));
|
||||
update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), update.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
}
|
||||
update.setForcedRefresh(forcedRefresh);
|
||||
return update;
|
||||
|
|
|
@ -46,7 +46,7 @@ import java.util.UUID;
|
|||
public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements Custom, Iterable<RestoreInProgress.Entry> {
|
||||
|
||||
/**
|
||||
* Fallback UUID used for restore operations that were started before v7.0 and don't have a uuid in the cluster state.
|
||||
* Fallback UUID used for restore operations that were started before v6.6 and don't have a uuid in the cluster state.
|
||||
*/
|
||||
public static final String BWC_UUID = new UUID(0, 0).toString();
|
||||
|
||||
|
@ -436,7 +436,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements
|
|||
final ImmutableOpenMap.Builder<String, Entry> entriesBuilder = ImmutableOpenMap.builder(count);
|
||||
for (int i = 0; i < count; i++) {
|
||||
final String uuid;
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
|
||||
uuid = in.readString();
|
||||
} else {
|
||||
uuid = BWC_UUID;
|
||||
|
@ -468,7 +468,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements
|
|||
out.writeVInt(entries.size());
|
||||
for (ObjectCursor<Entry> v : entries.values()) {
|
||||
Entry entry = v.value;
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
|
||||
out.writeString(entry.uuid);
|
||||
}
|
||||
entry.snapshot().writeTo(out);
|
||||
|
|
|
@ -1531,14 +1531,14 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragmen
|
|||
if (sourceNumberOfShards < targetNumberOfShards) { // split
|
||||
factor = targetNumberOfShards / sourceNumberOfShards;
|
||||
if (factor * sourceNumberOfShards != targetNumberOfShards || factor <= 1) {
|
||||
throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " +
|
||||
throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " +
|
||||
"factor of ["
|
||||
+ targetNumberOfShards + "]");
|
||||
}
|
||||
} else if (sourceNumberOfShards > targetNumberOfShards) { // shrink
|
||||
factor = sourceNumberOfShards / targetNumberOfShards;
|
||||
if (factor * targetNumberOfShards != sourceNumberOfShards || factor <= 1) {
|
||||
throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " +
|
||||
throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " +
|
||||
"multiple of ["
|
||||
+ targetNumberOfShards + "]");
|
||||
}
|
||||
|
|
|
@ -222,7 +222,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject {
|
|||
}
|
||||
|
||||
SnapshotRecoverySource(StreamInput in) throws IOException {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
|
||||
restoreUUID = in.readString();
|
||||
} else {
|
||||
restoreUUID = RestoreInProgress.BWC_UUID;
|
||||
|
@ -250,7 +250,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject {
|
|||
|
||||
@Override
|
||||
protected void writeAdditionalFields(StreamOutput out) throws IOException {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
|
||||
out.writeString(restoreUUID);
|
||||
}
|
||||
snapshot.writeTo(out);
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.get;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressorFactory;
|
||||
|
@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
|
||||
import org.elasticsearch.index.mapper.SourceFieldMapper;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
import org.elasticsearch.search.lookup.SourceLookup;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -53,6 +55,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
public static final String _TYPE = "_type";
|
||||
public static final String _ID = "_id";
|
||||
private static final String _VERSION = "_version";
|
||||
private static final String _SEQ_NO = "_seq_no";
|
||||
private static final String _PRIMARY_TERM = "_primary_term";
|
||||
private static final String FOUND = "found";
|
||||
private static final String FIELDS = "fields";
|
||||
|
||||
|
@ -60,6 +64,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
private String type;
|
||||
private String id;
|
||||
private long version;
|
||||
private long seqNo;
|
||||
private long primaryTerm;
|
||||
private boolean exists;
|
||||
private Map<String, DocumentField> fields;
|
||||
private Map<String, Object> sourceAsMap;
|
||||
|
@ -69,11 +75,17 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
GetResult() {
|
||||
}
|
||||
|
||||
public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source,
|
||||
Map<String, DocumentField> fields) {
|
||||
public GetResult(String index, String type, String id, long seqNo, long primaryTerm, long version, boolean exists,
|
||||
BytesReference source, Map<String, DocumentField> fields) {
|
||||
this.index = index;
|
||||
this.type = type;
|
||||
this.id = id;
|
||||
this.seqNo = seqNo;
|
||||
this.primaryTerm = primaryTerm;
|
||||
assert (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) || (seqNo >= 0 && primaryTerm >= 1) :
|
||||
"seqNo: " + seqNo + " primaryTerm: " + primaryTerm;
|
||||
assert exists || (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) :
|
||||
"doc not found but seqNo/primaryTerm are set";
|
||||
this.version = version;
|
||||
this.exists = exists;
|
||||
this.source = source;
|
||||
|
@ -118,6 +130,20 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
return version;
|
||||
}
|
||||
|
||||
/**
|
||||
* The sequence number assigned to the last operation to have changed this document, if found.
|
||||
*/
|
||||
public long getSeqNo() {
|
||||
return seqNo;
|
||||
}
|
||||
|
||||
/**
|
||||
* The primary term of the last primary that has changed this document, if found.
|
||||
*/
|
||||
public long getPrimaryTerm() {
|
||||
return primaryTerm;
|
||||
}
|
||||
|
||||
/**
|
||||
* The source of the document if exists.
|
||||
*/
|
||||
|
@ -213,6 +239,11 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
}
|
||||
|
||||
public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException {
|
||||
if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { // seqNo may not be assigned if read from an old node
|
||||
builder.field(_SEQ_NO, seqNo);
|
||||
builder.field(_PRIMARY_TERM, primaryTerm);
|
||||
}
|
||||
|
||||
List<DocumentField> metaFields = new ArrayList<>();
|
||||
List<DocumentField> otherFields = new ArrayList<>();
|
||||
if (fields != null && !fields.isEmpty()) {
|
||||
|
@ -282,6 +313,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
|
||||
String currentFieldName = parser.currentName();
|
||||
long version = -1;
|
||||
long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
long primaryTerm = 0;
|
||||
Boolean found = null;
|
||||
BytesReference source = null;
|
||||
Map<String, DocumentField> fields = new HashMap<>();
|
||||
|
@ -297,6 +330,10 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
id = parser.text();
|
||||
} else if (_VERSION.equals(currentFieldName)) {
|
||||
version = parser.longValue();
|
||||
} else if (_SEQ_NO.equals(currentFieldName)) {
|
||||
seqNo = parser.longValue();
|
||||
} else if (_PRIMARY_TERM.equals(currentFieldName)) {
|
||||
primaryTerm = parser.longValue();
|
||||
} else if (FOUND.equals(currentFieldName)) {
|
||||
found = parser.booleanValue();
|
||||
} else {
|
||||
|
@ -326,7 +363,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
}
|
||||
}
|
||||
}
|
||||
return new GetResult(index, type, id, version, found, source, fields);
|
||||
return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, fields);
|
||||
}
|
||||
|
||||
public static GetResult fromXContent(XContentParser parser) throws IOException {
|
||||
|
@ -347,6 +384,13 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
index = in.readString();
|
||||
type = in.readOptionalString();
|
||||
id = in.readString();
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
seqNo = in.readZLong();
|
||||
primaryTerm = in.readVLong();
|
||||
} else {
|
||||
seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
primaryTerm = 0L;
|
||||
}
|
||||
version = in.readLong();
|
||||
exists = in.readBoolean();
|
||||
if (exists) {
|
||||
|
@ -372,6 +416,10 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
out.writeString(index);
|
||||
out.writeOptionalString(type);
|
||||
out.writeString(id);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeZLong(seqNo);
|
||||
out.writeVLong(primaryTerm);
|
||||
}
|
||||
out.writeLong(version);
|
||||
out.writeBoolean(exists);
|
||||
if (exists) {
|
||||
|
@ -397,6 +445,8 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
}
|
||||
GetResult getResult = (GetResult) o;
|
||||
return version == getResult.version &&
|
||||
seqNo == getResult.seqNo &&
|
||||
primaryTerm == getResult.primaryTerm &&
|
||||
exists == getResult.exists &&
|
||||
Objects.equals(index, getResult.index) &&
|
||||
Objects.equals(type, getResult.type) &&
|
||||
|
@ -407,7 +457,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(version, exists, index, type, id, fields, sourceAsMap());
|
||||
return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, fields, sourceAsMap());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.MapperService;
|
|||
import org.elasticsearch.index.mapper.RoutingFieldMapper;
|
||||
import org.elasticsearch.index.mapper.SourceFieldMapper;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
|
@ -112,7 +113,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
|
|||
public GetResult get(Engine.GetResult engineGetResult, String id, String type,
|
||||
String[] fields, FetchSourceContext fetchSourceContext) {
|
||||
if (!engineGetResult.exists()) {
|
||||
return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
|
||||
return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null);
|
||||
}
|
||||
|
||||
currentMetric.inc();
|
||||
|
@ -168,7 +169,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
|
|||
}
|
||||
|
||||
if (get == null || get.exists() == false) {
|
||||
return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
|
||||
return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null);
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -233,7 +234,8 @@ public final class ShardGetService extends AbstractIndexShardComponent {
|
|||
}
|
||||
}
|
||||
|
||||
return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields);
|
||||
return new GetResult(shardId.getIndexName(), type, id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm,
|
||||
get.version(), get.exists(), source, fields);
|
||||
}
|
||||
|
||||
private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
|
||||
|
|
|
@ -134,7 +134,9 @@ public class CompoundProcessor implements Processor {
|
|||
if (onFailureProcessors.isEmpty()) {
|
||||
throw compoundProcessorException;
|
||||
} else {
|
||||
executeOnFailure(ingestDocument, compoundProcessorException);
|
||||
if (executeOnFailure(ingestDocument, compoundProcessorException) == false) {
|
||||
return null;
|
||||
}
|
||||
break;
|
||||
}
|
||||
} finally {
|
||||
|
@ -145,13 +147,17 @@ public class CompoundProcessor implements Processor {
|
|||
return ingestDocument;
|
||||
}
|
||||
|
||||
|
||||
void executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception {
|
||||
/**
|
||||
* @return true if execution should continue, false if document is dropped.
|
||||
*/
|
||||
boolean executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception {
|
||||
try {
|
||||
putFailureMetadata(ingestDocument, exception);
|
||||
for (Processor processor : onFailureProcessors) {
|
||||
try {
|
||||
processor.execute(ingestDocument);
|
||||
if (processor.execute(ingestDocument) == null) {
|
||||
return false;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw newCompoundProcessorException(e, processor.getType(), processor.getTag());
|
||||
}
|
||||
|
@ -159,6 +165,7 @@ public class CompoundProcessor implements Processor {
|
|||
} finally {
|
||||
removeFailureMetadata(ingestDocument);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) {
|
||||
|
|
|
@ -24,11 +24,11 @@ import org.elasticsearch.ExceptionsHelper;
|
|||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.DocWriteRequest;
|
||||
import org.elasticsearch.action.bulk.TransportBulkAction;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.ingest.DeletePipelineRequest;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
|
||||
import org.elasticsearch.cluster.ClusterChangedEvent;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
|
@ -388,13 +388,7 @@ public class IngestService implements ClusterStateApplier {
|
|||
@Override
|
||||
protected void doRun() {
|
||||
for (DocWriteRequest<?> actionRequest : actionRequests) {
|
||||
IndexRequest indexRequest = null;
|
||||
if (actionRequest instanceof IndexRequest) {
|
||||
indexRequest = (IndexRequest) actionRequest;
|
||||
} else if (actionRequest instanceof UpdateRequest) {
|
||||
UpdateRequest updateRequest = (UpdateRequest) actionRequest;
|
||||
indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest();
|
||||
}
|
||||
IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(actionRequest);
|
||||
if (indexRequest == null) {
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -19,12 +19,14 @@
|
|||
|
||||
package org.elasticsearch.rest.action.document;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
|
@ -49,8 +51,14 @@ import static org.elasticsearch.rest.RestStatus.OK;
|
|||
*/
|
||||
public class RestGetSourceAction extends BaseRestHandler {
|
||||
|
||||
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetSourceAction.class));
|
||||
static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source"
|
||||
+ "requests is deprecated.";
|
||||
|
||||
public RestGetSourceAction(final Settings settings, final RestController controller) {
|
||||
super(settings);
|
||||
controller.registerHandler(GET, "/{index}/_source/{id}", this);
|
||||
controller.registerHandler(HEAD, "/{index}/_source/{id}", this);
|
||||
controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this);
|
||||
controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this);
|
||||
}
|
||||
|
@ -62,7 +70,13 @@ public class RestGetSourceAction extends BaseRestHandler {
|
|||
|
||||
@Override
|
||||
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
|
||||
final GetRequest getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
|
||||
final GetRequest getRequest;
|
||||
if (request.hasParam("type")) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("get_source_with_types", TYPES_DEPRECATION_MESSAGE);
|
||||
getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
|
||||
} else {
|
||||
getRequest = new GetRequest(request.param("index"), request.param("id"));
|
||||
}
|
||||
getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh()));
|
||||
getRequest.routing(request.param("routing"));
|
||||
getRequest.preference(request.param("preference"));
|
||||
|
|
|
@ -19,16 +19,6 @@
|
|||
|
||||
package org.elasticsearch.search;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.lucene.search.Explanation;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.OriginalIndices;
|
||||
|
@ -61,6 +51,16 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
|
|||
import org.elasticsearch.search.lookup.SourceLookup;
|
||||
import org.elasticsearch.transport.RemoteClusterAware;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.singletonMap;
|
||||
import static java.util.Collections.unmodifiableMap;
|
||||
|
@ -311,10 +311,17 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
|
|||
}
|
||||
|
||||
/**
|
||||
* An array of the sort values used.
|
||||
* An array of the (formatted) sort values used.
|
||||
*/
|
||||
public Object[] getSortValues() {
|
||||
return sortValues.sortValues();
|
||||
return sortValues.getFormattedSortValues();
|
||||
}
|
||||
|
||||
/**
|
||||
* An array of the (raw) sort values used.
|
||||
*/
|
||||
public Object[] getRawSortValues() {
|
||||
return sortValues.getRawSortValues();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -20,9 +20,11 @@
|
|||
package org.elasticsearch.search;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -35,101 +37,56 @@ import java.util.Objects;
|
|||
|
||||
public class SearchSortValues implements ToXContentFragment, Writeable {
|
||||
|
||||
static final SearchSortValues EMPTY = new SearchSortValues(new Object[0]);
|
||||
private final Object[] sortValues;
|
||||
private static final Object[] EMPTY_ARRAY = new Object[0];
|
||||
static final SearchSortValues EMPTY = new SearchSortValues(EMPTY_ARRAY);
|
||||
|
||||
private final Object[] formattedSortValues;
|
||||
private final Object[] rawSortValues;
|
||||
|
||||
SearchSortValues(Object[] sortValues) {
|
||||
this.sortValues = Objects.requireNonNull(sortValues, "sort values must not be empty");
|
||||
this.formattedSortValues = Objects.requireNonNull(sortValues, "sort values must not be empty");
|
||||
this.rawSortValues = EMPTY_ARRAY;
|
||||
}
|
||||
|
||||
public SearchSortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
|
||||
Objects.requireNonNull(sortValues);
|
||||
public SearchSortValues(Object[] rawSortValues, DocValueFormat[] sortValueFormats) {
|
||||
Objects.requireNonNull(rawSortValues);
|
||||
Objects.requireNonNull(sortValueFormats);
|
||||
this.sortValues = Arrays.copyOf(sortValues, sortValues.length);
|
||||
for (int i = 0; i < sortValues.length; ++i) {
|
||||
if (this.sortValues[i] instanceof BytesRef) {
|
||||
this.sortValues[i] = sortValueFormats[i].format((BytesRef) sortValues[i]);
|
||||
if (rawSortValues.length != sortValueFormats.length) {
|
||||
throw new IllegalArgumentException("formattedSortValues and sortValueFormats must hold the same number of items");
|
||||
}
|
||||
this.rawSortValues = rawSortValues;
|
||||
this.formattedSortValues = Arrays.copyOf(rawSortValues, rawSortValues.length);
|
||||
for (int i = 0; i < rawSortValues.length; ++i) {
|
||||
//we currently format only BytesRef but we may want to change that in the future
|
||||
Object sortValue = rawSortValues[i];
|
||||
if (sortValue instanceof BytesRef) {
|
||||
this.formattedSortValues[i] = sortValueFormats[i].format((BytesRef) sortValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public SearchSortValues(StreamInput in) throws IOException {
|
||||
int size = in.readVInt();
|
||||
if (size > 0) {
|
||||
sortValues = new Object[size];
|
||||
for (int i = 0; i < sortValues.length; i++) {
|
||||
byte type = in.readByte();
|
||||
if (type == 0) {
|
||||
sortValues[i] = null;
|
||||
} else if (type == 1) {
|
||||
sortValues[i] = in.readString();
|
||||
} else if (type == 2) {
|
||||
sortValues[i] = in.readInt();
|
||||
} else if (type == 3) {
|
||||
sortValues[i] = in.readLong();
|
||||
} else if (type == 4) {
|
||||
sortValues[i] = in.readFloat();
|
||||
} else if (type == 5) {
|
||||
sortValues[i] = in.readDouble();
|
||||
} else if (type == 6) {
|
||||
sortValues[i] = in.readByte();
|
||||
} else if (type == 7) {
|
||||
sortValues[i] = in.readShort();
|
||||
} else if (type == 8) {
|
||||
sortValues[i] = in.readBoolean();
|
||||
} else {
|
||||
throw new IOException("Can't match type [" + type + "]");
|
||||
}
|
||||
}
|
||||
SearchSortValues(StreamInput in) throws IOException {
|
||||
this.formattedSortValues = in.readArray(Lucene::readSortValue, Object[]::new);
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
this.rawSortValues = in.readArray(Lucene::readSortValue, Object[]::new);
|
||||
} else {
|
||||
sortValues = new Object[0];
|
||||
this.rawSortValues = EMPTY_ARRAY;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(sortValues.length);
|
||||
for (Object sortValue : sortValues) {
|
||||
if (sortValue == null) {
|
||||
out.writeByte((byte) 0);
|
||||
} else {
|
||||
Class type = sortValue.getClass();
|
||||
if (type == String.class) {
|
||||
out.writeByte((byte) 1);
|
||||
out.writeString((String) sortValue);
|
||||
} else if (type == Integer.class) {
|
||||
out.writeByte((byte) 2);
|
||||
out.writeInt((Integer) sortValue);
|
||||
} else if (type == Long.class) {
|
||||
out.writeByte((byte) 3);
|
||||
out.writeLong((Long) sortValue);
|
||||
} else if (type == Float.class) {
|
||||
out.writeByte((byte) 4);
|
||||
out.writeFloat((Float) sortValue);
|
||||
} else if (type == Double.class) {
|
||||
out.writeByte((byte) 5);
|
||||
out.writeDouble((Double) sortValue);
|
||||
} else if (type == Byte.class) {
|
||||
out.writeByte((byte) 6);
|
||||
out.writeByte((Byte) sortValue);
|
||||
} else if (type == Short.class) {
|
||||
out.writeByte((byte) 7);
|
||||
out.writeShort((Short) sortValue);
|
||||
} else if (type == Boolean.class) {
|
||||
out.writeByte((byte) 8);
|
||||
out.writeBoolean((Boolean) sortValue);
|
||||
} else {
|
||||
throw new IOException("Can't handle sort field value of type [" + type + "]");
|
||||
}
|
||||
}
|
||||
out.writeArray(Lucene::writeSortValue, this.formattedSortValues);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeArray(Lucene::writeSortValue, this.rawSortValues);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (sortValues.length > 0) {
|
||||
if (formattedSortValues.length > 0) {
|
||||
builder.startArray(Fields.SORT);
|
||||
for (Object sortValue : sortValues) {
|
||||
for (Object sortValue : formattedSortValues) {
|
||||
builder.value(sortValue);
|
||||
}
|
||||
builder.endArray();
|
||||
|
@ -142,24 +99,37 @@ public class SearchSortValues implements ToXContentFragment, Writeable {
|
|||
return new SearchSortValues(parser.list().toArray());
|
||||
}
|
||||
|
||||
public Object[] sortValues() {
|
||||
return sortValues;
|
||||
/**
|
||||
* Returns the formatted version of the values that sorting was performed against
|
||||
*/
|
||||
public Object[] getFormattedSortValues() {
|
||||
return formattedSortValues;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the raw version of the values that sorting was performed against
|
||||
*/
|
||||
public Object[] getRawSortValues() {
|
||||
return rawSortValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
SearchSortValues other = (SearchSortValues) obj;
|
||||
return Arrays.equals(sortValues, other.sortValues);
|
||||
SearchSortValues that = (SearchSortValues) o;
|
||||
return Arrays.equals(formattedSortValues, that.formattedSortValues) &&
|
||||
Arrays.equals(rawSortValues, that.rawSortValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Arrays.hashCode(sortValues);
|
||||
int result = Arrays.hashCode(formattedSortValues);
|
||||
result = 31 * result + Arrays.hashCode(rawSortValues);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -183,10 +183,11 @@ public abstract class RemoteClusterAware {
|
|||
* (ProxyAddresss, [SeedNodeSuppliers]). If a cluster is configured with a proxy address all seed nodes will point to
|
||||
* {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node.
|
||||
*/
|
||||
protected static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(Settings settings) {
|
||||
final Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> remoteSeeds =
|
||||
protected static Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> buildRemoteClustersDynamicConfig(
|
||||
final Settings settings) {
|
||||
final Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> remoteSeeds =
|
||||
buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS);
|
||||
final Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> searchRemoteSeeds =
|
||||
final Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> searchRemoteSeeds =
|
||||
buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS);
|
||||
// sort the intersection for predictable output order
|
||||
final NavigableSet<String> intersection =
|
||||
|
@ -205,7 +206,7 @@ public abstract class RemoteClusterAware {
|
|||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
}
|
||||
|
||||
private static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(
|
||||
private static Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> buildRemoteClustersDynamicConfig(
|
||||
final Settings settings, final Setting.AffixSetting<List<String>> seedsSetting) {
|
||||
final Stream<Setting<List<String>>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings);
|
||||
return allConcreteSettings.collect(
|
||||
|
@ -214,9 +215,9 @@ public abstract class RemoteClusterAware {
|
|||
List<String> addresses = concreteSetting.get(settings);
|
||||
final boolean proxyMode =
|
||||
REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings);
|
||||
List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size());
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> nodes = new ArrayList<>(addresses.size());
|
||||
for (String address : addresses) {
|
||||
nodes.add(() -> buildSeedNode(clusterName, address, proxyMode));
|
||||
nodes.add(Tuple.tuple(address, () -> buildSeedNode(clusterName, address, proxyMode)));
|
||||
}
|
||||
return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes);
|
||||
}));
|
||||
|
@ -304,16 +305,24 @@ public abstract class RemoteClusterAware {
|
|||
(namespace, value) -> {});
|
||||
}
|
||||
|
||||
|
||||
protected static InetSocketAddress parseSeedAddress(String remoteHost) {
|
||||
String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost));
|
||||
static InetSocketAddress parseSeedAddress(String remoteHost) {
|
||||
final Tuple<String, Integer> hostPort = parseHostPort(remoteHost);
|
||||
final String host = hostPort.v1();
|
||||
assert hostPort.v2() != null : remoteHost;
|
||||
final int port = hostPort.v2();
|
||||
InetAddress hostAddress;
|
||||
try {
|
||||
hostAddress = InetAddress.getByName(host);
|
||||
} catch (UnknownHostException e) {
|
||||
throw new IllegalArgumentException("unknown host [" + host + "]", e);
|
||||
}
|
||||
return new InetSocketAddress(hostAddress, parsePort(remoteHost));
|
||||
return new InetSocketAddress(hostAddress, port);
|
||||
}
|
||||
|
||||
public static Tuple<String, Integer> parseHostPort(final String remoteHost) {
|
||||
final String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost));
|
||||
final int port = parsePort(remoteHost);
|
||||
return Tuple.tuple(host, port);
|
||||
}
|
||||
|
||||
private static int parsePort(String remoteHost) {
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.elasticsearch.action.support.ContextPreservingActionListener;
|
|||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
|
@ -95,7 +96,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
|
|||
private final Predicate<DiscoveryNode> nodePredicate;
|
||||
private final ThreadPool threadPool;
|
||||
private volatile String proxyAddress;
|
||||
private volatile List<Supplier<DiscoveryNode>> seedNodes;
|
||||
private volatile List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes;
|
||||
private volatile boolean skipUnavailable;
|
||||
private final ConnectHandler connectHandler;
|
||||
private final TimeValue initialConnectionTimeout;
|
||||
|
@ -111,7 +112,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
|
|||
* @param nodePredicate a predicate to filter eligible remote nodes to connect to
|
||||
* @param proxyAddress the proxy address
|
||||
*/
|
||||
RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
|
||||
RemoteClusterConnection(Settings settings, String clusterAlias, List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
|
||||
TransportService transportService, int maxNumRemoteConnections, Predicate<DiscoveryNode> nodePredicate,
|
||||
String proxyAddress) {
|
||||
this(settings, clusterAlias, seedNodes, transportService, maxNumRemoteConnections, nodePredicate, proxyAddress,
|
||||
|
@ -119,7 +120,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
|
|||
}
|
||||
|
||||
// Public for tests to pass a StubbableConnectionManager
|
||||
RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
|
||||
RemoteClusterConnection(Settings settings, String clusterAlias, List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
|
||||
TransportService transportService, int maxNumRemoteConnections, Predicate<DiscoveryNode> nodePredicate,
|
||||
String proxyAddress, ConnectionManager connectionManager) {
|
||||
this.transportService = transportService;
|
||||
|
@ -155,7 +156,10 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
|
|||
/**
|
||||
* Updates the list of seed nodes for this cluster connection
|
||||
*/
|
||||
synchronized void updateSeedNodes(String proxyAddress, List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) {
|
||||
synchronized void updateSeedNodes(
|
||||
final String proxyAddress,
|
||||
final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
|
||||
final ActionListener<Void> connectListener) {
|
||||
this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes));
|
||||
this.proxyAddress = proxyAddress;
|
||||
connectHandler.connect(connectListener);
|
||||
|
@ -465,7 +469,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
|
|||
maybeConnect();
|
||||
}
|
||||
});
|
||||
collectRemoteNodes(seedNodes.iterator(), transportService, connectionManager, listener);
|
||||
collectRemoteNodes(seedNodes.stream().map(Tuple::v2).iterator(), transportService, connectionManager, listener);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -672,10 +676,13 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos
|
|||
* Get the information about remote nodes to be rendered on {@code _remote/info} requests.
|
||||
*/
|
||||
public RemoteConnectionInfo getConnectionInfo() {
|
||||
List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect
|
||||
(Collectors.toList());
|
||||
return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(),
|
||||
initialConnectionTimeout, skipUnavailable);
|
||||
return new RemoteConnectionInfo(
|
||||
clusterAlias,
|
||||
seedNodes.stream().map(Tuple::v1).collect(Collectors.toList()),
|
||||
maxNumRemoteConnections,
|
||||
connectedNodes.size(),
|
||||
initialConnectionTimeout,
|
||||
skipUnavailable);
|
||||
}
|
||||
|
||||
int getNumNodesConnected() {
|
||||
|
|
|
@ -201,7 +201,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
|
|||
* @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes
|
||||
* @param connectionListener a listener invoked once every configured cluster has been connected to
|
||||
*/
|
||||
private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds,
|
||||
private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> seeds,
|
||||
ActionListener<Void> connectionListener) {
|
||||
if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) {
|
||||
throw new IllegalArgumentException("remote clusters must not have the empty string as its key");
|
||||
|
@ -212,8 +212,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
|
|||
} else {
|
||||
CountDown countDown = new CountDown(seeds.size());
|
||||
remoteClusters.putAll(this.remoteClusters);
|
||||
for (Map.Entry<String, Tuple<String, List<Supplier<DiscoveryNode>>>> entry : seeds.entrySet()) {
|
||||
List<Supplier<DiscoveryNode>> seedList = entry.getValue().v2();
|
||||
for (Map.Entry<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> entry : seeds.entrySet()) {
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> seedList = entry.getValue().v2();
|
||||
String proxyAddress = entry.getValue().v1();
|
||||
|
||||
RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey());
|
||||
|
@ -408,9 +408,10 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
|
|||
final List<String> addresses,
|
||||
final String proxyAddress,
|
||||
final ActionListener<Void> connectionListener) {
|
||||
final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () ->
|
||||
buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress))
|
||||
).collect(Collectors.toList());
|
||||
final List<Tuple<String, Supplier<DiscoveryNode>>> nodes =
|
||||
addresses.stream().<Tuple<String, Supplier<DiscoveryNode>>>map(address -> Tuple.tuple(address, () ->
|
||||
buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress)))
|
||||
).collect(Collectors.toList());
|
||||
updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener);
|
||||
}
|
||||
|
||||
|
@ -421,7 +422,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
|
|||
void initializeRemoteClusters() {
|
||||
final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
|
||||
final PlainActionFuture<Void> future = new PlainActionFuture<>();
|
||||
Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds = RemoteClusterAware.buildRemoteClustersDynamicConfig(settings);
|
||||
Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> seeds =
|
||||
RemoteClusterAware.buildRemoteClustersDynamicConfig(settings);
|
||||
updateRemoteClusters(seeds, future);
|
||||
try {
|
||||
future.get(timeValue.millis(), TimeUnit.MILLISECONDS);
|
||||
|
|
|
@ -16,9 +16,11 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.transport;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
|
@ -27,25 +29,29 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetAddress;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
|
||||
/**
|
||||
* This class encapsulates all remote cluster information to be rendered on
|
||||
* {@code _remote/info} requests.
|
||||
*/
|
||||
public final class RemoteConnectionInfo implements ToXContentFragment, Writeable {
|
||||
final List<TransportAddress> seedNodes;
|
||||
final List<String> seedNodes;
|
||||
final int connectionsPerCluster;
|
||||
final TimeValue initialConnectionTimeout;
|
||||
final int numNodesConnected;
|
||||
final String clusterAlias;
|
||||
final boolean skipUnavailable;
|
||||
|
||||
RemoteConnectionInfo(String clusterAlias, List<TransportAddress> seedNodes,
|
||||
RemoteConnectionInfo(String clusterAlias, List<String> seedNodes,
|
||||
int connectionsPerCluster, int numNodesConnected,
|
||||
TimeValue initialConnectionTimeout, boolean skipUnavailable) {
|
||||
this.clusterAlias = clusterAlias;
|
||||
|
@ -57,7 +63,17 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
|
|||
}
|
||||
|
||||
public RemoteConnectionInfo(StreamInput input) throws IOException {
|
||||
seedNodes = input.readList(TransportAddress::new);
|
||||
if (input.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
seedNodes = Arrays.asList(input.readStringArray());
|
||||
} else {
|
||||
// versions prior to 7.0.0 sent the resolved transport address of the seed nodes
|
||||
final List<TransportAddress> transportAddresses = input.readList(TransportAddress::new);
|
||||
seedNodes =
|
||||
transportAddresses
|
||||
.stream()
|
||||
.map(a -> a.address().getHostString() + ":" + a.address().getPort())
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
if (input.getVersion().before(Version.V_7_0_0)) {
|
||||
/*
|
||||
* Versions before 7.0 sent the HTTP addresses of all nodes in the
|
||||
|
@ -78,7 +94,26 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
|
|||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeList(seedNodes);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeStringArray(seedNodes.toArray(new String[0]));
|
||||
} else {
|
||||
// versions prior to 7.0.0 received the resolved transport address of the seed nodes
|
||||
out.writeList(seedNodes
|
||||
.stream()
|
||||
.map(
|
||||
s -> {
|
||||
final Tuple<String, Integer> hostPort = RemoteClusterAware.parseHostPort(s);
|
||||
assert hostPort.v2() != null : s;
|
||||
try {
|
||||
return new TransportAddress(
|
||||
InetAddress.getByAddress(hostPort.v1(), TransportAddress.META_ADDRESS.getAddress()),
|
||||
hostPort.v2());
|
||||
} catch (final UnknownHostException e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
})
|
||||
.collect(Collectors.toList()));
|
||||
}
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
/*
|
||||
* Versions before 7.0 sent the HTTP addresses of all nodes in the
|
||||
|
@ -104,8 +139,8 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
|
|||
builder.startObject(clusterAlias);
|
||||
{
|
||||
builder.startArray("seeds");
|
||||
for (TransportAddress addr : seedNodes) {
|
||||
builder.value(addr.toString());
|
||||
for (String addr : seedNodes) {
|
||||
builder.value(addr);
|
||||
}
|
||||
builder.endArray();
|
||||
builder.field("connected", numNodesConnected > 0);
|
||||
|
@ -136,4 +171,5 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
|
|||
return Objects.hash(seedNodes, connectionsPerCluster, initialConnectionTimeout,
|
||||
numNodesConnected, clusterAlias, skipUnavailable);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.action.index.IndexRequest;
|
|||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.AutoCreateIndex;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.ClusterChangedEvent;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.ClusterStateApplier;
|
||||
|
@ -408,6 +409,57 @@ public class TransportBulkActionIngestTests extends ESTestCase {
|
|||
validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE_ALIAS, "type", "id"));
|
||||
}
|
||||
|
||||
public void testUseDefaultPipelineWithBulkUpsert() throws Exception {
|
||||
Exception exception = new Exception("fake exception");
|
||||
BulkRequest bulkRequest = new BulkRequest();
|
||||
IndexRequest indexRequest1 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id1").source(Collections.emptyMap());
|
||||
IndexRequest indexRequest2 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id2").source(Collections.emptyMap());
|
||||
IndexRequest indexRequest3 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id3").source(Collections.emptyMap());
|
||||
UpdateRequest upsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id1").upsert(indexRequest1).script(mockScript("1"));
|
||||
UpdateRequest docAsUpsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").doc(indexRequest2).docAsUpsert(true);
|
||||
// this test only covers the mechanics that scripted bulk upserts will execute a default pipeline. However, in practice scripted
|
||||
// bulk upserts with a default pipeline are a bit surprising since the script executes AFTER the pipeline.
|
||||
UpdateRequest scriptedUpsert = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").upsert(indexRequest3).script(mockScript("1"))
|
||||
.scriptedUpsert(true);
|
||||
bulkRequest.add(upsertRequest).add(docAsUpsertRequest).add(scriptedUpsert);
|
||||
|
||||
AtomicBoolean responseCalled = new AtomicBoolean(false);
|
||||
AtomicBoolean failureCalled = new AtomicBoolean(false);
|
||||
assertNull(indexRequest1.getPipeline());
|
||||
assertNull(indexRequest2.getPipeline());
|
||||
assertNull(indexRequest3.getPipeline());
|
||||
action.execute(null, bulkRequest, ActionListener.wrap(
|
||||
response -> {
|
||||
BulkItemResponse itemResponse = response.iterator().next();
|
||||
assertThat(itemResponse.getFailure().getMessage(), containsString("fake exception"));
|
||||
responseCalled.set(true);
|
||||
},
|
||||
e -> {
|
||||
assertThat(e, sameInstance(exception));
|
||||
failureCalled.set(true);
|
||||
}));
|
||||
|
||||
// check failure works, and passes through to the listener
|
||||
assertFalse(action.isExecuted); // haven't executed yet
|
||||
assertFalse(responseCalled.get());
|
||||
assertFalse(failureCalled.get());
|
||||
verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any());
|
||||
assertEquals(indexRequest1.getPipeline(), "default_pipeline");
|
||||
assertEquals(indexRequest2.getPipeline(), "default_pipeline");
|
||||
assertEquals(indexRequest3.getPipeline(), "default_pipeline");
|
||||
completionHandler.getValue().accept(exception);
|
||||
assertTrue(failureCalled.get());
|
||||
|
||||
// now check success of the transport bulk action
|
||||
indexRequest1.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
|
||||
indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
|
||||
indexRequest3.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
|
||||
completionHandler.getValue().accept(null);
|
||||
assertTrue(action.isExecuted);
|
||||
assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one
|
||||
verifyZeroInteractions(transportService);
|
||||
}
|
||||
|
||||
public void testCreateIndexBeforeRunPipeline() throws Exception {
|
||||
Exception exception = new Exception("fake exception");
|
||||
IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id");
|
||||
|
@ -445,6 +497,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
|
|||
indexRequest.source(Collections.emptyMap());
|
||||
AtomicBoolean responseCalled = new AtomicBoolean(false);
|
||||
AtomicBoolean failureCalled = new AtomicBoolean(false);
|
||||
assertNull(indexRequest.getPipeline());
|
||||
singleItemBulkWriteAction.execute(null, indexRequest, ActionListener.wrap(
|
||||
response -> {
|
||||
responseCalled.set(true);
|
||||
|
@ -459,6 +512,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
|
|||
assertFalse(responseCalled.get());
|
||||
assertFalse(failureCalled.get());
|
||||
verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any());
|
||||
assertEquals(indexRequest.getPipeline(), "default_pipeline");
|
||||
completionHandler.getValue().accept(exception);
|
||||
assertTrue(failureCalled.get());
|
||||
|
||||
|
|
|
@ -23,8 +23,10 @@ import org.elasticsearch.action.ActionListener;
|
|||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.AutoCreateIndex;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
@ -132,4 +134,23 @@ public class TransportBulkActionTests extends ESTestCase {
|
|||
throw new AssertionError(exception);
|
||||
}));
|
||||
}
|
||||
|
||||
public void testGetIndexWriteRequest() throws Exception {
|
||||
IndexRequest indexRequest = new IndexRequest("index", "type", "id1").source(Collections.emptyMap());
|
||||
UpdateRequest upsertRequest = new UpdateRequest("index", "type", "id1").upsert(indexRequest).script(mockScript("1"));
|
||||
UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "type", "id2").doc(indexRequest).docAsUpsert(true);
|
||||
UpdateRequest scriptedUpsert = new UpdateRequest("index", "type", "id2").upsert(indexRequest).script(mockScript("1"))
|
||||
.scriptedUpsert(true);
|
||||
|
||||
assertEquals(TransportBulkAction.getIndexWriteRequest(indexRequest), indexRequest);
|
||||
assertEquals(TransportBulkAction.getIndexWriteRequest(upsertRequest), indexRequest);
|
||||
assertEquals(TransportBulkAction.getIndexWriteRequest(docAsUpsertRequest), indexRequest);
|
||||
assertEquals(TransportBulkAction.getIndexWriteRequest(scriptedUpsert), indexRequest);
|
||||
|
||||
DeleteRequest deleteRequest = new DeleteRequest("index", "id");
|
||||
assertNull(TransportBulkAction.getIndexWriteRequest(deleteRequest));
|
||||
|
||||
UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1");
|
||||
assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,7 +65,7 @@ public class ExplainResponseTests extends AbstractStreamableXContentTestCase<Exp
|
|||
GetResult getResult = new GetResult(randomAlphaOfLengthBetween(3, 10),
|
||||
randomAlphaOfLengthBetween(3, 10),
|
||||
randomAlphaOfLengthBetween(3, 10),
|
||||
randomNonNegativeLong(),
|
||||
0, 1, randomNonNegativeLong(),
|
||||
true,
|
||||
RandomObjects.randomSource(random()),
|
||||
singletonMap(fieldName, new DocumentField(fieldName, values)));
|
||||
|
@ -83,7 +83,7 @@ public class ExplainResponseTests extends AbstractStreamableXContentTestCase<Exp
|
|||
String id = "1";
|
||||
boolean exist = true;
|
||||
Explanation explanation = Explanation.match(1.0f, "description", Collections.emptySet());
|
||||
GetResult getResult = new GetResult(null, null, null, -1, true, new BytesArray("{ \"field1\" : " +
|
||||
GetResult getResult = new GetResult(null, null, null, 0, 1, -1, true, new BytesArray("{ \"field1\" : " +
|
||||
"\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1",
|
||||
singletonList("value1"))));
|
||||
ExplainResponse response = new ExplainResponse(index, type, id, exist, explanation, getResult);
|
||||
|
@ -105,6 +105,8 @@ public class ExplainResponseTests extends AbstractStreamableXContentTestCase<Exp
|
|||
" \"details\":[]\n" +
|
||||
" },\n" +
|
||||
" \"get\":{\n" +
|
||||
" \"_seq_no\":0," +
|
||||
" \"_primary_term\":1," +
|
||||
" \"found\":true,\n" +
|
||||
" \"_source\":{\n" +
|
||||
" \"field1\":\"value1\",\n" +
|
||||
|
|
|
@ -40,6 +40,7 @@ public class GetRequestTests extends ESTestCase {
|
|||
final ActionRequestValidationException validate = request.validate();
|
||||
|
||||
assertThat(validate, not(nullValue()));
|
||||
assertEquals(2, validate.validationErrors().size());
|
||||
assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,6 +39,7 @@ import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
|
|||
import static org.elasticsearch.index.get.GetResultTests.copyGetResult;
|
||||
import static org.elasticsearch.index.get.GetResultTests.mutateGetResult;
|
||||
import static org.elasticsearch.index.get.GetResultTests.randomGetResult;
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
|
||||
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
|
||||
|
@ -91,26 +92,28 @@ public class GetResponseTests extends ESTestCase {
|
|||
|
||||
public void testToXContent() {
|
||||
{
|
||||
GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " +
|
||||
GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 0, 1, 1, true, new BytesArray("{ \"field1\" : " +
|
||||
"\"value1\", \"field2\":\"value2\"}"), Collections.singletonMap("field1", new DocumentField("field1",
|
||||
Collections.singletonList("value1")))));
|
||||
String output = Strings.toString(getResponse);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " +
|
||||
": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," +
|
||||
"\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}",
|
||||
output);
|
||||
}
|
||||
{
|
||||
GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 1, false, null, null));
|
||||
GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null));
|
||||
String output = Strings.toString(getResponse);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output);
|
||||
}
|
||||
}
|
||||
|
||||
public void testToString() {
|
||||
GetResponse getResponse = new GetResponse(
|
||||
new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " + "\"value1\", \"field2\":\"value2\"}"),
|
||||
GetResponse getResponse = new GetResponse(new GetResult("index", "type", "id", 0, 1, 1, true,
|
||||
new BytesArray("{ \"field1\" : " + "\"value1\", \"field2\":\"value2\"}"),
|
||||
Collections.singletonMap("field1", new DocumentField("field1", Collections.singletonList("value1")))));
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" "
|
||||
+ ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", getResponse.toString());
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," +
|
||||
"\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}",
|
||||
getResponse.toString());
|
||||
}
|
||||
|
||||
public void testEqualsAndHashcode() {
|
||||
|
@ -119,7 +122,8 @@ public class GetResponseTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testFromXContentThrowsParsingException() throws IOException {
|
||||
GetResponse getResponse = new GetResponse(new GetResult(null, null, null, randomIntBetween(1, 5), randomBoolean(), null, null));
|
||||
GetResponse getResponse =
|
||||
new GetResponse(new GetResult(null, null, null, UNASSIGNED_SEQ_NO, 0, randomIntBetween(1, 5), randomBoolean(), null, null));
|
||||
|
||||
XContentType xContentType = randomFrom(XContentType.values());
|
||||
BytesReference originalBytes = toShuffledXContent(getResponse, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
|
||||
|
|
|
@ -70,7 +70,7 @@ public class MultiGetResponseTests extends ESTestCase {
|
|||
for (int i = 0; i < items.length; i++) {
|
||||
if (randomBoolean()) {
|
||||
items[i] = new MultiGetItemResponse(new GetResponse(new GetResult(
|
||||
randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4), randomNonNegativeLong(),
|
||||
randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4), 0, 1, randomNonNegativeLong(),
|
||||
true, null, null
|
||||
)), null);
|
||||
} else {
|
||||
|
|
|
@ -60,6 +60,7 @@ import java.util.function.Function;
|
|||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
import static org.elasticsearch.script.MockScriptEngine.mockInlineScript;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
|
||||
import static org.hamcrest.CoreMatchers.hasItems;
|
||||
|
@ -359,7 +360,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
.scriptedUpsert(true);
|
||||
long nowInMillis = randomNonNegativeLong();
|
||||
// We simulate that the document is not existing yet
|
||||
GetResult getResult = new GetResult("test", "type1", "2", 0, false, null, null);
|
||||
GetResult getResult = new GetResult("test", "type1", "2", UNASSIGNED_SEQ_NO, 0, 0, false, null, null);
|
||||
UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis);
|
||||
Streamable action = result.action();
|
||||
assertThat(action, instanceOf(IndexRequest.class));
|
||||
|
@ -372,7 +373,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
.script(mockInlineScript("ctx._timestamp = ctx._now"))
|
||||
.scriptedUpsert(true);
|
||||
// We simulate that the document is not existing yet
|
||||
GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null);
|
||||
GetResult getResult = new GetResult("test", "type1", "2", 0, 1, 0, true, new BytesArray("{}"), null);
|
||||
UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L);
|
||||
Streamable action = result.action();
|
||||
assertThat(action, instanceOf(IndexRequest.class));
|
||||
|
@ -381,7 +382,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
|
||||
public void testIndexTimeout() {
|
||||
final GetResult getResult =
|
||||
new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null);
|
||||
new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null);
|
||||
final UpdateRequest updateRequest =
|
||||
new UpdateRequest("test", "type", "1")
|
||||
.script(mockInlineScript("return"))
|
||||
|
@ -391,7 +392,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
|
||||
public void testDeleteTimeout() {
|
||||
final GetResult getResult =
|
||||
new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null);
|
||||
new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null);
|
||||
final UpdateRequest updateRequest =
|
||||
new UpdateRequest("test", "type", "1")
|
||||
.script(mockInlineScript("ctx.op = delete"))
|
||||
|
@ -402,7 +403,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
public void testUpsertTimeout() throws IOException {
|
||||
final boolean exists = randomBoolean();
|
||||
final BytesReference source = exists ? new BytesArray("{\"f\":\"v\"}") : null;
|
||||
final GetResult getResult = new GetResult("test", "type", "1", 0, exists, source, null);
|
||||
final GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, exists, source, null);
|
||||
final XContentBuilder sourceBuilder = jsonBuilder();
|
||||
sourceBuilder.startObject();
|
||||
{
|
||||
|
@ -535,7 +536,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRoutingExtraction() throws Exception {
|
||||
GetResult getResult = new GetResult("test", "type", "1", 0, false, null, null);
|
||||
GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, false, null, null);
|
||||
IndexRequest indexRequest = new IndexRequest("test", "type", "1");
|
||||
|
||||
// There is no routing and parent because the document doesn't exist
|
||||
|
@ -545,7 +546,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
assertNull(UpdateHelper.calculateRouting(getResult, indexRequest));
|
||||
|
||||
// Doc exists but has no source or fields
|
||||
getResult = new GetResult("test", "type", "1", 0, true, null, null);
|
||||
getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, null);
|
||||
|
||||
// There is no routing and parent on either request
|
||||
assertNull(UpdateHelper.calculateRouting(getResult, indexRequest));
|
||||
|
@ -554,7 +555,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
fields.put("_routing", new DocumentField("_routing", Collections.singletonList("routing1")));
|
||||
|
||||
// Doc exists and has the parent and routing fields
|
||||
getResult = new GetResult("test", "type", "1", 0, true, null, fields);
|
||||
getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, fields);
|
||||
|
||||
// Use the get result parent and routing
|
||||
assertThat(UpdateHelper.calculateRouting(getResult, indexRequest), equalTo("routing1"));
|
||||
|
@ -563,7 +564,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
@SuppressWarnings("deprecated") // VersionType.FORCE is deprecated
|
||||
public void testCalculateUpdateVersion() throws Exception {
|
||||
long randomVersion = randomIntBetween(0, 100);
|
||||
GetResult getResult = new GetResult("test", "type", "1", randomVersion, true, new BytesArray("{}"), null);
|
||||
GetResult getResult = new GetResult("test", "type", "1", 0, 1, randomVersion, true, new BytesArray("{}"), null);
|
||||
|
||||
UpdateRequest request = new UpdateRequest("test", "type1", "1");
|
||||
long version = UpdateHelper.calculateUpdateVersion(request, getResult);
|
||||
|
@ -580,7 +581,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
|
||||
public void testNoopDetection() throws Exception {
|
||||
ShardId shardId = new ShardId("test", "", 0);
|
||||
GetResult getResult = new GetResult("test", "type", "1", 0, true,
|
||||
GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true,
|
||||
new BytesArray("{\"body\": \"foo\"}"),
|
||||
null);
|
||||
|
||||
|
@ -611,7 +612,7 @@ public class UpdateRequestTests extends ESTestCase {
|
|||
|
||||
public void testUpdateScript() throws Exception {
|
||||
ShardId shardId = new ShardId("test", "", 0);
|
||||
GetResult getResult = new GetResult("test", "type", "1", 0, true,
|
||||
GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true,
|
||||
new BytesArray("{\"body\": \"bar\"}"),
|
||||
null);
|
||||
|
||||
|
|
|
@ -74,11 +74,12 @@ public class UpdateResponseTests extends ESTestCase {
|
|||
|
||||
UpdateResponse updateResponse = new UpdateResponse(new ReplicationResponse.ShardInfo(3, 2),
|
||||
new ShardId("books", "books_uuid", 2), "book", "1", 7, 17, 2, UPDATED);
|
||||
updateResponse.setGetResult(new GetResult("books", "book", "1", 2, true, source, fields));
|
||||
updateResponse.setGetResult(new GetResult("books", "book", "1",0, 1, 2, true, source, fields));
|
||||
|
||||
String output = Strings.toString(updateResponse);
|
||||
assertEquals("{\"_index\":\"books\",\"_type\":\"book\",\"_id\":\"1\",\"_version\":2,\"result\":\"updated\"," +
|
||||
"\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{\"found\":true," +
|
||||
"\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{" +
|
||||
"\"_seq_no\":0,\"_primary_term\":1,\"found\":true," +
|
||||
"\"_source\":{\"title\":\"Book title\",\"isbn\":\"ABC-123\"},\"fields\":{\"isbn\":[\"ABC-123\"],\"title\":[\"Book " +
|
||||
"title\"]}}}", output);
|
||||
}
|
||||
|
|
|
@ -227,7 +227,7 @@ public class IndexMetaDataTests extends ESTestCase {
|
|||
assertEquals("the number of target shards (0) must be greater than the shard id: 0",
|
||||
expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 0)).getMessage());
|
||||
|
||||
assertEquals("the number of source shards [2] must be a must be a factor of [3]",
|
||||
assertEquals("the number of source shards [2] must be a factor of [3]",
|
||||
expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 3)).getMessage());
|
||||
|
||||
assertEquals("the number of routing shards [4] must be a multiple of the target shards [8]",
|
||||
|
@ -285,6 +285,6 @@ public class IndexMetaDataTests extends ESTestCase {
|
|||
Settings notAFactorySettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_routing_shards", 3).build();
|
||||
iae = expectThrows(IllegalArgumentException.class,
|
||||
() -> IndexMetaData.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING.get(notAFactorySettings));
|
||||
assertEquals("the number of source shards [2] must be a must be a factor of [3]", iae.getMessage());
|
||||
assertEquals("the number of source shards [2] must be a factor of [3]", iae.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -154,7 +154,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
|
|||
MetaDataCreateIndexService.validateShrinkIndex(state, "source", Collections.emptySet(), "target", targetSettings)
|
||||
|
||||
).getMessage());
|
||||
assertEquals("the number of source shards [8] must be a must be a multiple of [3]",
|
||||
assertEquals("the number of source shards [8] must be a multiple of [3]",
|
||||
expectThrows(IllegalArgumentException.class, () ->
|
||||
MetaDataCreateIndexService.validateShrinkIndex(createClusterState("source", 8, randomIntBetween(0, 10),
|
||||
Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target",
|
||||
|
@ -221,7 +221,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
|
|||
).getMessage());
|
||||
|
||||
|
||||
assertEquals("the number of source shards [3] must be a must be a factor of [4]",
|
||||
assertEquals("the number of source shards [3] must be a factor of [4]",
|
||||
expectThrows(IllegalArgumentException.class, () ->
|
||||
MetaDataCreateIndexService.validateSplitIndex(createClusterState("source", 3, randomIntBetween(0, 10),
|
||||
Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target",
|
||||
|
|
|
@ -531,24 +531,26 @@ public class LuceneTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public static Object randomSortValue() {
|
||||
switch(randomIntBetween(0, 8)) {
|
||||
switch(randomIntBetween(0, 9)) {
|
||||
case 0:
|
||||
return randomAlphaOfLengthBetween(3, 10);
|
||||
return null;
|
||||
case 1:
|
||||
return randomInt();
|
||||
return randomAlphaOfLengthBetween(3, 10);
|
||||
case 2:
|
||||
return randomLong();
|
||||
return randomInt();
|
||||
case 3:
|
||||
return randomFloat();
|
||||
return randomLong();
|
||||
case 4:
|
||||
return randomDouble();
|
||||
return randomFloat();
|
||||
case 5:
|
||||
return randomByte();
|
||||
return randomDouble();
|
||||
case 6:
|
||||
return randomShort();
|
||||
return randomByte();
|
||||
case 7:
|
||||
return randomBoolean();
|
||||
return randomShort();
|
||||
case 8:
|
||||
return randomBoolean();
|
||||
case 9:
|
||||
return new BytesRef(randomAlphaOfLengthBetween(3, 10));
|
||||
default:
|
||||
throw new UnsupportedOperationException();
|
||||
|
|
|
@ -55,7 +55,8 @@ public class EnvironmentTests extends ESTestCase {
|
|||
Environment environment = newEnvironment();
|
||||
assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue());
|
||||
assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue());
|
||||
environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
|
||||
environment = newEnvironment(Settings.builder()
|
||||
.putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
|
||||
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue());
|
||||
assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue());
|
||||
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue());
|
||||
|
|
|
@ -352,7 +352,8 @@ public class NodeEnvironmentTests extends ESTestCase {
|
|||
for (int i = 0; i < iters; i++) {
|
||||
int shard = randomIntBetween(0, counts.length - 1);
|
||||
try {
|
||||
try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), scaledRandomIntBetween(0, 10))) {
|
||||
try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard),
|
||||
scaledRandomIntBetween(0, 10))) {
|
||||
counts[shard].value++;
|
||||
countsAtomic[shard].incrementAndGet();
|
||||
assertEquals(flipFlop[shard].incrementAndGet(), 1);
|
||||
|
@ -386,7 +387,9 @@ public class NodeEnvironmentTests extends ESTestCase {
|
|||
|
||||
final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "myindexUUID").build();
|
||||
IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", indexSettings);
|
||||
IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build());
|
||||
IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder()
|
||||
.put(indexSettings)
|
||||
.put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build());
|
||||
Index index = new Index("myindex", "myindexUUID");
|
||||
ShardId sid = new ShardId(index, 0);
|
||||
|
||||
|
|
|
@ -44,6 +44,7 @@ import static java.util.Collections.singletonMap;
|
|||
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
|
||||
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
|
||||
import static org.elasticsearch.index.get.DocumentFieldTests.randomDocumentField;
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
|
||||
|
||||
|
@ -72,15 +73,16 @@ public class GetResultTests extends ESTestCase {
|
|||
|
||||
public void testToXContent() throws IOException {
|
||||
{
|
||||
GetResult getResult = new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " +
|
||||
GetResult getResult = new GetResult("index", "type", "id", 0, 1, 1, true, new BytesArray("{ \"field1\" : " +
|
||||
"\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1",
|
||||
singletonList("value1"))));
|
||||
String output = Strings.toString(getResult);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " +
|
||||
": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," +
|
||||
"\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}",
|
||||
output);
|
||||
}
|
||||
{
|
||||
GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null);
|
||||
GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null);
|
||||
String output = Strings.toString(getResult);
|
||||
assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output);
|
||||
}
|
||||
|
@ -92,7 +94,7 @@ public class GetResultTests extends ESTestCase {
|
|||
GetResult getResult = tuple.v1();
|
||||
// We don't expect to retrieve the index/type/id of the GetResult because they are not rendered
|
||||
// by the toXContentEmbedded method.
|
||||
GetResult expectedGetResult = new GetResult(null, null, null, -1,
|
||||
GetResult expectedGetResult = new GetResult(null, null, null, tuple.v2().getSeqNo(), tuple.v2().getPrimaryTerm(), -1,
|
||||
tuple.v2().isExists(), tuple.v2().sourceRef(), tuple.v2().getFields());
|
||||
|
||||
boolean humanReadable = randomBoolean();
|
||||
|
@ -118,16 +120,16 @@ public class GetResultTests extends ESTestCase {
|
|||
fields.put("foo", new DocumentField("foo", singletonList("bar")));
|
||||
fields.put("baz", new DocumentField("baz", Arrays.asList("baz_0", "baz_1")));
|
||||
|
||||
GetResult getResult = new GetResult("index", "type", "id", 2, true,
|
||||
GetResult getResult = new GetResult("index", "type", "id", 0, 1, 2, true,
|
||||
new BytesArray("{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}"), fields);
|
||||
|
||||
BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false);
|
||||
assertEquals("{\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," +
|
||||
assertEquals("{\"_seq_no\":0,\"_primary_term\":1,\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," +
|
||||
"\"fields\":{\"foo\":[\"bar\"],\"baz\":[\"baz_0\",\"baz_1\"]}}", originalBytes.utf8ToString());
|
||||
}
|
||||
|
||||
public void testToXContentEmbeddedNotFound() throws IOException {
|
||||
GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null);
|
||||
GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null);
|
||||
|
||||
BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false);
|
||||
assertEquals("{\"found\":false}", originalBytes.utf8ToString());
|
||||
|
@ -149,25 +151,34 @@ public class GetResultTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public static GetResult copyGetResult(GetResult getResult) {
|
||||
return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields());
|
||||
return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields());
|
||||
}
|
||||
|
||||
public static GetResult mutateGetResult(GetResult getResult) {
|
||||
List<Supplier<GetResult>> mutations = new ArrayList<>();
|
||||
mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), getResult.getVersion(),
|
||||
mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomNonNegativeLong(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
|
||||
getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
|
||||
getResult.isExists(), RandomObjects.randomSource(random()), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), randomNonNegativeLong(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
|
||||
getResult.isExists() ? UNASSIGNED_SEQ_NO : getResult.getSeqNo(),
|
||||
getResult.isExists() ? 0 : getResult.getPrimaryTerm(),
|
||||
getResult.getVersion(), getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(),
|
||||
RandomObjects.randomSource(random()), getResult.getFields()));
|
||||
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
|
||||
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
|
||||
getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON).v1()));
|
||||
return randomFrom(mutations).get();
|
||||
}
|
||||
|
@ -177,12 +188,16 @@ public class GetResultTests extends ESTestCase {
|
|||
final String type = randomAlphaOfLengthBetween(3, 10);
|
||||
final String id = randomAlphaOfLengthBetween(3, 10);
|
||||
final long version;
|
||||
final long seqNo;
|
||||
final long primaryTerm;
|
||||
final boolean exists;
|
||||
BytesReference source = null;
|
||||
Map<String, DocumentField> fields = null;
|
||||
Map<String, DocumentField> expectedFields = null;
|
||||
if (frequently()) {
|
||||
version = randomNonNegativeLong();
|
||||
seqNo = randomNonNegativeLong();
|
||||
primaryTerm = randomLongBetween(1, 100);
|
||||
exists = true;
|
||||
if (frequently()) {
|
||||
source = RandomObjects.randomSource(random());
|
||||
|
@ -193,11 +208,13 @@ public class GetResultTests extends ESTestCase {
|
|||
expectedFields = tuple.v2();
|
||||
}
|
||||
} else {
|
||||
seqNo = UNASSIGNED_SEQ_NO;
|
||||
primaryTerm = 0;
|
||||
version = -1;
|
||||
exists = false;
|
||||
}
|
||||
GetResult getResult = new GetResult(index, type, id, version, exists, source, fields);
|
||||
GetResult expectedGetResult = new GetResult(index, type, id, version, exists, source, expectedFields);
|
||||
GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, fields);
|
||||
GetResult expectedGetResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, expectedFields);
|
||||
return Tuple.tuple(getResult, expectedGetResult);
|
||||
}
|
||||
|
||||
|
|
|
@ -131,7 +131,8 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
|
|||
} catch (IOException ex) {
|
||||
throw new ElasticsearchException("boom", ex);
|
||||
}
|
||||
return new GetResponse(new GetResult(indexedShapeIndex, indexedShapeType, indexedShapeId, 0, true, new BytesArray(json), null));
|
||||
return new GetResponse(new GetResult(indexedShapeIndex, indexedShapeType, indexedShapeId, 0, 1, 0, true, new BytesArray(json),
|
||||
null));
|
||||
}
|
||||
|
||||
@After
|
||||
|
|
|
@ -201,7 +201,8 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
|
|||
} catch (IOException ex) {
|
||||
throw new ElasticsearchException("boom", ex);
|
||||
}
|
||||
return new GetResponse(new GetResult(getRequest.index(), getRequest.type(), getRequest.id(), 0, true, new BytesArray(json), null));
|
||||
return new GetResponse(new GetResult(getRequest.index(), getRequest.type(), getRequest.id(), 0, 1, 0, true,
|
||||
new BytesArray(json), null));
|
||||
}
|
||||
|
||||
public void testNumeric() throws IOException {
|
||||
|
|
|
@ -62,6 +62,7 @@ import org.elasticsearch.test.ESIntegTestCase;
|
|||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
import org.elasticsearch.test.ESIntegTestCase.Scope;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.elasticsearch.test.junit.annotations.TestLogging;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -1007,6 +1008,7 @@ public class IndexStatsIT extends ESIntegTestCase {
|
|||
assertEquals(total, shardTotal);
|
||||
}
|
||||
|
||||
@TestLogging("_root:DEBUG") // this fails at a very low rate on CI: https://github.com/elastic/elasticsearch/issues/32506
|
||||
public void testFilterCacheStats() throws Exception {
|
||||
Settings settings = Settings.builder().put(indexSettings()).put("number_of_replicas", 0).build();
|
||||
assertAcked(prepareCreate("index").setSettings(settings).get());
|
||||
|
|
|
@ -129,6 +129,35 @@ public class CompoundProcessorTests extends ESTestCase {
|
|||
assertThat(processor2.getInvokedCounter(), equalTo(1));
|
||||
}
|
||||
|
||||
public void testSingleProcessorWithOnFailureDropProcessor() throws Exception {
|
||||
TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
|
||||
Processor processor2 = new Processor() {
|
||||
@Override
|
||||
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
|
||||
//Simulates the drop processor
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "drop";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTag() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
|
||||
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
|
||||
CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1),
|
||||
Collections.singletonList(processor2), relativeTimeProvider);
|
||||
assertNull(compoundProcessor.execute(ingestDocument));
|
||||
assertThat(processor1.getInvokedCounter(), equalTo(1));
|
||||
assertStats(compoundProcessor, 1, 1, 0);
|
||||
}
|
||||
|
||||
public void testSingleProcessorWithNestedFailures() throws Exception {
|
||||
TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
|
||||
TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> {
|
||||
|
|
|
@ -76,14 +76,16 @@ public class FullRollingRestartIT extends ESIntegTestCase {
|
|||
internalCluster().startNode(settings);
|
||||
|
||||
// make sure the cluster state is green, and all has been recovered
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
|
||||
.setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
|
||||
|
||||
logger.info("--> add two more nodes");
|
||||
internalCluster().startNode(settings);
|
||||
internalCluster().startNode(settings);
|
||||
|
||||
// make sure the cluster state is green, and all has been recovered
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5"));
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
|
||||
.setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5"));
|
||||
|
||||
logger.info("--> refreshing and checking data");
|
||||
refresh();
|
||||
|
@ -94,11 +96,13 @@ public class FullRollingRestartIT extends ESIntegTestCase {
|
|||
// now start shutting nodes down
|
||||
internalCluster().stopRandomDataNode();
|
||||
// make sure the cluster state is green, and all has been recovered
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4"));
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
|
||||
.setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4"));
|
||||
|
||||
internalCluster().stopRandomDataNode();
|
||||
// make sure the cluster state is green, and all has been recovered
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
|
||||
.setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3"));
|
||||
|
||||
logger.info("--> stopped two nodes, verifying data");
|
||||
refresh();
|
||||
|
@ -109,12 +113,14 @@ public class FullRollingRestartIT extends ESIntegTestCase {
|
|||
// closing the 3rd node
|
||||
internalCluster().stopRandomDataNode();
|
||||
// make sure the cluster state is green, and all has been recovered
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2"));
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
|
||||
.setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2"));
|
||||
|
||||
internalCluster().stopRandomDataNode();
|
||||
|
||||
// make sure the cluster state is yellow, and all has been recovered
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1"));
|
||||
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout)
|
||||
.setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1"));
|
||||
|
||||
logger.info("--> one node left, verifying data");
|
||||
refresh();
|
||||
|
@ -133,7 +139,9 @@ public class FullRollingRestartIT extends ESIntegTestCase {
|
|||
* to relocating to the restarting node since all had 2 shards and now one node has nothing allocated.
|
||||
* We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen.
|
||||
*/
|
||||
prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get();
|
||||
prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6")
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
|
||||
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get();
|
||||
|
||||
for (int i = 0; i < 100; i++) {
|
||||
client().prepareIndex("test", "type1", Long.toString(i))
|
||||
|
@ -152,7 +160,8 @@ public class FullRollingRestartIT extends ESIntegTestCase {
|
|||
|
||||
recoveryResponse = client().admin().indices().prepareRecoveries("test").get();
|
||||
for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) {
|
||||
assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " + recoveryState.getTargetNode()+ "-- \nbefore: \n" + state,
|
||||
assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " +
|
||||
recoveryState.getTargetNode()+ "-- \nbefore: \n" + state,
|
||||
recoveryState.getRecoverySource().getType() != RecoverySource.Type.PEER || recoveryState.getPrimary() == false);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -53,14 +53,18 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS
|
|||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
|
||||
|
||||
@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE")
|
||||
@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE," +
|
||||
"org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE")
|
||||
public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
||||
private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class);
|
||||
|
||||
public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception {
|
||||
logger.info("--> creating test index ...");
|
||||
int numberOfShards = numberOfShards();
|
||||
assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
assertAcked(prepareCreate("test", 1, Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, numberOfShards)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
|
||||
final int totalNumDocs = scaledRandomIntBetween(200, 10000);
|
||||
int waitFor = totalNumDocs / 10;
|
||||
|
@ -92,7 +96,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
|
||||
logger.info("--> waiting for GREEN health status ...");
|
||||
// make sure the cluster state is green, and all has been recovered
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
|
||||
|
||||
logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
|
||||
waitForDocs(totalNumDocs, indexer);
|
||||
|
@ -113,7 +118,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception {
|
||||
logger.info("--> creating test index ...");
|
||||
int numberOfShards = numberOfShards();
|
||||
assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
assertAcked(prepareCreate("test", 1, Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, numberOfShards)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
|
||||
final int totalNumDocs = scaledRandomIntBetween(200, 10000);
|
||||
int waitFor = totalNumDocs / 10;
|
||||
|
@ -142,7 +150,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
allowNodes("test", 4);
|
||||
|
||||
logger.info("--> waiting for GREEN health status ...");
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus());
|
||||
|
||||
|
||||
logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
|
||||
|
@ -164,7 +173,9 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception {
|
||||
logger.info("--> creating test index ...");
|
||||
int numberOfShards = numberOfShards();
|
||||
assertAcked(prepareCreate("test", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
assertAcked(prepareCreate("test", 2, Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
|
||||
final int totalNumDocs = scaledRandomIntBetween(200, 10000);
|
||||
int waitFor = totalNumDocs / 10;
|
||||
|
@ -194,7 +205,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
allowNodes("test", 4);
|
||||
|
||||
logger.info("--> waiting for GREEN health status ...");
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus().setWaitForNoRelocatingShards(true));
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m")
|
||||
.setWaitForGreenStatus()
|
||||
.setWaitForNoRelocatingShards(true));
|
||||
|
||||
logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
|
||||
waitForDocs(totalNumDocs, indexer);
|
||||
|
@ -205,23 +219,31 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
logger.info("--> allow 3 nodes for index [test] ...");
|
||||
allowNodes("test", 3);
|
||||
logger.info("--> waiting for relocations ...");
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m")
|
||||
.setWaitForNoRelocatingShards(true));
|
||||
|
||||
logger.info("--> allow 2 nodes for index [test] ...");
|
||||
allowNodes("test", 2);
|
||||
logger.info("--> waiting for relocations ...");
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m")
|
||||
.setWaitForNoRelocatingShards(true));
|
||||
|
||||
logger.info("--> allow 1 nodes for index [test] ...");
|
||||
allowNodes("test", 1);
|
||||
logger.info("--> waiting for relocations ...");
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m")
|
||||
.setWaitForNoRelocatingShards(true));
|
||||
|
||||
logger.info("--> marking and waiting for indexing threads to stop ...");
|
||||
indexer.stop();
|
||||
logger.info("--> indexing threads stopped");
|
||||
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true));
|
||||
assertNoTimeout(client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID).setTimeout("5m")
|
||||
.setWaitForNoRelocatingShards(true));
|
||||
|
||||
logger.info("--> refreshing the index");
|
||||
refreshAndAssert();
|
||||
|
@ -235,7 +257,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
final int numReplicas = 0;
|
||||
logger.info("--> creating test index ...");
|
||||
int allowNodes = 2;
|
||||
assertAcked(prepareCreate("test", 3, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
assertAcked(prepareCreate("test", 3, Settings.builder()
|
||||
.put(SETTING_NUMBER_OF_SHARDS, numShards)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, numReplicas)
|
||||
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)));
|
||||
|
||||
final int numDocs = scaledRandomIntBetween(200, 9999);
|
||||
|
||||
|
@ -258,7 +283,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
logger.info("--> indexing threads stopped");
|
||||
logger.info("--> bump up number of replicas to 1 and allow all nodes to hold the index");
|
||||
allowNodes("test", 3);
|
||||
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("number_of_replicas", 1)).get());
|
||||
assertAcked(client().admin().indices().prepareUpdateSettings("test")
|
||||
.setSettings(Settings.builder().put("number_of_replicas", 1)).get());
|
||||
ensureGreen(TimeValue.timeValueMinutes(5));
|
||||
|
||||
logger.info("--> refreshing the index");
|
||||
|
@ -273,7 +299,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
SearchResponse[] iterationResults = new SearchResponse[iterations];
|
||||
boolean error = false;
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).addSort("id", SortOrder.ASC).get();
|
||||
SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery())
|
||||
.addSort("id", SortOrder.ASC).get();
|
||||
logSearchResponse(numberOfShards, numberOfDocs, i, searchResponse);
|
||||
iterationResults[i] = searchResponse;
|
||||
if (searchResponse.getHits().getTotalHits().value != numberOfDocs) {
|
||||
|
@ -286,7 +313,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().get();
|
||||
for (ShardStats shardStats : indicesStatsResponse.getShards()) {
|
||||
DocsStats docsStats = shardStats.getStats().docs;
|
||||
logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), shardStats.getShardRouting().primary());
|
||||
logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(),
|
||||
shardStats.getShardRouting().primary());
|
||||
}
|
||||
|
||||
ClusterService clusterService = clusterService();
|
||||
|
@ -332,12 +360,14 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
private void logSearchResponse(int numberOfShards, long numberOfDocs, int iteration, SearchResponse searchResponse) {
|
||||
logger.info("iteration [{}] - successful shards: {} (expected {})", iteration, searchResponse.getSuccessfulShards(), numberOfShards);
|
||||
logger.info("iteration [{}] - successful shards: {} (expected {})", iteration,
|
||||
searchResponse.getSuccessfulShards(), numberOfShards);
|
||||
logger.info("iteration [{}] - failed shards: {} (expected 0)", iteration, searchResponse.getFailedShards());
|
||||
if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) {
|
||||
logger.info("iteration [{}] - shard failures: {}", iteration, Arrays.toString(searchResponse.getShardFailures()));
|
||||
}
|
||||
logger.info("iteration [{}] - returned documents: {} (expected {})", iteration, searchResponse.getHits().getTotalHits().value, numberOfDocs);
|
||||
logger.info("iteration [{}] - returned documents: {} (expected {})", iteration,
|
||||
searchResponse.getHits().getTotalHits().value, numberOfDocs);
|
||||
}
|
||||
|
||||
private void refreshAndAssert() throws Exception {
|
||||
|
|
|
@ -133,7 +133,8 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
|
||||
logger.info("--> start another node");
|
||||
final String node_2 = internalCluster().startNode();
|
||||
ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").execute().actionGet();
|
||||
ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
|
||||
.setWaitForNodes("2").execute().actionGet();
|
||||
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
|
||||
|
||||
logger.info("--> relocate the shard from node1 to node2");
|
||||
|
@ -141,7 +142,8 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
.add(new MoveAllocationCommand("test", 0, node_1, node_2))
|
||||
.execute().actionGet();
|
||||
|
||||
clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
|
||||
clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
|
||||
.setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
|
||||
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
|
||||
|
||||
logger.info("--> verifying count again...");
|
||||
|
@ -155,7 +157,8 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
int numberOfReplicas = randomBoolean() ? 0 : 1;
|
||||
int numberOfNodes = numberOfReplicas == 0 ? 2 : 3;
|
||||
|
||||
logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes);
|
||||
logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})",
|
||||
numberOfRelocations, numberOfReplicas, numberOfNodes);
|
||||
|
||||
String[] nodes = new String[numberOfNodes];
|
||||
logger.info("--> starting [node1] ...");
|
||||
|
@ -172,8 +175,10 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
logger.info("--> starting [node{}] ...", i);
|
||||
nodes[i - 1] = internalCluster().startNode();
|
||||
if (i != numberOfNodes) {
|
||||
ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
|
||||
.setWaitForNodes(Integer.toString(i)).setWaitForGreenStatus().execute().actionGet();
|
||||
ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID)
|
||||
.setWaitForNodes(Integer.toString(i))
|
||||
.setWaitForGreenStatus().execute().actionGet();
|
||||
assertThat(healthResponse.isTimedOut(), equalTo(false));
|
||||
}
|
||||
}
|
||||
|
@ -202,7 +207,10 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
logger.debug("--> flushing");
|
||||
client().admin().indices().prepareFlush().get();
|
||||
}
|
||||
ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
|
||||
ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth()
|
||||
.setWaitForEvents(Priority.LANGUID)
|
||||
.setWaitForNoRelocatingShards(true)
|
||||
.setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
|
||||
assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
|
||||
indexer.pauseIndexing();
|
||||
logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode);
|
||||
|
@ -218,7 +226,8 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
boolean ranOnce = false;
|
||||
for (int i = 0; i < 10; i++) {
|
||||
logger.info("--> START search test round {}", i + 1);
|
||||
SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits();
|
||||
SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery())
|
||||
.setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits();
|
||||
ranOnce = true;
|
||||
if (hits.getTotalHits().value != indexer.totalIndexedDocs()) {
|
||||
int[] hitIds = new int[(int) indexer.totalIndexedDocs()];
|
||||
|
@ -252,7 +261,8 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
int numberOfReplicas = randomBoolean() ? 0 : 1;
|
||||
int numberOfNodes = numberOfReplicas == 0 ? 2 : 3;
|
||||
|
||||
logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes);
|
||||
logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})",
|
||||
numberOfRelocations, numberOfReplicas, numberOfNodes);
|
||||
|
||||
String[] nodes = new String[numberOfNodes];
|
||||
logger.info("--> starting [node_0] ...");
|
||||
|
@ -281,13 +291,15 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
final Semaphore postRecoveryShards = new Semaphore(0);
|
||||
final IndexEventListener listener = new IndexEventListener() {
|
||||
@Override
|
||||
public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) {
|
||||
public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState,
|
||||
IndexShardState currentState, @Nullable String reason) {
|
||||
if (currentState == IndexShardState.POST_RECOVERY) {
|
||||
postRecoveryShards.release();
|
||||
}
|
||||
}
|
||||
};
|
||||
for (MockIndexEventListener.TestEventListener eventListener : internalCluster().getInstances(MockIndexEventListener.TestEventListener.class)) {
|
||||
for (MockIndexEventListener.TestEventListener eventListener : internalCluster()
|
||||
.getInstances(MockIndexEventListener.TestEventListener.class)) {
|
||||
eventListener.setNewDelegate(listener);
|
||||
}
|
||||
|
||||
|
@ -327,7 +339,10 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
indexRandom(true, true, builders2);
|
||||
|
||||
// verify cluster was finished.
|
||||
assertFalse(client().admin().cluster().prepareHealth().setWaitForNoRelocatingShards(true).setWaitForEvents(Priority.LANGUID).setTimeout("30s").get().isTimedOut());
|
||||
assertFalse(client().admin().cluster().prepareHealth()
|
||||
.setWaitForNoRelocatingShards(true)
|
||||
.setWaitForEvents(Priority.LANGUID)
|
||||
.setTimeout("30s").get().isTimedOut());
|
||||
logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode);
|
||||
|
||||
logger.debug("--> verifying all searches return the same number of docs");
|
||||
|
@ -374,17 +389,20 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, p_node);
|
||||
for (DiscoveryNode node : clusterService.state().nodes()) {
|
||||
if (!node.equals(clusterService.localNode())) {
|
||||
mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()), new RecoveryCorruption(corruptionCount));
|
||||
mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()),
|
||||
new RecoveryCorruption(corruptionCount));
|
||||
}
|
||||
}
|
||||
|
||||
client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
|
||||
client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder()
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
|
||||
|
||||
corruptionCount.await();
|
||||
|
||||
logger.info("--> stopping replica assignment");
|
||||
assertAcked(client().admin().cluster().prepareUpdateSettings()
|
||||
.setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")));
|
||||
.setTransientSettings(Settings.builder()
|
||||
.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")));
|
||||
|
||||
logger.info("--> wait for all replica shards to be removed, on all nodes");
|
||||
assertBusy(() -> {
|
||||
|
@ -408,7 +426,8 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
Files.walkFileTree(shardLoc, new SimpleFileVisitor<Path>() {
|
||||
@Override
|
||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||
assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), not(startsWith("recovery.")));
|
||||
assertThat("found a temporary recovery file: " + file, file.getFileName().toString(),
|
||||
not(startsWith("recovery.")));
|
||||
return FileVisitResult.CONTINUE;
|
||||
}
|
||||
});
|
||||
|
@ -496,13 +515,15 @@ public class RelocationIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException {
|
||||
public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request,
|
||||
TransportRequestOptions options) throws IOException {
|
||||
if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
|
||||
RecoveryFileChunkRequest chunkRequest = (RecoveryFileChunkRequest) request;
|
||||
if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) {
|
||||
// corrupting the segments_N files in order to make sure future recovery re-send files
|
||||
logger.debug("corrupting [{}] to {}. file name: [{}]", action, connection.getNode(), chunkRequest.name());
|
||||
assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes : "no internal reference!!";
|
||||
assert chunkRequest.content().toBytesRef().bytes ==
|
||||
chunkRequest.content().toBytesRef().bytes : "no internal reference!!";
|
||||
byte[] array = chunkRequest.content().toBytesRef().bytes;
|
||||
array[0] = (byte) ~array[0]; // flip one byte in the content
|
||||
corruptionCount.countDown();
|
||||
|
|
|
@ -89,7 +89,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
|
|||
// we have no replicas so far and make sure that we allocate the primary on the lucky node
|
||||
assertAcked(prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=text", "the_id", "type=text")
|
||||
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards())
|
||||
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards())
|
||||
.put("index.routing.allocation.include._name", primariesNode.getNode().getName()))); // only allocate on the lucky node
|
||||
|
||||
// index some docs and check if they are coming back
|
||||
|
@ -112,7 +113,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
|
|||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
final AtomicBoolean truncate = new AtomicBoolean(true);
|
||||
for (NodeStats dataNode : dataNodeStats) {
|
||||
MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getNode().getName()));
|
||||
MockTransportService mockTransportService = ((MockTransportService) internalCluster()
|
||||
.getInstance(TransportService.class, dataNode.getNode().getName()));
|
||||
mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()),
|
||||
(connection, requestId, action, request, options) -> {
|
||||
if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
|
||||
|
|
|
@ -23,25 +23,38 @@ import org.elasticsearch.ResourceNotFoundException;
|
|||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener;
|
||||
import org.elasticsearch.test.rest.FakeRestChannel;
|
||||
import org.elasticsearch.test.rest.FakeRestRequest;
|
||||
import org.elasticsearch.test.rest.RestActionTestCase;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
import static org.elasticsearch.rest.RestStatus.OK;
|
||||
import static org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class RestGetSourceActionTests extends ESTestCase {
|
||||
public class RestGetSourceActionTests extends RestActionTestCase {
|
||||
|
||||
private static RestRequest request = new FakeRestRequest();
|
||||
private static FakeRestChannel channel = new FakeRestChannel(request, true, 0);
|
||||
private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request);
|
||||
|
||||
@Before
|
||||
public void setUpAction() {
|
||||
new RestGetSourceAction(Settings.EMPTY, controller());
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void cleanupReferences() {
|
||||
request = null;
|
||||
|
@ -49,9 +62,41 @@ public class RestGetSourceActionTests extends ESTestCase {
|
|||
listener = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* test deprecation is logged if type is used in path
|
||||
*/
|
||||
public void testTypeInPath() {
|
||||
for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
|
||||
RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
|
||||
.withMethod(method)
|
||||
.withPath("/some_index/some_type/id/_source")
|
||||
.build();
|
||||
dispatchRequest(request);
|
||||
assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* test deprecation is logged if type is used as parameter
|
||||
*/
|
||||
public void testTypeParameter() {
|
||||
Map<String, String> params = new HashMap<>();
|
||||
params.put("type", "some_type");
|
||||
for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
|
||||
RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
|
||||
.withMethod(method)
|
||||
.withPath("/some_index/_source/id")
|
||||
.withParams(params)
|
||||
.build();
|
||||
dispatchRequest(request);
|
||||
assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
|
||||
}
|
||||
}
|
||||
|
||||
public void testRestGetSourceAction() throws Exception {
|
||||
final BytesReference source = new BytesArray("{\"foo\": \"bar\"}");
|
||||
final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, source, emptyMap()));
|
||||
final GetResponse response =
|
||||
new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, source, emptyMap()));
|
||||
|
||||
final RestResponse restResponse = listener.buildResponse(response);
|
||||
|
||||
|
@ -61,7 +106,8 @@ public class RestGetSourceActionTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRestGetSourceActionWithMissingDocument() {
|
||||
final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, false, null, emptyMap()));
|
||||
final GetResponse response =
|
||||
new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap()));
|
||||
|
||||
final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response));
|
||||
|
||||
|
@ -69,7 +115,8 @@ public class RestGetSourceActionTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRestGetSourceActionWithMissingDocumentSource() {
|
||||
final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, null, emptyMap()));
|
||||
final GetResponse response =
|
||||
new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, null, emptyMap()));
|
||||
|
||||
final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response));
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.search;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.LuceneTests;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
|
@ -31,23 +32,36 @@ import org.elasticsearch.common.xcontent.XContentType;
|
|||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
import org.elasticsearch.test.RandomObjects;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
|
||||
public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSortValues> {
|
||||
|
||||
public static SearchSortValues createTestItem(XContentType xContentType, boolean transportSerialization) {
|
||||
int size = randomIntBetween(1, 20);
|
||||
Object[] values = new Object[size];
|
||||
DocValueFormat[] sortValueFormats = new DocValueFormat[size];
|
||||
for (int i = 0; i < size; i++) {
|
||||
Object sortValue = randomSortValue(xContentType, transportSerialization);
|
||||
values[i] = sortValue;
|
||||
//make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef)
|
||||
sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat();
|
||||
if (transportSerialization) {
|
||||
DocValueFormat[] sortValueFormats = new DocValueFormat[size];
|
||||
for (int i = 0; i < size; i++) {
|
||||
Object sortValue = randomSortValue(xContentType, transportSerialization);
|
||||
values[i] = sortValue;
|
||||
//make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef)
|
||||
sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat();
|
||||
}
|
||||
return new SearchSortValues(values, sortValueFormats);
|
||||
} else {
|
||||
//xcontent serialization doesn't write/parse the raw sort values, only the formatted ones
|
||||
for (int i = 0; i < size; i++) {
|
||||
Object sortValue = randomSortValue(xContentType, transportSerialization);
|
||||
//make sure that BytesRef are not provided as formatted values
|
||||
sortValue = sortValue instanceof BytesRef ? DocValueFormat.RAW.format((BytesRef)sortValue) : sortValue;
|
||||
values[i] = sortValue;
|
||||
}
|
||||
return new SearchSortValues(values);
|
||||
}
|
||||
return new SearchSortValues(values, sortValueFormats);
|
||||
}
|
||||
|
||||
private static Object randomSortValue(XContentType xContentType, boolean transportSerialization) {
|
||||
|
@ -79,7 +93,7 @@ public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSor
|
|||
|
||||
@Override
|
||||
protected SearchSortValues createTestInstance() {
|
||||
return createTestItem(randomFrom(XContentType.values()), true);
|
||||
return createTestItem(randomFrom(XContentType.values()), randomBoolean());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -113,20 +127,32 @@ public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSor
|
|||
|
||||
@Override
|
||||
protected SearchSortValues mutateInstance(SearchSortValues instance) {
|
||||
Object[] sortValues = instance.sortValues();
|
||||
if (sortValues.length == 0) {
|
||||
return createTestInstance();
|
||||
}
|
||||
Object[] sortValues = instance.getFormattedSortValues();
|
||||
if (randomBoolean()) {
|
||||
return new SearchSortValues(new Object[0]);
|
||||
}
|
||||
Object[] values = Arrays.copyOf(sortValues, sortValues.length + 1);
|
||||
values[sortValues.length] = randomSortValue(randomFrom(XContentType.values()), true);
|
||||
values[sortValues.length] = randomSortValue(randomFrom(XContentType.values()), randomBoolean());
|
||||
return new SearchSortValues(values);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SearchSortValues copyInstance(SearchSortValues instance, Version version) {
|
||||
return new SearchSortValues(Arrays.copyOf(instance.sortValues(), instance.sortValues().length));
|
||||
//TODO rename and update version after backport
|
||||
public void testSerializationPre70() throws IOException {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0));
|
||||
SearchSortValues original = createTestInstance();
|
||||
SearchSortValues deserialized = copyInstance(original, version);
|
||||
assertArrayEquals(original.getFormattedSortValues(), deserialized.getFormattedSortValues());
|
||||
assertEquals(0, deserialized.getRawSortValues().length);
|
||||
}
|
||||
|
||||
//TODO rename method and adapt versions after backport
|
||||
public void testReadFromPre70() throws IOException {
|
||||
try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode("AwIAAAABAQEyBUAIAAAAAAAAAAAAAAAA"))) {
|
||||
in.setVersion(VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)));
|
||||
SearchSortValues deserialized = new SearchSortValues(in);
|
||||
SearchSortValues expected = new SearchSortValues(new Object[]{1, "2", 3d});
|
||||
assertEquals(expected, deserialized);
|
||||
assertEquals(0, deserialized.getRawSortValues().length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
|
|||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -80,6 +81,7 @@ import java.util.concurrent.atomic.AtomicInteger;
|
|||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.emptySet;
|
||||
|
@ -164,9 +166,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
updateSeedNodes(connection, Arrays.asList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
|
@ -206,9 +208,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
updateSeedNodes(connection, Arrays.asList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
|
@ -259,9 +261,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
updateSeedNodes(connection, Arrays.asList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
|
@ -282,7 +284,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
knownNodes.add(discoverableTransport.getLocalDiscoNode());
|
||||
knownNodes.add(incompatibleTransport.getLocalDiscoNode());
|
||||
Collections.shuffle(knownNodes, random());
|
||||
List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> incompatibleSeedNode, () -> seedNode);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = Arrays.asList(
|
||||
Tuple.tuple(incompatibleSeedNode.toString(), () -> incompatibleSeedNode),
|
||||
Tuple.tuple(seedNode.toString(), () -> seedNode));
|
||||
Collections.shuffle(seedNodes, random());
|
||||
|
||||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
|
@ -317,9 +321,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
updateSeedNodes(connection, Arrays.asList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
assertFalse(connectionManager.nodeConnected(spareNode));
|
||||
|
@ -367,9 +371,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
updateSeedNodes(connection, Arrays.asList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
if (rejectedNode.equals(seedNode)) {
|
||||
assertFalse(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
|
@ -382,11 +386,15 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
}
|
||||
private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes) throws Exception {
|
||||
private void updateSeedNodes(
|
||||
final RemoteClusterConnection connection, final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes) throws Exception {
|
||||
updateSeedNodes(connection, seedNodes, null);
|
||||
}
|
||||
|
||||
private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes, String proxyAddress)
|
||||
private void updateSeedNodes(
|
||||
final RemoteClusterConnection connection,
|
||||
final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes,
|
||||
final String proxyAddress)
|
||||
throws Exception {
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
|
||||
|
@ -428,9 +436,11 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(() -> seedNode)));
|
||||
expectThrows(
|
||||
Exception.class,
|
||||
() -> updateSeedNodes(connection, Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode))));
|
||||
assertFalse(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
}
|
||||
|
@ -481,7 +491,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
|
||||
connection.addConnectedNode(seedNode);
|
||||
for (DiscoveryNode node : knownNodes) {
|
||||
final Transport.Connection transportConnection = connection.getConnection(node);
|
||||
|
@ -524,7 +534,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
CountDownLatch listenerCalled = new CountDownLatch(1);
|
||||
AtomicReference<Exception> exceptionReference = new AtomicReference<>();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ActionListener<Void> listener = ActionListener.wrap(x -> {
|
||||
listenerCalled.countDown();
|
||||
fail("expected exception");
|
||||
|
@ -532,7 +542,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
exceptionReference.set(x);
|
||||
listenerCalled.countDown();
|
||||
});
|
||||
connection.updateSeedNodes(null, Arrays.asList(() -> seedNode), listener);
|
||||
connection.updateSeedNodes(null, seedNodes(seedNode), listener);
|
||||
acceptedLatch.await();
|
||||
connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
|
@ -548,6 +558,18 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
private List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes(final DiscoveryNode... seedNodes) {
|
||||
if (seedNodes.length == 0) {
|
||||
return Collections.emptyList();
|
||||
} else if (seedNodes.length == 1) {
|
||||
return Collections.singletonList(Tuple.tuple(seedNodes[0].toString(), () -> seedNodes[0]));
|
||||
} else {
|
||||
return Arrays.stream(seedNodes)
|
||||
.map(s -> Tuple.tuple(s.toString(), (Supplier<DiscoveryNode>)() -> s))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
}
|
||||
|
||||
public void testFetchShards() throws Exception {
|
||||
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
|
||||
try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
|
||||
|
@ -559,11 +581,11 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
List<Supplier<DiscoveryNode>> nodes = Collections.singletonList(() -> seedNode);
|
||||
final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode);
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
nodes, service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes, service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
if (randomBoolean()) {
|
||||
updateSeedNodes(connection, nodes);
|
||||
updateSeedNodes(connection, seedNodes);
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
connection.updateSkipUnavailable(randomBoolean());
|
||||
|
@ -599,9 +621,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
List<Supplier<DiscoveryNode>> nodes = Collections.singletonList(() -> seedNode);
|
||||
final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode);
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
nodes, service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes, service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
SearchRequest request = new SearchRequest("test-index");
|
||||
Thread[] threads = new Thread[10];
|
||||
for (int i = 0; i < threads.length; i++) {
|
||||
|
@ -655,7 +677,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Collections.singletonList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
|
||||
SearchRequest request = new SearchRequest("test-index");
|
||||
|
@ -759,7 +781,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
knownNodes.add(discoverableTransport.getLocalDiscoNode());
|
||||
knownNodes.add(seedTransport1.getLocalDiscoNode());
|
||||
Collections.shuffle(knownNodes, random());
|
||||
List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode1, seedNode);
|
||||
Collections.shuffle(seedNodes, random());
|
||||
|
||||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
|
@ -839,7 +861,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
knownNodes.add(discoverableTransport.getLocalDiscoNode());
|
||||
knownNodes.add(seedTransport1.getLocalDiscoNode());
|
||||
Collections.shuffle(knownNodes, random());
|
||||
List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode1, seedNode);
|
||||
Collections.shuffle(seedNodes, random());
|
||||
|
||||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
|
@ -926,7 +948,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
knownNodes.add(transport3.getLocalDiscoNode());
|
||||
knownNodes.add(transport2.getLocalDiscoNode());
|
||||
Collections.shuffle(knownNodes, random());
|
||||
List<Supplier<DiscoveryNode>> seedNodes = Arrays.asList(() -> node3, () -> node1, () -> node2);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(node3, node1, node2);
|
||||
Collections.shuffle(seedNodes, random());
|
||||
|
||||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
|
@ -958,44 +980,32 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRemoteConnectionInfo() throws IOException {
|
||||
RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 3, TimeValue.timeValueMinutes(30), false);
|
||||
RemoteConnectionInfo stats =
|
||||
new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false);
|
||||
assertSerialization(stats);
|
||||
|
||||
RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 4, TimeValue.timeValueMinutes(30), true);
|
||||
RemoteConnectionInfo stats1 =
|
||||
new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 4, TimeValue.timeValueMinutes(30), true);
|
||||
assertSerialization(stats1);
|
||||
assertNotEquals(stats, stats1);
|
||||
|
||||
stats1 = new RemoteConnectionInfo("test_cluster_1",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 3, TimeValue.timeValueMinutes(30), false);
|
||||
stats1 = new RemoteConnectionInfo("test_cluster_1", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false);
|
||||
assertSerialization(stats1);
|
||||
assertNotEquals(stats, stats1);
|
||||
|
||||
stats1 = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 15)),
|
||||
4, 3, TimeValue.timeValueMinutes(30), false);
|
||||
stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:15"), 4, 3, TimeValue.timeValueMinutes(30), false);
|
||||
assertSerialization(stats1);
|
||||
assertNotEquals(stats, stats1);
|
||||
|
||||
stats1 = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 3, TimeValue.timeValueMinutes(30), true);
|
||||
stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true);
|
||||
assertSerialization(stats1);
|
||||
assertNotEquals(stats, stats1);
|
||||
|
||||
stats1 = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 3, TimeValue.timeValueMinutes(325), true);
|
||||
stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(325), true);
|
||||
assertSerialization(stats1);
|
||||
assertNotEquals(stats, stats1);
|
||||
|
||||
stats1 = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
5, 3, TimeValue.timeValueMinutes(30), false);
|
||||
stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 5, 3, TimeValue.timeValueMinutes(30), false);
|
||||
assertSerialization(stats1);
|
||||
assertNotEquals(stats, stats1);
|
||||
}
|
||||
|
@ -1016,9 +1026,8 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
public void testRemoteConnectionInfoBwComp() throws IOException {
|
||||
final Version version = VersionUtils.randomVersionBetween(random(),
|
||||
Version.V_6_1_0, VersionUtils.getPreviousVersion(Version.V_7_0_0));
|
||||
RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster",
|
||||
Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
|
||||
RemoteConnectionInfo expected =
|
||||
new RemoteConnectionInfo("test_cluster", Arrays.asList("0.0.0.0:1"), 4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
|
||||
|
||||
// This version was created using the serialization code in use from 6.1 but before 7.0
|
||||
String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIA";
|
||||
|
@ -1042,27 +1051,25 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRenderConnectionInfoXContent() throws IOException {
|
||||
RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 3, TimeValue.timeValueMinutes(30), true);
|
||||
RemoteConnectionInfo stats =
|
||||
new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true);
|
||||
stats = assertSerialization(stats);
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
stats.toXContent(builder, null);
|
||||
builder.endObject();
|
||||
assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"connected\":true," +
|
||||
assertEquals("{\"test_cluster\":{\"seeds\":[\"seed:1\"],\"connected\":true," +
|
||||
"\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," +
|
||||
"\"skip_unavailable\":true}}", Strings.toString(builder));
|
||||
|
||||
stats = new RemoteConnectionInfo("some_other_cluster",
|
||||
Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1), new TransportAddress(TransportAddress.META_ADDRESS, 2)),
|
||||
2, 0, TimeValue.timeValueSeconds(30), false);
|
||||
stats = new RemoteConnectionInfo(
|
||||
"some_other_cluster", Arrays.asList("seed:1", "seed:2"), 2, 0, TimeValue.timeValueSeconds(30), false);
|
||||
stats = assertSerialization(stats);
|
||||
builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
stats.toXContent(builder, null);
|
||||
builder.endObject();
|
||||
assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"],"
|
||||
assertEquals("{\"some_other_cluster\":{\"seeds\":[\"seed:1\",\"seed:2\"],"
|
||||
+ "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," +
|
||||
"\"skip_unavailable\":false}}", Strings.toString(builder));
|
||||
}
|
||||
|
@ -1081,7 +1088,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
assertFalse(connectionManager.nodeConnected(seedNode));
|
||||
assertFalse(connectionManager.nodeConnected(discoverableNode));
|
||||
|
@ -1131,9 +1138,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
if (randomBoolean()) {
|
||||
updateSeedNodes(connection, Arrays.asList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
}
|
||||
CountDownLatch responseLatch = new CountDownLatch(1);
|
||||
AtomicReference<Function<String, DiscoveryNode>> reference = new AtomicReference<>();
|
||||
|
@ -1165,14 +1172,14 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
List<MockTransportService> discoverableTransports = new CopyOnWriteArrayList<>();
|
||||
try {
|
||||
final int numDiscoverableNodes = randomIntBetween(5, 20);
|
||||
List<Supplier<DiscoveryNode>> discoverableNodes = new ArrayList<>(numDiscoverableNodes);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> discoverableNodes = new ArrayList<>(numDiscoverableNodes);
|
||||
for (int i = 0; i < numDiscoverableNodes; i++ ) {
|
||||
MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT);
|
||||
discoverableNodes.add(transportService::getLocalDiscoNode);
|
||||
discoverableNodes.add(Tuple.tuple("discoverable_node" + i, transportService::getLocalDiscoNode));
|
||||
discoverableTransports.add(transportService);
|
||||
}
|
||||
|
||||
List<Supplier<DiscoveryNode>> seedNodes = randomSubsetOf(discoverableNodes);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = randomSubsetOf(discoverableNodes);
|
||||
Collections.shuffle(seedNodes, random());
|
||||
|
||||
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
|
||||
|
@ -1221,7 +1228,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
discoverableTransports.add(transportService);
|
||||
connection.addConnectedNode(transportService.getLocalDiscoNode());
|
||||
} else {
|
||||
DiscoveryNode node = randomFrom(discoverableNodes).get();
|
||||
DiscoveryNode node = randomFrom(discoverableNodes).v2().get();
|
||||
connection.onNodeDisconnected(node);
|
||||
}
|
||||
}
|
||||
|
@ -1269,14 +1276,16 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList( () -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
ConnectionManager connectionManager = connection.getConnectionManager();
|
||||
updateSeedNodes(connection, Collections.singletonList(() -> seedNode));
|
||||
updateSeedNodes(connection, seedNodes(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(seedNode));
|
||||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
List<Supplier<DiscoveryNode>> discoveryNodes =
|
||||
Arrays.asList(otherClusterTransport::getLocalDiscoNode, () -> seedNode);
|
||||
List<Tuple<String, Supplier<DiscoveryNode>>> discoveryNodes =
|
||||
Arrays.asList(
|
||||
Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode),
|
||||
Tuple.tuple(seedNode.toString(), () -> seedNode));
|
||||
Collections.shuffle(discoveryNodes, random());
|
||||
updateSeedNodes(connection, discoveryNodes);
|
||||
assertTrue(connectionManager.nodeConnected(seedNode));
|
||||
|
@ -1287,7 +1296,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
assertTrue(connectionManager.nodeConnected(discoverableNode));
|
||||
assertTrue(connection.assertNoRunningConnections());
|
||||
IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () ->
|
||||
updateSeedNodes(connection, Arrays.asList(() -> otherClusterTransport.getLocalDiscoNode())));
|
||||
updateSeedNodes(connection, Arrays.asList(Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode))));
|
||||
assertThat(illegalStateException.getMessage(),
|
||||
startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" +
|
||||
" - {other_cluster_discoverable_node}"));
|
||||
|
@ -1339,7 +1348,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Collections.singletonList(() -> connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
|
||||
seedNodes(connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) {
|
||||
connection.addConnectedNode(connectedNode);
|
||||
for (int i = 0; i < 10; i++) {
|
||||
//always a direct connection as the remote node is already connected
|
||||
|
@ -1376,10 +1385,10 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
CountDownLatch multipleResolveLatch = new CountDownLatch(2);
|
||||
Supplier<DiscoveryNode> seedSupplier = () -> {
|
||||
Tuple<String, Supplier<DiscoveryNode>> seedSupplier = Tuple.tuple(seedNode.toString(), () -> {
|
||||
multipleResolveLatch.countDown();
|
||||
return seedNode;
|
||||
};
|
||||
});
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, null)) {
|
||||
updateSeedNodes(connection, Arrays.asList(seedSupplier));
|
||||
|
@ -1409,9 +1418,9 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
threadPool, null, Collections.emptySet())) {
|
||||
service.start();
|
||||
service.acceptIncomingRequests();
|
||||
Supplier<DiscoveryNode> seedSupplier = () ->
|
||||
RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true);
|
||||
assertEquals("node_0", seedSupplier.get().getAttributes().get("server_name"));
|
||||
Tuple<String, Supplier<DiscoveryNode>> seedSupplier = Tuple.tuple("node_0", () ->
|
||||
RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true));
|
||||
assertEquals("node_0", seedSupplier.v2().get().getAttributes().get("server_name"));
|
||||
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
|
||||
Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, proxyAddress)) {
|
||||
updateSeedNodes(connection, Arrays.asList(seedSupplier), proxyAddress);
|
||||
|
|
|
@ -125,41 +125,42 @@ public class RemoteClusterServiceTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testBuildRemoteClustersDynamicConfig() throws Exception {
|
||||
Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig(
|
||||
Settings.builder()
|
||||
.put("cluster.remote.foo.seeds", "192.168.0.1:8080")
|
||||
.put("cluster.remote.bar.seeds", "[::1]:9090")
|
||||
.put("cluster.remote.boom.seeds", "boom-node1.internal:1000")
|
||||
.put("cluster.remote.boom.proxy", "foo.bar.com:1234")
|
||||
.put("search.remote.quux.seeds", "quux:9300")
|
||||
.put("search.remote.quux.proxy", "quux-proxy:19300")
|
||||
.build());
|
||||
Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> map =
|
||||
RemoteClusterService.buildRemoteClustersDynamicConfig(
|
||||
Settings.builder()
|
||||
.put("cluster.remote.foo.seeds", "192.168.0.1:8080")
|
||||
.put("cluster.remote.bar.seeds", "[::1]:9090")
|
||||
.put("cluster.remote.boom.seeds", "boom-node1.internal:1000")
|
||||
.put("cluster.remote.boom.proxy", "foo.bar.com:1234")
|
||||
.put("search.remote.quux.seeds", "quux:9300")
|
||||
.put("search.remote.quux.proxy", "quux-proxy:19300")
|
||||
.build());
|
||||
assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux")));
|
||||
assertThat(map.get("foo").v2(), hasSize(1));
|
||||
assertThat(map.get("bar").v2(), hasSize(1));
|
||||
assertThat(map.get("boom").v2(), hasSize(1));
|
||||
assertThat(map.get("quux").v2(), hasSize(1));
|
||||
|
||||
DiscoveryNode foo = map.get("foo").v2().get(0).get();
|
||||
DiscoveryNode foo = map.get("foo").v2().get(0).v2().get();
|
||||
assertEquals("", map.get("foo").v1());
|
||||
assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080)));
|
||||
assertEquals(foo.getId(), "foo#192.168.0.1:8080");
|
||||
assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
|
||||
|
||||
DiscoveryNode bar = map.get("bar").v2().get(0).get();
|
||||
DiscoveryNode bar = map.get("bar").v2().get(0).v2().get();
|
||||
assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090)));
|
||||
assertEquals(bar.getId(), "bar#[::1]:9090");
|
||||
assertEquals("", map.get("bar").v1());
|
||||
assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
|
||||
|
||||
DiscoveryNode boom = map.get("boom").v2().get(0).get();
|
||||
DiscoveryNode boom = map.get("boom").v2().get(0).v2().get();
|
||||
assertEquals(boom.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0));
|
||||
assertEquals("boom-node1.internal", boom.getHostName());
|
||||
assertEquals(boom.getId(), "boom#boom-node1.internal:1000");
|
||||
assertEquals("foo.bar.com:1234", map.get("boom").v1());
|
||||
assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
|
||||
|
||||
DiscoveryNode quux = map.get("quux").v2().get(0).get();
|
||||
DiscoveryNode quux = map.get("quux").v2().get(0).v2().get();
|
||||
assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0));
|
||||
assertEquals("quux", quux.getHostName());
|
||||
assertEquals(quux.getId(), "quux#quux:9300");
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.elasticsearch.action.DocWriteRequest;
|
|||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
|
@ -311,12 +312,11 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class);
|
||||
|
||||
client().admin().indices().prepareRefresh().execute().actionGet();
|
||||
// TODO: Enable once get response returns seqNo
|
||||
// for (int i = 0; i < 10; i++) {
|
||||
// final GetResponse response = client().prepareGet("test", "type", "1").get();
|
||||
// assertThat(response.getSeqNo(), equalTo(1L));
|
||||
// assertThat(response.getPrimaryTerm(), equalTo(1L));
|
||||
// }
|
||||
for (int i = 0; i < 10; i++) {
|
||||
final GetResponse response = client().prepareGet("test", "type", "1").get();
|
||||
assertThat(response.getSeqNo(), equalTo(1L));
|
||||
assertThat(response.getPrimaryTerm(), equalTo(1L));
|
||||
}
|
||||
|
||||
// search with versioning
|
||||
for (int i = 0; i < 10; i++) {
|
||||
|
|
|
@ -27,6 +27,6 @@ public abstract class AbstractWireSerializingTestCase<T extends Writeable> exten
|
|||
|
||||
@Override
|
||||
protected T copyInstance(T instance, Version version) throws IOException {
|
||||
return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader());
|
||||
return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), version);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -135,13 +135,16 @@ public final class RandomObjects {
|
|||
}
|
||||
}
|
||||
if (value instanceof Float) {
|
||||
if (xContentType == XContentType.CBOR) {
|
||||
//with CBOR we get back a float
|
||||
return value;
|
||||
}
|
||||
if (xContentType == XContentType.SMILE) {
|
||||
//with SMILE we get back a double (this will change in Jackson 2.9 where it will return a Float)
|
||||
return ((Float)value).doubleValue();
|
||||
} else {
|
||||
//with JSON AND YAML we get back a double, but with float precision.
|
||||
return Double.parseDouble(value.toString());
|
||||
}
|
||||
//with JSON AND YAML we get back a double, but with float precision.
|
||||
return Double.parseDouble(value.toString());
|
||||
}
|
||||
if (value instanceof Byte) {
|
||||
return ((Byte)value).intValue();
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
[[security-api-invalidate-token]]
|
||||
=== Invalidate token API
|
||||
|
||||
Invalidates an access token or a refresh token.
|
||||
Invalidates one or more access tokens or refresh tokens.
|
||||
|
||||
==== Request
|
||||
|
||||
|
@ -19,21 +19,31 @@ can no longer be used. That time period is defined by the
|
|||
The refresh tokens returned by the <<security-api-get-token,get token API>> are
|
||||
only valid for 24 hours. They can also be used exactly once.
|
||||
|
||||
If you want to invalidate an access or refresh token immediately, use this invalidate token API.
|
||||
If you want to invalidate one or more access or refresh tokens immediately, use this invalidate token API.
|
||||
|
||||
|
||||
==== Request Body
|
||||
|
||||
The following parameters can be specified in the body of a DELETE request and
|
||||
pertain to invalidating a token:
|
||||
pertain to invalidating tokens:
|
||||
|
||||
`token` (optional)::
|
||||
(string) An access token. This parameter cannot be used when `refresh_token` is used.
|
||||
(string) An access token. This parameter cannot be used any of `refresh_token`, `realm_name` or
|
||||
`username` are used.
|
||||
|
||||
`refresh_token` (optional)::
|
||||
(string) A refresh token. This parameter cannot be used when `token` is used.
|
||||
(string) A refresh token. This parameter cannot be used any of `refresh_token`, `realm_name` or
|
||||
`username` are used.
|
||||
|
||||
NOTE: One of `token` or `refresh_token` parameters is required.
|
||||
`realm_name` (optional)::
|
||||
(string) The name of an authentication realm. This parameter cannot be used with either `refresh_token` or `token`.
|
||||
|
||||
`username` (optional)::
|
||||
(string) The username of a user. This parameter cannot be used with either `refresh_token` or `token`
|
||||
|
||||
NOTE: While all parameters are optional, at least one of them is required. More specifically, either one of `token`
|
||||
or `refresh_token` parameters is required. If none of these two are specified, then `realm_name` and/or `username`
|
||||
need to be specified.
|
||||
|
||||
==== Examples
|
||||
|
||||
|
@ -59,15 +69,75 @@ DELETE /_security/oauth2/token
|
|||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
A successful call returns a JSON structure that indicates whether the token
|
||||
has already been invalidated.
|
||||
The following example invalidates all access tokens and refresh tokens for the `saml1` realm immediately:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
DELETE /_xpack/security/oauth2/token
|
||||
{
|
||||
"created" : true <1>
|
||||
"realm_name" : "saml1"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
<1> When a token has already been invalidated, `created` is set to false.
|
||||
The following example invalidates all access tokens and refresh tokens for the user `myuser` in all realms immediately:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
DELETE /_xpack/security/oauth2/token
|
||||
{
|
||||
"username" : "myuser"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
Finally, the following example invalidates all access tokens and refresh tokens for the user `myuser` in
|
||||
the `saml1` realm immediately:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
DELETE /_xpack/security/oauth2/token
|
||||
{
|
||||
"username" : "myuser",
|
||||
"realm_name" : "saml1"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
A successful call returns a JSON structure that contains the number of tokens that were invalidated, the number
|
||||
of tokens that had already been invalidated, and potentially a list of errors encountered while invalidating
|
||||
specific tokens.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"invalidated_tokens":9, <1>
|
||||
"previously_invalidated_tokens":15, <2>
|
||||
"error_count":2, <3>
|
||||
"error_details":[ <4>
|
||||
{
|
||||
"type":"exception",
|
||||
"reason":"Elasticsearch exception [type=exception, reason=foo]",
|
||||
"caused_by":{
|
||||
"type":"exception",
|
||||
"reason":"Elasticsearch exception [type=illegal_argument_exception, reason=bar]"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type":"exception",
|
||||
"reason":"Elasticsearch exception [type=exception, reason=boo]",
|
||||
"caused_by":{
|
||||
"type":"exception",
|
||||
"reason":"Elasticsearch exception [type=illegal_argument_exception, reason=far]"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
<1> The number of the tokens that were invalidated as part of this request.
|
||||
<2> The number of tokens that were already invalidated.
|
||||
<3> The number of errors that were encountered when invalidating the tokens.
|
||||
<4> Details about these errors. This field is not present in the response when
|
||||
`error_count` is 0.
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
|
||||
package org.elasticsearch.xpack.ccr;
|
||||
|
||||
import org.apache.lucene.util.SetOnce;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
|
@ -111,7 +110,6 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E
|
|||
private final boolean enabled;
|
||||
private final Settings settings;
|
||||
private final CcrLicenseChecker ccrLicenseChecker;
|
||||
private final SetOnce<CcrRepositoryManager> repositoryManager = new SetOnce<>();
|
||||
private Client client;
|
||||
|
||||
/**
|
||||
|
@ -152,11 +150,10 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E
|
|||
return emptyList();
|
||||
}
|
||||
|
||||
this.repositoryManager.set(new CcrRepositoryManager(settings, clusterService, client));
|
||||
|
||||
return Arrays.asList(
|
||||
ccrLicenseChecker,
|
||||
new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker)
|
||||
new CcrRepositoryManager(settings, clusterService, client),
|
||||
new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker, threadPool::relativeTimeInMillis)
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequest;
|
|||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.component.AbstractLifecycleComponent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.transport.RemoteClusterAware;
|
||||
import org.elasticsearch.xpack.ccr.action.repositories.DeleteInternalCcrRepositoryAction;
|
||||
|
@ -18,31 +19,70 @@ import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryA
|
|||
import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryRequest;
|
||||
import org.elasticsearch.xpack.ccr.repository.CcrRepository;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
class CcrRepositoryManager extends RemoteClusterAware {
|
||||
class CcrRepositoryManager extends AbstractLifecycleComponent {
|
||||
|
||||
private final Client client;
|
||||
private final RemoteSettingsUpdateListener updateListener;
|
||||
|
||||
CcrRepositoryManager(Settings settings, ClusterService clusterService, Client client) {
|
||||
super(settings);
|
||||
this.client = client;
|
||||
listenForUpdates(clusterService.getClusterSettings());
|
||||
updateListener = new RemoteSettingsUpdateListener(settings);
|
||||
updateListener.listenForUpdates(clusterService.getClusterSettings());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxyAddress) {
|
||||
String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias;
|
||||
if (addresses.isEmpty()) {
|
||||
DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName);
|
||||
PlainActionFuture<DeleteInternalCcrRepositoryAction.DeleteInternalCcrRepositoryResponse> f = PlainActionFuture.newFuture();
|
||||
client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f);
|
||||
assert f.isDone() : "Should be completed as it is executed synchronously";
|
||||
} else {
|
||||
ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE);
|
||||
PlainActionFuture<PutInternalCcrRepositoryAction.PutInternalCcrRepositoryResponse> f = PlainActionFuture.newFuture();
|
||||
client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f);
|
||||
assert f.isDone() : "Should be completed as it is executed synchronously";
|
||||
protected void doStart() {
|
||||
updateListener.init();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doStop() {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doClose() throws IOException {
|
||||
}
|
||||
|
||||
private void putRepository(String repositoryName) {
|
||||
ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE);
|
||||
PlainActionFuture<PutInternalCcrRepositoryAction.PutInternalCcrRepositoryResponse> f = PlainActionFuture.newFuture();
|
||||
client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f);
|
||||
assert f.isDone() : "Should be completed as it is executed synchronously";
|
||||
}
|
||||
|
||||
private void deleteRepository(String repositoryName) {
|
||||
DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName);
|
||||
PlainActionFuture<DeleteInternalCcrRepositoryAction.DeleteInternalCcrRepositoryResponse> f = PlainActionFuture.newFuture();
|
||||
client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f);
|
||||
assert f.isDone() : "Should be completed as it is executed synchronously";
|
||||
}
|
||||
|
||||
private class RemoteSettingsUpdateListener extends RemoteClusterAware {
|
||||
|
||||
private RemoteSettingsUpdateListener(Settings settings) {
|
||||
super(settings);
|
||||
}
|
||||
|
||||
void init() {
|
||||
Set<String> clusterAliases = buildRemoteClustersDynamicConfig(settings).keySet();
|
||||
for (String clusterAlias : clusterAliases) {
|
||||
putRepository(CcrRepository.NAME_PREFIX + clusterAlias);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxy) {
|
||||
String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias;
|
||||
if (addresses.isEmpty()) {
|
||||
deleteRepository(repositoryName);
|
||||
} else {
|
||||
putRepository(repositoryName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -52,9 +52,12 @@ import java.util.TreeMap;
|
|||
import java.util.function.BiConsumer;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.LongSupplier;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster;
|
||||
|
||||
/**
|
||||
* A component that runs only on the elected master node and follows leader indices automatically
|
||||
* if they match with a auto follow pattern that is defined in {@link AutoFollowMetadata}.
|
||||
|
@ -67,6 +70,7 @@ public class AutoFollowCoordinator implements ClusterStateListener {
|
|||
private final Client client;
|
||||
private final ClusterService clusterService;
|
||||
private final CcrLicenseChecker ccrLicenseChecker;
|
||||
private final LongSupplier relativeMillisTimeProvider;
|
||||
|
||||
private volatile Map<String, AutoFollower> autoFollowers = Collections.emptyMap();
|
||||
|
||||
|
@ -79,10 +83,13 @@ public class AutoFollowCoordinator implements ClusterStateListener {
|
|||
public AutoFollowCoordinator(
|
||||
Client client,
|
||||
ClusterService clusterService,
|
||||
CcrLicenseChecker ccrLicenseChecker) {
|
||||
CcrLicenseChecker ccrLicenseChecker,
|
||||
LongSupplier relativeMillisTimeProvider) {
|
||||
|
||||
this.client = client;
|
||||
this.clusterService = clusterService;
|
||||
this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker, "ccrLicenseChecker");
|
||||
this.relativeMillisTimeProvider = relativeMillisTimeProvider;
|
||||
clusterService.addListener(this);
|
||||
this.recentAutoFollowErrors = new LinkedHashMap<String, ElasticsearchException>() {
|
||||
@Override
|
||||
|
@ -93,11 +100,26 @@ public class AutoFollowCoordinator implements ClusterStateListener {
|
|||
}
|
||||
|
||||
public synchronized AutoFollowStats getStats() {
|
||||
final Map<String, AutoFollower> autoFollowers = this.autoFollowers;
|
||||
final TreeMap<String, AutoFollowedCluster> timesSinceLastAutoFollowPerRemoteCluster = new TreeMap<>();
|
||||
for (Map.Entry<String, AutoFollower> entry : autoFollowers.entrySet()) {
|
||||
long lastAutoFollowTimeInMillis = entry.getValue().lastAutoFollowTimeInMillis;
|
||||
long lastSeenMetadataVersion = entry.getValue().metadataVersion;
|
||||
if (lastAutoFollowTimeInMillis != -1) {
|
||||
long timeSinceLastCheckInMillis = relativeMillisTimeProvider.getAsLong() - lastAutoFollowTimeInMillis;
|
||||
timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(),
|
||||
new AutoFollowedCluster(timeSinceLastCheckInMillis, lastSeenMetadataVersion));
|
||||
} else {
|
||||
timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(), new AutoFollowedCluster(-1L, lastSeenMetadataVersion));
|
||||
}
|
||||
}
|
||||
|
||||
return new AutoFollowStats(
|
||||
numberOfFailedIndicesAutoFollowed,
|
||||
numberOfFailedRemoteClusterStateRequests,
|
||||
numberOfSuccessfulIndicesAutoFollowed,
|
||||
new TreeMap<>(recentAutoFollowErrors)
|
||||
new TreeMap<>(recentAutoFollowErrors),
|
||||
timesSinceLastAutoFollowPerRemoteCluster
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -146,7 +168,8 @@ public class AutoFollowCoordinator implements ClusterStateListener {
|
|||
|
||||
Map<String, AutoFollower> newAutoFollowers = new HashMap<>(newRemoteClusters.size());
|
||||
for (String remoteCluster : newRemoteClusters) {
|
||||
AutoFollower autoFollower = new AutoFollower(remoteCluster, this::updateStats, clusterService::state) {
|
||||
AutoFollower autoFollower =
|
||||
new AutoFollower(remoteCluster, this::updateStats, clusterService::state, relativeMillisTimeProvider) {
|
||||
|
||||
@Override
|
||||
void getRemoteClusterState(final String remoteCluster,
|
||||
|
@ -239,20 +262,25 @@ public class AutoFollowCoordinator implements ClusterStateListener {
|
|||
private final String remoteCluster;
|
||||
private final Consumer<List<AutoFollowResult>> statsUpdater;
|
||||
private final Supplier<ClusterState> followerClusterStateSupplier;
|
||||
private final LongSupplier relativeTimeProvider;
|
||||
|
||||
private volatile long lastAutoFollowTimeInMillis = -1;
|
||||
private volatile long metadataVersion = 0;
|
||||
private volatile CountDown autoFollowPatternsCountDown;
|
||||
private volatile AtomicArray<AutoFollowResult> autoFollowResults;
|
||||
|
||||
AutoFollower(final String remoteCluster,
|
||||
final Consumer<List<AutoFollowResult>> statsUpdater,
|
||||
final Supplier<ClusterState> followerClusterStateSupplier) {
|
||||
final Supplier<ClusterState> followerClusterStateSupplier,
|
||||
LongSupplier relativeTimeProvider) {
|
||||
this.remoteCluster = remoteCluster;
|
||||
this.statsUpdater = statsUpdater;
|
||||
this.followerClusterStateSupplier = followerClusterStateSupplier;
|
||||
this.relativeTimeProvider = relativeTimeProvider;
|
||||
}
|
||||
|
||||
void start() {
|
||||
lastAutoFollowTimeInMillis = relativeTimeProvider.getAsLong();
|
||||
final ClusterState clusterState = followerClusterStateSupplier.get();
|
||||
final AutoFollowMetadata autoFollowMetadata = clusterState.metaData().custom(AutoFollowMetadata.TYPE);
|
||||
if (autoFollowMetadata == null) {
|
||||
|
|
|
@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
|||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
|
||||
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
|
||||
|
@ -117,27 +116,23 @@ public abstract class CcrIntegTestCase extends ESTestCase {
|
|||
}
|
||||
|
||||
stopClusters();
|
||||
NodeConfigurationSource nodeConfigurationSource = createNodeConfigurationSource();
|
||||
Collection<Class<? extends Plugin>> mockPlugins = Arrays.asList(ESIntegTestCase.TestSeedPlugin.class,
|
||||
TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class, getTestTransportPlugin());
|
||||
|
||||
InternalTestCluster leaderCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(),
|
||||
numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "leader", mockPlugins,
|
||||
numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(null), 0, "leader", mockPlugins,
|
||||
Function.identity());
|
||||
InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(),
|
||||
numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "follower", mockPlugins,
|
||||
Function.identity());
|
||||
clusterGroup = new ClusterGroup(leaderCluster, followerCluster);
|
||||
|
||||
leaderCluster.beforeTest(random(), 0.0D);
|
||||
leaderCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster());
|
||||
|
||||
String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString();
|
||||
InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(),
|
||||
numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(address), 0, "follower",
|
||||
mockPlugins, Function.identity());
|
||||
clusterGroup = new ClusterGroup(leaderCluster, followerCluster);
|
||||
|
||||
followerCluster.beforeTest(random(), 0.0D);
|
||||
followerCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster());
|
||||
|
||||
ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
|
||||
String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString();
|
||||
updateSettingsRequest.persistentSettings(Settings.builder().put("cluster.remote.leader_cluster.seeds", address));
|
||||
assertAcked(followerClient().admin().cluster().updateSettings(updateSettingsRequest).actionGet());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -175,7 +170,7 @@ public abstract class CcrIntegTestCase extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
private NodeConfigurationSource createNodeConfigurationSource() {
|
||||
private NodeConfigurationSource createNodeConfigurationSource(String leaderSeedAddress) {
|
||||
Settings.Builder builder = Settings.builder();
|
||||
builder.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE);
|
||||
// Default the watermarks to absurdly low to prevent the tests
|
||||
|
@ -195,6 +190,9 @@ public abstract class CcrIntegTestCase extends ESTestCase {
|
|||
builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false);
|
||||
builder.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false);
|
||||
builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
|
||||
if (leaderSeedAddress != null) {
|
||||
builder.put("cluster.remote.leader_cluster.seeds", leaderSeedAddress);
|
||||
}
|
||||
return new NodeConfigurationSource() {
|
||||
@Override
|
||||
public Settings nodeSettings(int nodeOrdinal) {
|
||||
|
|
|
@ -89,7 +89,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101"));
|
||||
assertThat(entries.get(0).getValue(), nullValue());
|
||||
};
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState)) {
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState), () -> 1L) {
|
||||
@Override
|
||||
void getRemoteClusterState(String remoteCluster,
|
||||
long metadataVersion,
|
||||
|
@ -154,7 +154,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
assertThat(results.get(0).clusterStateFetchException, sameInstance(failure));
|
||||
assertThat(results.get(0).autoFollowExecutionResults.entrySet().size(), equalTo(0));
|
||||
};
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) {
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) {
|
||||
@Override
|
||||
void getRemoteClusterState(String remoteCluster,
|
||||
long metadataVersion,
|
||||
|
@ -209,7 +209,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101"));
|
||||
assertThat(entries.get(0).getValue(), sameInstance(failure));
|
||||
};
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) {
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) {
|
||||
@Override
|
||||
void getRemoteClusterState(String remoteCluster,
|
||||
long metadataVersion,
|
||||
|
@ -266,7 +266,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101"));
|
||||
assertThat(entries.get(0).getValue(), sameInstance(failure));
|
||||
};
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) {
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) {
|
||||
@Override
|
||||
void getRemoteClusterState(String remoteCluster,
|
||||
long metadataVersion,
|
||||
|
@ -532,8 +532,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
|
||||
null,
|
||||
mock(ClusterService.class),
|
||||
new CcrLicenseChecker(() -> true, () -> false)
|
||||
);
|
||||
new CcrLicenseChecker(() -> true, () -> false),
|
||||
() -> 1L);
|
||||
|
||||
autoFollowCoordinator.updateStats(Collections.singletonList(
|
||||
new AutoFollowCoordinator.AutoFollowResult("_alias1"))
|
||||
|
@ -585,6 +585,92 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias2:index2").getCause().getMessage(), equalTo("error"));
|
||||
}
|
||||
|
||||
public void testUpdateAutoFollowers() {
|
||||
ClusterService clusterService = mock(ClusterService.class);
|
||||
// Return a cluster state with no patterns so that the auto followers never really execute:
|
||||
ClusterState followerState = ClusterState.builder(new ClusterName("remote"))
|
||||
.metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
|
||||
new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap())))
|
||||
.build();
|
||||
when(clusterService.state()).thenReturn(followerState);
|
||||
AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
|
||||
null,
|
||||
clusterService,
|
||||
new CcrLicenseChecker(() -> true, () -> false),
|
||||
() -> 1L);
|
||||
// Add 3 patterns:
|
||||
Map<String, AutoFollowPattern> patterns = new HashMap<>();
|
||||
patterns.put("pattern1", new AutoFollowPattern("remote1", Collections.singletonList("logs-*"), null, null, null,
|
||||
null, null, null, null, null, null, null, null));
|
||||
patterns.put("pattern2", new AutoFollowPattern("remote2", Collections.singletonList("logs-*"), null, null, null,
|
||||
null, null, null, null, null, null, null, null));
|
||||
patterns.put("pattern3", new AutoFollowPattern("remote2", Collections.singletonList("metrics-*"), null, null, null,
|
||||
null, null, null, null, null, null, null, null));
|
||||
ClusterState clusterState = ClusterState.builder(new ClusterName("remote"))
|
||||
.metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
|
||||
new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
|
||||
.build();
|
||||
autoFollowCoordinator.updateAutoFollowers(clusterState);
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2));
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue());
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue());
|
||||
// Remove patterns 1 and 3:
|
||||
patterns.remove("pattern1");
|
||||
patterns.remove("pattern3");
|
||||
clusterState = ClusterState.builder(new ClusterName("remote"))
|
||||
.metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
|
||||
new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
|
||||
.build();
|
||||
autoFollowCoordinator.updateAutoFollowers(clusterState);
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(1));
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue());
|
||||
// Add pattern 4:
|
||||
patterns.put("pattern4", new AutoFollowPattern("remote1", Collections.singletonList("metrics-*"), null, null, null,
|
||||
null, null, null, null, null, null, null, null));
|
||||
clusterState = ClusterState.builder(new ClusterName("remote"))
|
||||
.metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
|
||||
new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
|
||||
.build();
|
||||
autoFollowCoordinator.updateAutoFollowers(clusterState);
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2));
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue());
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue());
|
||||
// Remove patterns 2 and 4:
|
||||
patterns.remove("pattern2");
|
||||
patterns.remove("pattern4");
|
||||
clusterState = ClusterState.builder(new ClusterName("remote"))
|
||||
.metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
|
||||
new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())))
|
||||
.build();
|
||||
autoFollowCoordinator.updateAutoFollowers(clusterState);
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0));
|
||||
}
|
||||
|
||||
public void testUpdateAutoFollowersNoPatterns() {
|
||||
AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
|
||||
null,
|
||||
mock(ClusterService.class),
|
||||
new CcrLicenseChecker(() -> true, () -> false),
|
||||
() -> 1L);
|
||||
ClusterState clusterState = ClusterState.builder(new ClusterName("remote"))
|
||||
.metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE,
|
||||
new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap())))
|
||||
.build();
|
||||
autoFollowCoordinator.updateAutoFollowers(clusterState);
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0));
|
||||
}
|
||||
|
||||
public void testUpdateAutoFollowersNoAutoFollowMetadata() {
|
||||
AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator(
|
||||
null,
|
||||
mock(ClusterService.class),
|
||||
new CcrLicenseChecker(() -> true, () -> false),
|
||||
() -> 1L);
|
||||
ClusterState clusterState = ClusterState.builder(new ClusterName("remote")).build();
|
||||
autoFollowCoordinator.updateAutoFollowers(clusterState);
|
||||
assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0));
|
||||
}
|
||||
|
||||
public void testWaitForMetadataVersion() {
|
||||
Client client = mock(Client.class);
|
||||
when(client.getRemoteClusterClient(anyString())).thenReturn(client);
|
||||
|
@ -611,7 +697,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
|
||||
List<AutoFollowCoordinator.AutoFollowResult> allResults = new ArrayList<>();
|
||||
Consumer<List<AutoFollowCoordinator.AutoFollowResult>> handler = allResults::addAll;
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) {
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) {
|
||||
|
||||
long previousRequestedMetadataVersion = 0;
|
||||
|
||||
|
@ -669,7 +755,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase {
|
|||
fail("should not be invoked");
|
||||
};
|
||||
AtomicInteger counter = new AtomicInteger();
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) {
|
||||
AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) {
|
||||
|
||||
long previousRequestedMetadataVersion = 0;
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
|
|||
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
|
||||
|
||||
import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomReadExceptions;
|
||||
import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomTrackingClusters;
|
||||
import static org.elasticsearch.xpack.ccr.action.StatsResponsesTests.createStatsResponse;
|
||||
|
||||
public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCase<CcrStatsAction.Response> {
|
||||
|
@ -27,7 +28,8 @@ public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCas
|
|||
randomNonNegativeLong(),
|
||||
randomNonNegativeLong(),
|
||||
randomNonNegativeLong(),
|
||||
randomReadExceptions()
|
||||
randomReadExceptions(),
|
||||
randomTrackingClusters()
|
||||
);
|
||||
FollowStatsAction.StatsResponses statsResponse = createStatsResponse();
|
||||
return new CcrStatsAction.Response(autoFollowStats, statsResponse);
|
||||
|
|
|
@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
|
||||
import org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -34,7 +35,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase<AutoFollow
|
|||
randomNonNegativeLong(),
|
||||
randomNonNegativeLong(),
|
||||
randomNonNegativeLong(),
|
||||
randomReadExceptions()
|
||||
randomReadExceptions(),
|
||||
randomTrackingClusters()
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -47,6 +49,15 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase<AutoFollow
|
|||
return readExceptions;
|
||||
}
|
||||
|
||||
static NavigableMap<String, AutoFollowedCluster> randomTrackingClusters() {
|
||||
final int count = randomIntBetween(0, 16);
|
||||
final NavigableMap<String, AutoFollowedCluster> readExceptions = new TreeMap<>();
|
||||
for (int i = 0; i < count; i++) {
|
||||
readExceptions.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong()));
|
||||
}
|
||||
return readExceptions;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Writeable.Reader<AutoFollowStats> instanceReader() {
|
||||
return AutoFollowStats::new;
|
||||
|
@ -56,6 +67,11 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase<AutoFollow
|
|||
protected void assertEqualInstances(AutoFollowStats expectedInstance, AutoFollowStats newInstance) {
|
||||
assertNotSame(expectedInstance, newInstance);
|
||||
|
||||
assertThat(newInstance.getNumberOfFailedRemoteClusterStateRequests(),
|
||||
equalTo(expectedInstance.getNumberOfFailedRemoteClusterStateRequests()));
|
||||
assertThat(newInstance.getNumberOfFailedFollowIndices(), equalTo(expectedInstance.getNumberOfFailedFollowIndices()));
|
||||
assertThat(newInstance.getNumberOfSuccessfulFollowIndices(), equalTo(expectedInstance.getNumberOfSuccessfulFollowIndices()));
|
||||
|
||||
assertThat(newInstance.getRecentAutoFollowErrors().size(), equalTo(expectedInstance.getRecentAutoFollowErrors().size()));
|
||||
assertThat(newInstance.getRecentAutoFollowErrors().keySet(), equalTo(expectedInstance.getRecentAutoFollowErrors().keySet()));
|
||||
for (final Map.Entry<String, ElasticsearchException> entry : newInstance.getRecentAutoFollowErrors().entrySet()) {
|
||||
|
@ -68,6 +84,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase<AutoFollow
|
|||
anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class)));
|
||||
assertThat(entry.getValue().getCause().getMessage(), containsString(expected.getCause().getMessage()));
|
||||
}
|
||||
|
||||
assertThat(newInstance.getAutoFollowedClusters(), equalTo(expectedInstance.getAutoFollowedClusters()));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue