Make sure that BWC tests run successfully, even with types deprecation messages. (#36511)

Julie Tibshirani 2018-12-12 12:57:32 -08:00 committed by GitHub
parent 9ac7359643
commit 71a39d10be
9 changed files with 78 additions and 96 deletions
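
In short: instead of silently accepting every "[types removal]" warning through the now-deleted TypesRemovalWarningsHandler, the BWC tests keep hitting the old typed endpoints (/{index}/doc/{id} and friends) and declare, per request, exactly which deprecation warning they expect, using the expectWarnings helper added to ESRestTestCase below. A minimal sketch of that pattern, assuming it runs inside an ESRestTestCase subclass (the index name and id are made up, not taken from the commit):

// Typed GET through the deprecated /{index}/{type}/{id} endpoint: the test states the
// exact warning it expects rather than ignoring all types-removal warnings.
Request getRequest = new Request("GET", "/some_index/doc/1");
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
assertOK(client().performRequest(getRequest));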

FullClusterRestartIT.java (org.elasticsearch.upgrades)

@ -22,11 +22,11 @@ package org.elasticsearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.test.rest.TypesRemovalWarningsHandler;
import org.elasticsearch.rest.action.document.RestGetAction;
import org.elasticsearch.rest.action.search.RestExplainAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedFunction;
@ -82,7 +82,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
index = getTestName().toLowerCase(Locale.ROOT);
}
public void testSearch() throws Exception {
int count;
if (isRunningAgainstOldCluster()) {
@ -96,7 +95,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
{
mappingsAndSettings.startObject("mappings");
mappingsAndSettings.startObject("_doc");
mappingsAndSettings.startObject("doc");
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("string");
@ -162,7 +161,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
{
mappingsAndSettings.startObject("mappings");
mappingsAndSettings.startObject("_doc");
mappingsAndSettings.startObject("doc");
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");
@ -233,7 +232,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
{
mappingsAndSettings.startObject("mappings");
mappingsAndSettings.startObject("_doc");
mappingsAndSettings.startObject("doc");
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("key");
@ -336,7 +335,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
mappingsAndSettings.startObject();
{
mappingsAndSettings.startObject("mappings");
mappingsAndSettings.startObject("_doc");
mappingsAndSettings.startObject("doc");
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");
@ -404,7 +403,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
mappingsAndSettings.startObject();
{
mappingsAndSettings.startObject("mappings");
mappingsAndSettings.startObject("_doc");
mappingsAndSettings.startObject("doc");
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");
@ -492,7 +491,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
bulk.append("{\"index\":{}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
Request bulkRequest = new Request("POST", "/" + index + "_write/_doc/_bulk");
Request bulkRequest = new Request("POST", "/" + index + "_write/doc/_bulk");
bulkRequest.setJsonEntity(bulk.toString());
bulkRequest.addParameter("refresh", "");
assertThat(EntityUtils.toString(client().performRequest(bulkRequest).getEntity()), containsString("\"errors\":false"));
@ -571,19 +570,15 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
String type = (String) bestHit.get("_type");
String id = (String) bestHit.get("_id");
Request explanationRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
explanationRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
RequestOptions.Builder explanationOptions = RequestOptions.DEFAULT.toBuilder();
explanationOptions.setWarningsHandler(TypesRemovalWarningsHandler.INSTANCE);
explanationRequest.setOptions(explanationOptions);
String explanation = toStr(client().performRequest(explanationRequest));
Request explainRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
explainRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
explainRequest.setOptions(expectWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE));
String explanation = toStr(client().performRequest(explainRequest));
assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost"));
// Make sure the query can run on the whole index
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setEntity(explanationRequest.getEntity());
searchRequest.setEntity(explainRequest.getEntity());
searchRequest.addParameter("explain", "true");
Map<?, ?> matchAllResponse = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(matchAllResponse);
@ -628,11 +623,13 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0);
String docId = (String) hit.get("_id");
Request updateRequest = new Request("POST", "/" + index + "/_doc/" + docId + "/_update");
Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update");
updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}");
client().performRequest(updateRequest);
Map<String, Object> getRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_doc/" + docId)));
Request getRequest = new Request("GET", "/" + index + "/doc/" + docId);
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
Map<String, Object> getRsp = entityAsMap(client().performRequest(getRequest));
Map<?, ?> source = (Map<?, ?>) getRsp.get("_source");
assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo"));
@ -685,7 +682,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
* Tests that a single document survives. Super basic smoke test.
*/
public void testSingleDoc() throws IOException {
String docLocation = "/" + index + "/_doc/1";
String docLocation = "/" + index + "/doc/1";
String doc = "{\"test\": \"test\"}";
if (isRunningAgainstOldCluster()) {
@ -694,7 +691,10 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
client().performRequest(createDoc);
}
assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc));
Request request = new Request("GET", docLocation);
request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
assertThat(toStr(client().performRequest(request)), containsString(doc));
}
/**
@ -872,7 +872,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
templateBuilder.endObject();
templateBuilder.startObject("mappings"); {
templateBuilder.startObject("_doc"); {
templateBuilder.startObject("doc"); {
templateBuilder.startObject("_source"); {
templateBuilder.field("enabled", true);
}
@ -982,7 +982,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
int numDocs = between(10, 100);
for (int i = 0; i < numDocs; i++) {
String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject());
Request request = new Request("POST", "/" + index + "/_doc/" + i);
Request request = new Request("POST", "/" + index + "/doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
if (rarely()) {
@ -995,11 +995,11 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
for (int i = 0; i < numDocs; i++) {
if (randomBoolean()) {
String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v2").endObject());
Request request = new Request("POST", "/" + index + "/_doc/" + i);
Request request = new Request("POST", "/" + index + "/doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
} else if (randomBoolean()) {
client().performRequest(new Request("DELETE", "/" + index + "/_doc/" + i));
client().performRequest(new Request("DELETE", "/" + index + "/doc/" + i));
liveDocs--;
}
}
@ -1065,7 +1065,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/_doc/_bulk");
Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk");
writeToRestoredRequest.addParameter("refresh", "true");
writeToRestoredRequest.setJsonEntity(bulk.toString());
assertThat(EntityUtils.toString(client().performRequest(writeToRestoredRequest).getEntity()), containsString("\"errors\":false"));
@ -1097,7 +1097,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
expectedTemplate.put("index_patterns", singletonList("evil_*"));
}
expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
expectedTemplate.put("mappings", singletonMap("_doc", singletonMap("_source", singletonMap("enabled", true))));
expectedTemplate.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
expectedTemplate.put("order", 0);
Map<String, Object> aliases = new HashMap<>();
aliases.put("alias1", emptyMap());
@ -1118,7 +1118,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
logger.info("Indexing {} random documents", count);
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
Request createDocument = new Request("POST", "/" + index + "/_doc/" + i);
Request createDocument = new Request("POST", "/" + index + "/doc/" + i);
createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
client().performRequest(createDocument);
if (rarely()) {
@ -1143,15 +1143,16 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
infoDoc.field("value", value);
infoDoc.endObject();
// Only create the first version so we know how many documents are created when the index is first created
Request request = new Request("PUT", "/info/_doc/" + index + "_" + type);
Request request = new Request("PUT", "/info/doc/" + index + "_" + type);
request.addParameter("op_type", "create");
request.setJsonEntity(Strings.toString(infoDoc));
client().performRequest(request);
}
private String loadInfoDocument(String type) throws IOException {
Request request = new Request("GET", "/info/_doc/" + index + "_" + type);
Request request = new Request("GET", "/info/doc/" + index + "_" + type);
request.addParameter("filter_path", "_source");
request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
String doc = toStr(client().performRequest(request));
Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc);
assertTrue(doc, m.find());

IndexingIT.java

@ -28,6 +28,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.rest.action.document.RestGetAction;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.yaml.ObjectPath;
@ -45,7 +46,7 @@ public class IndexingIT extends ESRestTestCase {
private int indexDocs(String index, final int idStart, final int numDocs) throws IOException {
for (int i = 0; i < numDocs; i++) {
final int id = idStart + i;
Request request = new Request("PUT", index + "/_doc/" + id);
Request request = new Request("PUT", index + "/doc/" + id);
request.setJsonEntity("{\"test\": \"test_" + randomAlphaOfLength(2) + "\"}");
assertOK(client().performRequest(request));
}
@ -284,8 +285,10 @@ public class IndexingIT extends ESRestTestCase {
}
private void assertVersion(final String index, final int docId, final String preference, final int expectedVersion) throws IOException {
Request request = new Request("GET", index + "/_doc/" + docId);
Request request = new Request("GET", index + "/doc/" + docId);
request.addParameter("preference", preference);
request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
final Response response = client().performRequest(request);
assertOK(response);
final int actualVersion = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("_version").toString());

RestGetAction.java (org.elasticsearch.rest.action.document)

@ -46,7 +46,7 @@ import static org.elasticsearch.rest.RestStatus.OK;
public class RestGetAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
LogManager.getLogger(RestGetAction.class));
static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " +
public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " +
"document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead.";
public RestGetAction(final Settings settings, final RestController controller) {

RestExplainAction.java (org.elasticsearch.rest.action.search)

@ -44,7 +44,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestExplainAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
LogManager.getLogger(RestExplainAction.class));
static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " +
public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " +
"Specifying a type in explain requests is deprecated.";
public RestExplainAction(Settings settings, RestController controller) {

TypeFieldTypeTests.java

@ -83,7 +83,6 @@ public class TypeFieldTypeTests extends FieldTypeTestCase {
assertEquals(new MatchNoDocsQuery(), query);
}
static DirectoryReader openReaderWithNewType(String type, IndexWriter writer) throws IOException {
Document doc = new Document();
StringField typeField = new StringField(TypeFieldMapper.NAME, type, Store.NO);

ESRestTestCase.java (org.elasticsearch.test.rest)

@ -245,7 +245,18 @@ public abstract class ESRestTestCase extends ESTestCase {
expectationsSetter.accept(warningsHandler);
builder.setWarningsHandler(warningsHandler);
return builder.build();
}
}
/**
* Creates request options designed to be used when making a call that can return warnings, for example a
* deprecated request. The options will ensure that the given warnings are returned if all nodes are on
* {@link Version#CURRENT} and will allow (but not require) the warnings if any node is running an older version.
*
* @param warnings The expected warnings.
*/
public static RequestOptions expectWarnings(String... warnings) {
return expectVersionSpecificWarnings(consumer -> consumer.current(warnings));
}
/**
* Construct an HttpHost from the given host and port
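
A usage sketch for the new helper, assuming an ESRestTestCase subclass and a made-up index, type and id: because expectWarnings delegates to expectVersionSpecificWarnings with only current(...) expectations, the declared warning is required when every node is on Version.CURRENT but merely allowed while any older node is still in the cluster, so the same test can run before, during and after an upgrade.

// Hypothetical call site; the endpoint and query are illustrative, not from the commit.
Request explainRequest = new Request("GET", "/some_index/doc/1/_explain");
explainRequest.setJsonEntity("{ \"query\": { \"match_all\": {} } }");
explainRequest.setOptions(expectWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE));
assertOK(client().performRequest(explainRequest));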

TypesRemovalWarningsHandler.java (org.elasticsearch.test.rest, deleted)

@ -1,47 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest;
import org.elasticsearch.client.WarningsHandler;
import java.util.List;
/**
* An implementation of {@link WarningsHandler} that ignores warnings related to types removal,
* but fails the request on all other warnings.
*/
public class TypesRemovalWarningsHandler implements WarningsHandler {
public static final TypesRemovalWarningsHandler INSTANCE = new TypesRemovalWarningsHandler();
private TypesRemovalWarningsHandler() {
// Prevent instantiation.
}
@Override
public boolean warningsShouldFailRequest(List<String> warnings) {
for (String warning : warnings) {
if (warning.startsWith("[types removal]") == false) {
return true;
}
}
return false;
}
}

FullClusterRestartIT.java (x-pack variant)

@ -12,15 +12,16 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.document.RestGetAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.test.StreamsUtils;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase;
import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder;
import org.elasticsearch.common.xcontent.ObjectPath;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction;
@ -80,7 +81,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
* Tests that a single document survives. Super basic smoke test.
*/
public void testSingleDoc() throws IOException {
String docLocation = "/testsingledoc/_doc/1";
String docLocation = "/testsingledoc/doc/1";
String doc = "{\"test\": \"test\"}";
if (isRunningAgainstOldCluster()) {
@ -90,7 +91,9 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
client().performRequest(createDoc);
}
assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc));
Request getRequest = new Request("GET", docLocation);
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
assertThat(toStr(client().performRequest(getRequest)), containsString(doc));
}
@SuppressWarnings("unchecked")
@ -248,7 +251,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
// index documents for the rollup job
final StringBuilder bulk = new StringBuilder();
for (int i = 0; i < numDocs; i++) {
bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"doc\"}}\n");
String date = String.format(Locale.ROOT, "%04d-01-01T00:%02d:00Z", year, i);
bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
}
@ -306,7 +309,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
assumeTrue("Rollup ID scheme changed in 6.4", getOldClusterVersion().before(Version.V_6_4_0));
if (isRunningAgainstOldCluster()) {
final Request indexRequest = new Request("POST", "/id-test-rollup/_doc/1");
final Request indexRequest = new Request("POST", "/id-test-rollup/doc/1");
indexRequest.setJsonEntity("{\"timestamp\":\"2018-01-01T00:00:01\",\"value\":123}");
client().performRequest(indexRequest);
@ -367,7 +370,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
} else {
final Request indexRequest = new Request("POST", "/id-test-rollup/_doc/2");
final Request indexRequest = new Request("POST", "/id-test-rollup/doc/2");
indexRequest.setJsonEntity("{\"timestamp\":\"2018-01-02T00:00:01\",\"value\":345}");
client().performRequest(indexRequest);

TokenBackwardsCompatibilityIT.java

@ -15,6 +15,7 @@ import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.action.document.RestGetAction;
import org.elasticsearch.test.rest.yaml.ObjectPath;
import java.io.IOException;
@ -63,7 +64,7 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
assertNotNull(token);
assertTokenWorks(token);
Request indexRequest1 = new Request("PUT", "token_backwards_compatibility_it/_doc/old_cluster_token1");
Request indexRequest1 = new Request("PUT", "token_backwards_compatibility_it/doc/old_cluster_token1");
indexRequest1.setJsonEntity(
"{\n" +
" \"token\": \"" + token + "\"\n" +
@ -77,7 +78,7 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
token = (String) responseMap.get("access_token");
assertNotNull(token);
assertTokenWorks(token);
Request indexRequest2 = new Request("PUT", "token_backwards_compatibility_it/_doc/old_cluster_token2");
Request indexRequest2 = new Request("PUT", "token_backwards_compatibility_it/doc/old_cluster_token2");
indexRequest2.setJsonEntity(
"{\n" +
" \"token\": \"" + token + "\"\n" +
@ -88,7 +89,9 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
public void testTokenWorksInMixedOrUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the mixed or upgraded cluster",
CLUSTER_TYPE == ClusterType.MIXED || CLUSTER_TYPE == ClusterType.UPGRADED);
Response getResponse = client().performRequest(new Request("GET", "token_backwards_compatibility_it/_doc/old_cluster_token1"));
Request getRequest = new Request("GET", "token_backwards_compatibility_it/doc/old_cluster_token1");
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
Response getResponse = client().performRequest(getRequest);
assertOK(getResponse);
Map<String, Object> source = (Map<String, Object>) entityAsMap(getResponse).get("_source");
assertTokenWorks((String) source.get("token"));
@ -97,7 +100,10 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
public void testMixedCluster() throws Exception {
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.MIXED);
assumeTrue("the master must be on the latest version before we can write", isMasterOnLatestVersion());
Response getResponse = client().performRequest(new Request("GET", "token_backwards_compatibility_it/_doc/old_cluster_token2"));
Request getRequest = new Request("GET", "token_backwards_compatibility_it/doc/old_cluster_token2");
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
Response getResponse = client().performRequest(getRequest);
Map<String, Object> source = (Map<String, Object>) entityAsMap(getResponse).get("_source");
final String token = (String) source.get("token");
assertTokenWorks(token);
@ -146,7 +152,10 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
public void testUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.UPGRADED);
Response getResponse = client().performRequest(new Request("GET", "token_backwards_compatibility_it/_doc/old_cluster_token2"));
Request getRequest = new Request("GET", "token_backwards_compatibility_it/doc/old_cluster_token2");
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
Response getResponse = client().performRequest(getRequest);
assertOK(getResponse);
Map<String, Object> source = (Map<String, Object>) entityAsMap(getResponse).get("_source");
final String token = (String) source.get("token");
@ -159,7 +168,10 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
assertOK(invalidationResponse);
assertTokenDoesNotWork(token);
getResponse = client().performRequest(new Request("GET", "token_backwards_compatibility_it/_doc/old_cluster_token1"));
getRequest = new Request("GET", "token_backwards_compatibility_it/doc/old_cluster_token1");
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
getResponse = client().performRequest(getRequest);
source = (Map<String, Object>) entityAsMap(getResponse).get("_source");
final String workingToken = (String) source.get("token");
assertTokenWorks(workingToken);