Move OldIndexBackwardsCompatibilityIT#assertBasicSearchWorks over to full cluster restart qa module.

Relates to #24939
This commit is contained in:
Martijn van Groningen 2017-05-30 11:14:20 +02:00
parent 8e0d6015f9
commit 9531ef25ec
No known key found for this signature in database
GPG Key ID: AB236F4FCF2AF12A
2 changed files with 101 additions and 42 deletions

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.VersionTests;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
@@ -53,7 +52,6 @@ import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.OldIndexUtils;
@@ -229,7 +227,6 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
// node startup
upgradeIndexFolder();
importIndex(indexName);
assertBasicSearchWorks(indexName);
assertAllSearchWorks(indexName);
assertBasicAggregationWorks(indexName);
assertRealtimeGetWorks(indexName);
@@ -241,31 +238,6 @@
unloadIndex(indexName);
}
/**
 * Verifies basic search operations against the imported old index:
 * a match_all query, sorting on a numeric field, and {@code exists}
 * queries — including one on a field whose name contains dots.
 *
 * @param indexName name of the old index to search
 */
void assertBasicSearchWorks(String indexName) {
    logger.info("--> testing basic search");
    SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery());
    SearchResponse searchRsp = searchReq.get();
    ElasticsearchAssertions.assertNoFailures(searchRsp);
    // Remember the match_all hit count; the exists queries below must match the same number.
    long numDocs = searchRsp.getHits().getTotalHits();
    logger.info("Found {} in old index", numDocs);

    logger.info("--> testing basic search with sort");
    searchReq.addSort("long_sort", SortOrder.ASC);
    ElasticsearchAssertions.assertNoFailures(searchReq.get());

    logger.info("--> testing exists filter");
    searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string"));
    searchRsp = searchReq.get();
    ElasticsearchAssertions.assertNoFailures(searchRsp);
    // every document in the old index carries the "string" field
    assertEquals(numDocs, searchRsp.getHits().getTotalHits());

    // NOTE(review): removed an unused GetSettingsResponse local here — it fetched the
    // index settings but never asserted anything on the result (dead code).
    searchReq = client().prepareSearch(indexName)
            .setQuery(QueryBuilders.existsQuery("field.with.dots"));
    searchRsp = searchReq.get();
    ElasticsearchAssertions.assertNoFailures(searchRsp);
    assertEquals(numDocs, searchRsp.getHits().getTotalHits());
}
boolean findPayloadBoostInExplanation(Explanation expl) {
if (expl.getDescription().startsWith("payloadBoost=") && expl.getValue() != 1f) {
return true;

View File

@@ -24,12 +24,17 @@ import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.rest.ESRestTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
@@ -38,6 +43,7 @@ import java.util.regex.Pattern;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;
/**
@@ -64,6 +70,95 @@ public class FullClusterRestartIT extends ESRestTestCase {
return true;
}
/**
 * When running against the old cluster, creates an index with explicit mappings
 * (one shard, no replicas) and fills it with random documents. In both phases
 * (old and upgraded cluster) verifies that basic searches against it work.
 */
public void testSearch() throws Exception {
    if (runningAgainstOldCluster) {
        XContentBuilder mappingsAndSettings = jsonBuilder();
        mappingsAndSettings.startObject();
        {
            mappingsAndSettings.startObject("settings");
            mappingsAndSettings.field("number_of_shards", 1);
            mappingsAndSettings.field("number_of_replicas", 0);
            mappingsAndSettings.endObject();
        }
        {
            mappingsAndSettings.startObject("mappings");
            mappingsAndSettings.startObject("doc");
            mappingsAndSettings.startObject("properties");
            {
                mappingsAndSettings.startObject("string");
                mappingsAndSettings.field("type", "text");
                mappingsAndSettings.endObject();
            }
            {
                // Map the dotted field explicitly. The documents below index
                // "field.with.dots" and assertBasicSearchWorks queries it by that
                // name, so the mapping must use the same name; it previously
                // declared an unused "dots_in_field_names" property that nothing
                // ever indexed into.
                mappingsAndSettings.startObject("field.with.dots");
                mappingsAndSettings.field("type", "text");
                mappingsAndSettings.endObject();
            }
            mappingsAndSettings.endObject();
            mappingsAndSettings.endObject();
            mappingsAndSettings.endObject();
        }
        mappingsAndSettings.endObject();
        client().performRequest("PUT", "/index", Collections.emptyMap(),
                new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));

        int numDocs = randomIntBetween(2000, 3000);
        indexRandomDocuments("index", numDocs, true, i -> {
            return JsonXContent.contentBuilder().startObject()
                    .field("string", randomAlphaOfLength(10))
                    .field("int", randomInt(100))
                    .field("float", randomFloat())
                    // be sure to create a "proper" boolean (True, False) for the first document so that automapping is correct
                    .field("bool", i > 0 && supportsLenientBooleans ? randomLenientBoolean() : randomBoolean())
                    .field("field.with.dots", randomAlphaOfLength(10))
                    // TODO a binary field
                    .endObject();
        });
        // Flush so the documents are durably in the Lucene index before the restart.
        client().performRequest("POST", "/_flush");
    }
    assertBasicSearchWorks();
}
/**
 * Verifies basic searches against the {@code index} created by {@link #testSearch()}:
 * a plain search, a sorted search, and {@code exists} queries (including a field
 * whose name contains dots). All must report zero shard failures and the same
 * total hit count as the plain search.
 *
 * @throws IOException if a REST request fails
 */
void assertBasicSearchWorks() throws IOException {
    logger.info("--> testing basic search");
    Map<String, Object> response = toMap(client().performRequest("GET", "/index/_search"));
    assertNoFailures(response);
    // Baseline document count; every query below should match all documents.
    int numDocs1 = extractTotalHits(response);
    logger.info("Found {} in old index", numDocs1);

    logger.info("--> testing basic search with sort");
    assertTotalHits(numDocs1, "{ \"sort\": [{ \"int\" : \"asc\" }]}");

    logger.info("--> testing exists filter");
    assertTotalHits(numDocs1, "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}");
    // dotted field names must be searchable too
    assertTotalHits(numDocs1, "{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}");
}

/** Runs {@code searchRequestBody} against {@code /index/_search} and asserts no shard failures and the expected hit count. */
private void assertTotalHits(int expectedTotalHits, String searchRequestBody) throws IOException {
    Map<String, Object> response = toMap(client().performRequest("GET", "/index/_search", Collections.emptyMap(),
            new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
    assertNoFailures(response);
    assertEquals(expectedTotalHits, extractTotalHits(response));
}

/** Pulls {@code hits.total} out of a search response map. */
private static int extractTotalHits(Map<String, Object> response) {
    return (int) XContentMapValues.extractValue("hits.total", response);
}
/**
 * Reads the body of a REST {@link Response} and parses it as a JSON map.
 *
 * @param response the low-level REST client response to consume
 * @return the parsed body as nested maps/lists
 * @throws IOException if reading the entity fails
 */
static Map<String, Object> toMap(Response response) throws IOException {
    String body = EntityUtils.toString(response.getEntity());
    return XContentHelper.convertToMap(JsonXContent.jsonXContent, body, false);
}
/** Asserts that the given search response map reports zero failed shards. */
static void assertNoFailures(Map<String, Object> response) {
    Object failedShards = XContentMapValues.extractValue("_shards.failed", response);
    assertEquals(0, (int) failedShards);
}
/**
* Tests that a single document survives. Super basic smoke test.
*/
@@ -93,13 +188,14 @@ public class FullClusterRestartIT extends ESRestTestCase {
* or not we have one. */
shouldHaveTranslog = randomBoolean();
logger.info("Creating {} documents", count);
indexRandomDocuments(index, count, true);
indexRandomDocuments(index, count, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
createSnapshot();
// Explicitly flush so we're sure to have a bunch of documents in the Lucene index
client().performRequest("POST", "/_flush");
if (shouldHaveTranslog) {
// Update a few documents so we are sure to have a translog
indexRandomDocuments(index, count / 10, false /* Flushing here would invalidate the whole thing....*/);
indexRandomDocuments(index, count / 10, false /* Flushing here would invalidate the whole thing....*/,
i -> jsonBuilder().startObject().field("field", "value").endObject());
}
// Record how many documents we built so we can compare later
@@ -137,20 +233,11 @@ public class FullClusterRestartIT extends ESRestTestCase {
// TODO tests for upgrades after shrink. We've had trouble with shrink in the past.
// NOTE(review): this span is raw diff residue — it interleaves the OLD method
// (being removed by the commit) with the NEW one (being added), with the +/-
// markers stripped; as rendered it is not compilable Java. The comments below
// mark which lines belong to which side of the diff.
// OLD signature (removed): documents were built inline below.
private void indexRandomDocuments(String index, int count, boolean flushAllowed) throws IOException {
// NEW signature (added): a docSupplier lets each caller control the shape of the
// document indexed for position i.
private void indexRandomDocuments(String index, int count, boolean flushAllowed,
CheckedFunction<Integer, XContentBuilder, IOException> docSupplier) throws IOException {
for (int i = 0; i < count; i++) {
// OLD inline document construction (removed — superseded by docSupplier):
XContentBuilder doc = JsonXContent.contentBuilder().startObject(); {
doc.field("string", randomAlphaOfLength(10));
doc.field("int", randomInt(100));
doc.field("float", randomFloat());
// be sure to create a "proper" boolean (True, False) for the first document so that automapping is correct
doc.field("bool", i > 0 && supportsLenientBooleans ? randomLenientBoolean() : randomBoolean());
doc.field("field.with.dots", randomAlphaOfLength(10));
// TODO a binary field
}
doc.endObject();
client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
// OLD request-body argument (removed):
new StringEntity(doc.string(), ContentType.APPLICATION_JSON));
// NEW request-body argument (added): the document comes from docSupplier.
new StringEntity(docSupplier.apply(i).string(), ContentType.APPLICATION_JSON));
if (rarely()) {
// occasionally refresh so some documents become visible to search mid-run
client().performRequest("POST", "/_refresh");
}