Test: testRecoverFromPreviousVersion should refresh from new nodes if running against v<1.3.0
This works around a bug in older versions that could cause refreshes to be missed during relocations (#6545).

Also:
- Changed the test to keep track of document ids and report the missing ones.
- Removed the total count check from assertSearchHits so that per-id checks still run in case of a mismatch.
- Added a printable unique id part to the ids of dummy documents added by indexRandom. The current random unicode ids sometimes print as ???? in the logs, making them hard to trace.
Commit b82e2c7083 (parent 4da407a869)
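Roughly, the id-tracking pattern the diff below introduces, rewritten as a standalone sketch (plain Java, no Elasticsearch test scaffolding; the class and method names here are illustrative, not the ones used in the commit):

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

public class TrackedIdsSketch {
    // Counter appended to random ids so they stay readable in logs,
    // even when the random unicode prefix renders as "????".
    private static final AtomicInteger ID_GENERATOR = new AtomicInteger();

    static String traceableId(String randomPrefix) {
        return randomPrefix + ID_GENERATOR.incrementAndGet();
    }

    // Instead of only comparing hit counts, keep the expected ids and
    // report exactly which ones are missing from the results.
    static void assertAllIdsPresent(List<String> expectedIds, Set<String> returnedIds) {
        Set<String> missing = new HashSet<>(expectedIds);
        missing.removeAll(returnedIds);
        if (!missing.isEmpty()) {
            throw new AssertionError("Some expected ids were not found in search results: " + missing);
        }
    }

    public static void main(String[] args) {
        List<String> ids = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            ids.add(traceableId("doc-"));
        }
        // Simulate a search response that is missing the last document.
        Set<String> returned = new HashSet<>(ids.subList(0, 4));
        assertAllIdsPresent(ids, returned); // throws, naming the missing id
    }
}
```

The actual change wires this idea into the test, indexRandom, and assertSearchHits, as shown in the diff below.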
BasicBackwardsCompatibilityTest.java

@@ -39,6 +39,7 @@ import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.termvector.TermVectorResponse;
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.action.update.UpdateResponse;
@@ -48,6 +49,7 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.ImmutableSettings;
@@ -62,6 +64,7 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.ElasticsearchBackwardsCompatIntegrationTest;
+import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.junit.Test;
 
 import java.io.IOException;
@@ -94,12 +97,12 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
             final long version = randomIntBetween(0, Integer.MAX_VALUE);
             client().prepareIndex("test", "type1", id).setRouting(routingKey).setVersion(version).setVersionType(VersionType.EXTERNAL).setSource("field1", English.intToEnglish(i)).get();
             GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(version).get();
-            assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true));
+            assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
             assertThat(get.getVersion(), equalTo(version));
             final long nextVersion = version + randomIntBetween(0, Integer.MAX_VALUE);
             client().prepareIndex("test", "type1", id).setRouting(routingKey).setVersion(nextVersion).setVersionType(VersionType.EXTERNAL).setSource("field1", English.intToEnglish(i)).get();
             get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(nextVersion).get();
-            assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true));
+            assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
             assertThat(get.getVersion(), equalTo(nextVersion));
         }
     }
@@ -117,11 +120,11 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
             String id = Integer.toString(i);
             assertThat(id, client().prepareIndex("test", "type1", id).setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().isCreated(), is(true));
             GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(1).get();
-            assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true));
+            assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
             assertThat(get.getVersion(), equalTo(1l));
             client().prepareIndex("test", "type1", id).setRouting(routingKey).setSource("field1", English.intToEnglish(i)).execute().actionGet();
             get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(2).get();
-            assertThat("Document with ID " +id + " should exist but doesn't", get.isExists(), is(true));
+            assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
             assertThat(get.getVersion(), equalTo(2l));
         }
 
@@ -149,8 +152,8 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
     }
 
     @Test
+    @TestLogging("action.search.type:TRACE,action.support.replication:TRACE,cluster.service:TRACE,indices.recovery:TRACE,index.shard.service:TRACE")
     public void testRecoverFromPreviousVersion() throws ExecutionException, InterruptedException {
-
         if (backwardsCluster().numNewDataNodes() == 0) {
             backwardsCluster().startNewNode();
         }
@@ -158,10 +161,13 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         ensureYellow();
         assertAllShardsOnNodes("test", backwardsCluster().backwardsNodePattern());
         int numDocs = randomIntBetween(100, 150);
+        ArrayList<String> ids = new ArrayList<>();
         logger.info(" --> indexing [{}] docs", numDocs);
         IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
         for (int i = 0; i < numDocs; i++) {
-            docs[i] = client().prepareIndex("test", "type1", randomRealisticUnicodeOfLength(10) + String.valueOf(i)).setSource("field1", English.intToEnglish(i));
+            String id = randomRealisticUnicodeOfLength(10) + String.valueOf(i);
+            ids.add(id);
+            docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i));
         }
         indexRandom(true, docs);
         CountResponse countResponse = client().prepareCount().get();
@@ -179,9 +185,15 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         // sometimes index while relocating
         if (randomBoolean()) {
             for (int i = 0; i < numDocs; i++) {
-                docs[i] = client().prepareIndex("test", "type1", randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i)).setSource("field1", English.intToEnglish(numDocs + i));
+                String id = randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i);
+                ids.add(id);
+                docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i));
             }
             indexRandom(true, docs);
+            if (compatibilityVersion().before(Version.V_1_3_0)) {
+                // issue another refresh through a new node to side step issue #6545
+                assertNoFailures(backwardsCluster().internalCluster().dataNodeClient().admin().indices().prepareRefresh().setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
+            }
             numDocs *= 2;
         }
 
@@ -189,8 +201,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         ensureYellow("test");// move all shards to the new node (it waits on relocation)
         final int numIters = randomIntBetween(10, 20);
         for (int i = 0; i < numIters; i++) {
-            countResponse = client().prepareCount().get();
-            assertHitCount(countResponse, numDocs);
+            assertSearchHits(client().prepareSearch().setSize(ids.size()).get(), ids.toArray(new String[ids.size()]));
         }
         assertVersionCreated(compatibilityVersion(), "test");
     }
@@ -216,7 +227,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         }
         indexRandom(true, docs);
         backwardsCluster().allowOnAllNodes("test");
-        while(ensureYellow() != ClusterHealthStatus.GREEN) {
+        while (ensureYellow() != ClusterHealthStatus.GREEN) {
             backwardsCluster().startNewNode();
         }
         assertAllShardsOnNodes("test", backwardsCluster().newNodePattern());
@@ -233,7 +244,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
 
 
     public void assertSimpleSort(String... numericFields) {
-        for(String field : numericFields) {
+        for (String field : numericFields) {
             SearchResponse searchResponse = client().prepareSearch().addSort(field, SortOrder.ASC).get();
             SearchHit[] hits = searchResponse.getHits().getHits();
             assertThat(hits.length, greaterThan(0));
@@ -248,6 +259,20 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         }
     }
 
+    public void assertAllShardsOnNodes(String index, String pattern) {
+        ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
+        for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
+            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
+                for (ShardRouting shardRouting : indexShardRoutingTable) {
+                    if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) {
+                        String name = clusterState.nodes().get(shardRouting.currentNodeId()).name();
+                        assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true));
+                    }
+                }
+            }
+        }
+    }
+
     /**
      * Upgrades a single node to the current version
      */
@@ -292,7 +317,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
      */
     @Test
     public void testIndexRollingUpgrade() throws Exception {
-        String[] indices = new String[randomIntBetween(1,3)];
+        String[] indices = new String[randomIntBetween(1, 3)];
         for (int i = 0; i < indices.length; i++) {
             indices[i] = "test" + i;
             assertAcked(prepareCreate(indices[i]).setSettings(ImmutableSettings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings())));
@@ -331,7 +356,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         assertHitCount(countResponse, numDocs);
         assertSimpleSort("num_double", "num_int");
 
-        String[] newIndices = new String[randomIntBetween(1,3)];
+        String[] newIndices = new String[randomIntBetween(1, 3)];
 
         for (int i = 0; i < newIndices.length; i++) {
             newIndices[i] = "new_index" + i;
@@ -343,7 +368,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
 
     public void assertVersionCreated(Version version, String... indices) {
         GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indices).get();
-        ImmutableOpenMap<String,Settings> indexToSettings = getSettingsResponse.getIndexToSettings();
+        ImmutableOpenMap<String, Settings> indexToSettings = getSettingsResponse.getIndexToSettings();
         for (String index : indices) {
             Settings settings = indexToSettings.get(index);
             assertThat(settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null), notNullValue());
@@ -389,8 +414,8 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
      */
     @Test
     public void testExistsFilter() throws IOException, ExecutionException, InterruptedException {
-        assumeTrue("this test fails often with 1.0.3 skipping for now....",compatibilityVersion().onOrAfter(Version.V_1_1_0));
-        for (;;) {
+        assumeTrue("this test fails often with 1.0.3 skipping for now....", compatibilityVersion().onOrAfter(Version.V_1_1_0));
+        for (; ; ) {
            createIndex("test");
            ensureYellow();
            indexRandom(true,
@@ -472,7 +497,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
 
         SearchResponse searchResponse = client().prepareSearch("test").get();
         assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().totalHits(), equalTo((long)numDocs + 1));
+        assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs + 1));
 
         DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test").setQuery(QueryBuilders.termQuery("field", "value")).get();
         assertThat(deleteByQueryResponse.getIndices().size(), equalTo(1));
@@ -570,7 +595,7 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompatIntegrationTest {
         refresh();
 
         SearchResponse searchResponse = client().prepareSearch(indexOrAlias()).get();
-        assertThat(searchResponse.getHits().totalHits(), equalTo((long)numDocs - 1));
+        assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs - 1));
     }
 
     @Test
ElasticsearchIntegrationTest.java

@@ -118,6 +118,7 @@ import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 import java.util.*;
 import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
@@ -1293,9 +1294,9 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase {
             final int numBogusDocs = scaledRandomIntBetween(1, builders.size() * 2);
             final int unicodeLen = between(1, 10);
             for (int i = 0; i < numBogusDocs; i++) {
-                String id = randomRealisticUnicodeOfLength(unicodeLen);
+                String id = randomRealisticUnicodeOfLength(unicodeLen) + Integer.toString(dummmyDocIdGenerator.incrementAndGet());
                 String index = RandomPicks.randomFrom(random, indices);
-                bogusIds.add(new Tuple<String, String>(index, id));
+                bogusIds.add(new Tuple<>(index, id));
                 builders.add(client().prepareIndex(index, RANDOM_BOGUS_TYPE, id).setSource("{}"));
             }
         }
@@ -1352,9 +1353,10 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase {
         if (forceRefresh) {
             assertNoFailures(client().admin().indices().prepareRefresh(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
         }
-
     }
 
+    private AtomicInteger dummmyDocIdGenerator = new AtomicInteger();
+
     /** Disables translog flushing for the specified index */
     public static void disableTranslogFlush(String index) {
         Settings settings = ImmutableSettings.builder().put(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH, true).build();
ElasticsearchAssertions.java

@@ -146,14 +146,14 @@ public class ElasticsearchAssertions {
 
     public static void assertSearchHits(SearchResponse searchResponse, String... ids) {
         String shardStatus = formatShardStatus(searchResponse);
-        assertThat("Expected different hit count. " + shardStatus, searchResponse.getHits().hits().length, equalTo(ids.length));
 
         Set<String> idsSet = new HashSet<>(Arrays.asList(ids));
         for (SearchHit hit : searchResponse.getHits()) {
-            assertThat("Expected id: " + hit.getId() + " in the result but wasn't." + shardStatus, idsSet.remove(hit.getId()),
+            assertThat("id [" + hit.getId() + "] was found in search results but wasn't expected (type [" + hit.getType() + "], index [" + hit.index() + "])"
+                    + shardStatus, idsSet.remove(hit.getId()),
                     equalTo(true));
         }
-        assertThat("Expected ids: " + Arrays.toString(idsSet.toArray(new String[idsSet.size()])) + " in the result - result size differs."
+        assertThat("Some expected ids were not found in search results: " + Arrays.toString(idsSet.toArray(new String[idsSet.size()])) + "."
                 + shardStatus, idsSet.size(), equalTo(0));
         assertVersionSerializable(searchResponse);
     }