Add dummy docs injection to indexRandom

This commit adds `dummy docs` to `ElasticsearchIntegrationTest#indexRandom`.
It indexes documents with an empty body into the indices targeted by the given
docs and deletes them again after all docs have been indexed. This produces gaps
in the segments and enforces the usage of accept docs on lower levels, ensuring
the features under test also work with deleted documents.
Simon Willnauer 2014-05-19 17:23:14 +02:00
parent 579a79d1ac
commit fc28fbfada
3 changed files with 36 additions and 24 deletions
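
A minimal sketch of the idea, assuming the ElasticsearchIntegrationTest helpers (client(), randomIntBetween(), refresh()) and a made-up bogus type and id scheme; the actual implementation is in the ElasticsearchIntegrationTest.java hunks below:

    // Index a handful of empty "dummy" docs next to the real test docs, then delete
    // them again so the segments end up with deleted documents (gaps).
    Set<String> bogusIds = new HashSet<>();
    for (int i = 0; i < randomIntBetween(1, 5); i++) {
        String id = "bogus_" + i; // hypothetical id scheme
        bogusIds.add(id);
        client().prepareIndex("test", "RANDOM_BOGUS_TYPE______", id).setSource("{}").get();
    }
    // ... index the real test documents here ...
    for (String id : bogusIds) {
        client().prepareDelete("test", "RANDOM_BOGUS_TYPE______", id).get();
    }
    refresh(); // the deletes only become visible as gaps/accept docs after a refresh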

DecayFunctionScoreTests.java

@@ -86,9 +86,8 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
jsonBuilder().startObject().field("test", "value").startObject("loc").field("lat", 11 + i).field("lon", 22 + i)
.endObject().endObject()));
}
- IndexRequestBuilder[] builders = indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]);
- indexRandom(true, builders);
+ indexRandom(true, indexBuilders);
// Test Gauss
List<Float> lonlat = new ArrayList<>();
@@ -173,9 +172,8 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
indexBuilders.add(client().prepareIndex().setType("type1").setId(Integer.toString(i + 3)).setIndex("test")
.setSource(jsonBuilder().startObject().field("test", "value").field("num", 3.0 + i).endObject()));
}
- IndexRequestBuilder[] builders = indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]);
- indexRandom(true, builders);
+ indexRandom(true, indexBuilders);
// Test Gauss
@@ -254,9 +252,8 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
.setSource(
jsonBuilder().startObject().field("test", "value value").startObject("loc").field("lat", 11).field("lon", 20)
.endObject().endObject()));
- IndexRequestBuilder[] builders = indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]);
- indexRandom(true, builders);
+ indexRandom(false, indexBuilders); // force no dummy docs
refresh();
// Test Gauss
List<Float> lonlat = new ArrayList<>();
@@ -271,7 +268,7 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
SearchResponse sr = response.actionGet();
SearchHits sh = sr.getHits();
assertThat(sh.getTotalHits(), equalTo((long) (2)));
- assertThat(sh.getAt(0).getId(), equalTo("1"));
+ assertThat(sh.getAt(0).getId(), isOneOf("1"));
assertThat(sh.getAt(1).getId(), equalTo("2"));
// Test Exp
@@ -296,17 +293,13 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
.endObject().startObject("loc").field("type", "geo_point").endObject().endObject().endObject().endObject()));
ensureYellow();
- List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
- indexBuilders.add(client().prepareIndex()
+ client().prepareIndex()
.setType("type1")
.setId("1")
.setIndex("test")
.setSource(
jsonBuilder().startObject().field("test", "value").startObject("loc").field("lat", 20).field("lon", 11).endObject()
- .endObject()));
- IndexRequestBuilder[] builders = indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]);
- indexRandom(true, builders);
+ .endObject()).setRefresh(true).get();
GeoPoint point = new GeoPoint(20, 11);
ActionFuture<SearchResponse> response = client().search(
@@ -342,12 +335,8 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
.endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject()));
ensureYellow();
- List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
- indexBuilders.add(client().prepareIndex().setType("type1").setId("1").setIndex("test")
- .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()));
- IndexRequestBuilder[] builders = indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]);
- indexRandom(true, builders);
+ client().prepareIndex().setType("type1").setId("1").setIndex("test")
+ .setSource(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject()).setRefresh(true).get();
// function score should return 0.5 for this function
@@ -611,8 +600,7 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
jsonBuilder().startObject().field("test", "value").field("date", date).field("num", i).startObject("geo")
.field("lat", lat).field("lon", lon).endObject().endObject()));
}
- IndexRequestBuilder[] builders = indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]);
- indexRandom(true, builders);
+ indexRandom(true, indexBuilders);
List<Float> lonlat = new ArrayList<>();
lonlat.add(100f);
lonlat.add(110f);
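
The hunks above show two patterns: where only hit counts or relative order are asserted, the builder list is now passed straight to indexRandom(true, ...) and the injected dummy docs are harmless; where exact IDs or scores are asserted, the test opts out via indexRandom(false, ...) plus an explicit refresh(). A minimal sketch of the opt-out pattern, assuming the ElasticsearchIntegrationTest helpers:

    // Dummy docs would change index statistics (doc count, IDF), so score- or
    // id-sensitive assertions skip the injection; per the new indexRandom code
    // below, dummy docs are only injected when forceRefresh == true.
    indexRandom(false, indexBuilders); // no dummy docs, and no refresh either
    refresh();                         // make the indexed documents searchable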

SimpleQueryTests.java

@@ -70,14 +70,14 @@ public class SimpleQueryTests extends ElasticsearchIntegrationTest {
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
.startObject("_all").field("omit_norms", true).endObject()
.endObject().endObject())
- .setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(3, DEFAULT_MAX_NUM_SHARDS)));
+ .setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)); // only one shard otherwise IDF might be different for comparing scores
ensureGreen();
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumps"),
client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"),
client().prepareIndex("test", "type1", "3").setSource("field1", "quick"));
assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3l);
- SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("_all", "quick")).get();
+ SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("_all", "quick")).setExplain(true).get();
SearchHit[] hits = searchResponse.getHits().hits();
assertThat(hits.length, equalTo(3));
assertThat(hits[0].score(), allOf(equalTo(hits[1].getScore()), equalTo(hits[2].getScore())));
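
The single-shard setting matters because, with the default query-then-fetch search type, each shard computes IDF from its own documents, so identical documents can score differently depending on the shard they landed on. A minimal sketch of the two usual remedies, the first being what the test now does; SearchType.DFS_QUERY_THEN_FETCH is mentioned as general Elasticsearch usage, not something this commit changes:

    // Option 1: a single shard gives one set of index statistics, so scores are comparable.
    prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).get();

    // Option 2: keep several shards but gather global term statistics in a pre-phase.
    client().prepareSearch("test")
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setQuery(matchQuery("_all", "quick"))
            .get();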

ElasticsearchIntegrationTest.java

@@ -971,6 +971,8 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
indexRandom(forceRefresh, Arrays.asList(builders));
}
+ private static final String RANDOM_BOGUS_TYPE = "RANDOM_BOGUS_TYPE______";
/**
* Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either
* indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document
@@ -984,6 +986,21 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
for (IndexRequestBuilder builder : builders) {
indicesSet.add(builder.request().index());
}
+ Set<Tuple<String, String>> bogusIds = new HashSet<>();
+ if (random.nextBoolean() && !builders.isEmpty() && forceRefresh) {
+     // we only do this if we forceRefresh=true since we need to refresh to reflect the deletes
+     builders = new ArrayList<>(builders);
+     final String[] indices = indicesSet.toArray(new String[0]);
+     // inject some bogus docs
+     final int numBogusDocs = scaledRandomIntBetween(1, builders.size()*2);
+     final int unicodeLen = between(1, 10);
+     for (int i = 0; i < numBogusDocs; i++) {
+         String id = randomRealisticUnicodeOfLength(unicodeLen);
+         String index = RandomPicks.randomFrom(random, indices);
+         bogusIds.add(new Tuple<String, String>(index, id));
+         builders.add(client().prepareIndex(index, RANDOM_BOGUS_TYPE, id).setSource("{}"));
+     }
+ }
final String[] indices = indicesSet.toArray(new String[indicesSet.size()]);
Collections.shuffle(builders, random);
final CopyOnWriteArrayList<Tuple<IndexRequestBuilder, Throwable>> errors = new CopyOnWriteArrayList<>();
@@ -1026,9 +1043,16 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
}
}
assertThat(actualErrors, emptyIterable());
+ if (!bogusIds.isEmpty()) {
+     // delete the bogus types again - it might trigger merges or at least holes in the segments and enforces deleted docs!
+     for (Tuple<String, String> doc : bogusIds) {
+         client().prepareDelete(doc.v1(), RANDOM_BOGUS_TYPE, doc.v2()).get();
+     }
+ }
if (forceRefresh) {
assertNoFailures(client().admin().indices().prepareRefresh(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
}
}
private static CountDownLatch newLatch(List<CountDownLatch> latches) {
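
For context on the "accept docs on lower levels" wording in the commit message: once the dummy docs are deleted, any feature that walks postings per segment has to honor the live-docs bitset, or the deleted documents resurface. A rough illustration using plain Lucene 4.x APIs (not code from this commit), with the field and term made up:

    for (AtomicReaderContext ctx : searcher.getIndexReader().leaves()) {
        AtomicReader leaf = ctx.reader();
        Bits liveDocs = leaf.getLiveDocs();             // null if the segment has no deletes
        Terms terms = leaf.terms("test");               // hypothetical field
        if (terms == null) continue;
        TermsEnum termsEnum = terms.iterator(null);
        if (!termsEnum.seekExact(new BytesRef("value"))) continue;
        DocsEnum docs = termsEnum.docs(liveDocs, null); // passing liveDocs skips deleted docs
        for (int doc = docs.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docs.nextDoc()) {
            // only live (non-dummy) documents reach this point
        }
    }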