[TEST] Reduce the amount of docs being indexed.

Martijn van Groningen 2014-04-16 15:49:01 +07:00
parent 98deb5537f
commit 840d1b4b8e
4 changed files with 29 additions and 24 deletions
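
The changes below all follow one pattern: document counts that used to come from random().nextInt(...) with hard-coded bounds switched on TEST_NIGHTLY now come from scaledRandomIntBetween(min, max), which the test framework scales with its test-size multiplier, and the number of parent docs is capped by the number of unique values so the index stays small by default. A minimal before/after sketch of that sizing pattern, reusing the helpers the tests already inherit; the class and method names in the sketch are made up for illustration, not part of the commit:

    import org.elasticsearch.test.ElasticsearchLuceneTestCase;

    // Hypothetical sketch only. The real tests inherit random(), rarely(),
    // TEST_NIGHTLY and scaledRandomIntBetween() from the same base class.
    public class DocCountSizingSketch extends ElasticsearchLuceneTestCase {

        int numChildDocsOldStyle() {
            // Before: hard-coded bounds picked by an explicit nightly/non-nightly branch.
            if (rarely()) {
                return random().nextInt(TEST_NIGHTLY ? 100 : 25);
            }
            return random().nextInt(TEST_NIGHTLY ? 40 : 10);
        }

        int numChildDocsNewStyle() {
            // After: a single bound that the framework scales with the test-size
            // multiplier, so ordinary runs index far fewer documents.
            return scaledRandomIntBetween(0, 100);
        }
    }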

ChildrenConstantScoreQueryTests.java

@@ -59,7 +59,6 @@ import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.ElasticsearchLuceneTestCase;
-import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.elasticsearch.test.index.service.StubIndexService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.hamcrest.Description;
@@ -149,7 +148,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
                 .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                 .setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
         RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
-        int numUniqueChildValues = scaledRandomIntBetween(1, 10000);
+        int numUniqueChildValues = scaledRandomIntBetween(100, 2000);
         String[] childValues = new String[numUniqueChildValues];
         for (int i = 0; i < numUniqueChildValues; i++) {
             childValues[i] = Integer.toString(i);
@@ -157,7 +156,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
         IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
         int childDocId = 0;
-        int numParentDocs = scaledRandomIntBetween(1, 2000);
+        int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
         ObjectObjectOpenHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectOpenHashMap<>();
         for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
             boolean markParentAsDeleted = rarely();

ChildrenQueryTests.java

@@ -21,6 +21,7 @@ package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntOpenHashSet;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
@@ -29,6 +30,7 @@ import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.common.lucene.search.NotFilter;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.index.engine.Engine;
@@ -48,6 +50,7 @@ import org.junit.Test;
 import java.io.IOException;
 import java.util.Map;
 import java.util.NavigableMap;
+import java.util.Random;
 import java.util.TreeMap;

 import static org.elasticsearch.index.search.child.ChildrenConstantScoreQueryTests.assertBitSet;
@@ -81,8 +84,13 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testRandom() throws Exception {
         Directory directory = newDirectory();
-        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
-        int numUniqueChildValues = 1 + random().nextInt(TEST_NIGHTLY ? 6000 : 600);
+        final Random r = random();
+        final IndexWriterConfig iwc = LuceneTestCase.newIndexWriterConfig(r,
+                LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(r))
+                .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                .setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
+        RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
+        int numUniqueChildValues = scaledRandomIntBetween(100, 2000);
         String[] childValues = new String[numUniqueChildValues];
         for (int i = 0; i < numUniqueChildValues; i++) {
             childValues[i] = Integer.toString(i);
@@ -91,7 +99,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
         IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
         int childDocId = 0;
-        int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
+        int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
         ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectOpenHashMap<>();
         for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
             boolean markParentAsDeleted = rarely();
@@ -110,12 +118,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
             }
             indexWriter.addDocument(document);
-            int numChildDocs;
-            if (rarely()) {
-                numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
-            } else {
-                numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
-            }
+            int numChildDocs = scaledRandomIntBetween(0, 100);
             for (int i = 0; i < numChildDocs; i++) {
                 boolean markChildAsDeleted = rarely();
                 String childValue = childValues[random().nextInt(childValues.length)];

ParentConstantScoreQueryTests.java

@@ -88,14 +88,14 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
                 .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                 .setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
         RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
-        int numUniqueParentValues = scaledRandomIntBetween(1, 1000);
+        int numUniqueParentValues = scaledRandomIntBetween(100, 2000);
         String[] parentValues = new String[numUniqueParentValues];
         for (int i = 0; i < numUniqueParentValues; i++) {
             parentValues[i] = Integer.toString(i);
         }
         int childDocId = 0;
-        int numParentDocs = scaledRandomIntBetween(1, 2000);
+        int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
         ObjectObjectOpenHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectOpenHashMap<>();
         IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
         for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {

ParentQueryTests.java

@@ -21,6 +21,7 @@ package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntIntOpenHashMap;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
@@ -29,6 +30,7 @@ import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.common.lucene.search.NotFilter;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.index.engine.Engine;
@@ -47,6 +49,7 @@ import org.junit.Test;
 import java.io.IOException;
 import java.util.Map;
 import java.util.NavigableMap;
+import java.util.Random;
 import java.util.TreeMap;

 import static org.elasticsearch.index.search.child.ChildrenConstantScoreQueryTests.assertBitSet;
@@ -79,15 +82,20 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testRandom() throws Exception {
         Directory directory = newDirectory();
-        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
-        int numUniqueParentValues = 1 + random().nextInt(TEST_NIGHTLY ? 6000 : 600);
+        final Random r = random();
+        final IndexWriterConfig iwc = LuceneTestCase.newIndexWriterConfig(r,
+                LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(r))
+                .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+                .setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
+        RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
+        int numUniqueParentValues = scaledRandomIntBetween(100, 2000);
         String[] parentValues = new String[numUniqueParentValues];
         for (int i = 0; i < numUniqueParentValues; i++) {
             parentValues[i] = Integer.toString(i);
         }
         int childDocId = 0;
-        int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
+        int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
         ObjectObjectOpenHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectOpenHashMap<>();
         IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
         for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
@@ -103,12 +111,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
             }
             indexWriter.addDocument(document);
-            int numChildDocs;
-            if (rarely()) {
-                numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
-            } else {
-                numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
-            }
+            int numChildDocs = scaledRandomIntBetween(0, 100);
             if (parentDocId == numParentDocs - 1 && childIdToParentId.isEmpty()) {
                 // ensure there is at least one child in the index
                 numChildDocs = Math.max(1, numChildDocs);
@@ -181,7 +184,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
             // Simulate a child update
             if (random().nextBoolean()) {
-                int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
+                int numberOfUpdates = scaledRandomIntBetween(1, 5);
                 int[] childIds = childIdToParentId.keys().toArray();
                 for (int j = 0; j < numberOfUpdates; j++) {
                     int childId = childIds[random().nextInt(childIds.length)];