force default codec

reduce p/c searches and num child docs being generated
This commit is contained in:
Martijn van Groningen 2013-11-24 15:35:11 +01:00
parent c7f6c5266d
commit 8fda8aaabf
4 changed files with 40 additions and 8 deletions

View File

@@ -66,6 +66,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
@BeforeClass
public static void before() throws IOException {
forceDefaultCodec();
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@@ -138,7 +139,12 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
}
indexWriter.addDocument(document);
int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
int numChildDocs;
if (rarely()) {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
} else {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
}
for (int i = 0; i < numChildDocs; i++) {
boolean markChildAsDeleted = rarely();
String childValue = childValues[random().nextInt(childValues.length)];
@@ -179,7 +185,9 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
for (String childValue : childValues) {
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
String childValue = childValues[random().nextInt(numUniqueChildValues)];
TermQuery childQuery = new TermQuery(new Term("field1", childValue));
int shortCircuitParentDocSet = random().nextInt(numParentDocs);
Query query;

View File

@@ -53,6 +53,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
@BeforeClass
public static void before() throws IOException {
forceDefaultCodec();
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@@ -85,7 +86,12 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
}
indexWriter.addDocument(document);
int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
int numChildDocs;
if (rarely()) {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
} else {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
}
for (int i = 0; i < numChildDocs; i++) {
boolean markChildAsDeleted = rarely();
String childValue = childValues[random().nextInt(childValues.length)];
@@ -130,7 +136,9 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
for (String childValue : childValues) {
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
String childValue = childValues[random().nextInt(numUniqueChildValues)];
Query childQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", childValue)));
int shortCircuitParentDocSet = random().nextInt(numParentDocs);
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];

View File

@@ -55,6 +55,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
@BeforeClass
public static void before() throws IOException {
forceDefaultCodec();
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@@ -89,7 +90,12 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
}
indexWriter.addDocument(document);
int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
int numChildDocs;
if (rarely()) {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
} else {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
}
for (int i = 0; i < numChildDocs; i++) {
boolean markChildAsDeleted = rarely();
String child = Integer.toString(childDocId++);
@@ -129,7 +135,9 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
TermFilter childrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
for (String parentValue : parentValues) {
int max = numUniqueParentValues / 4;
for (int i = 0; i < max; i++) {
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
TermQuery parentQuery = new TermQuery(new Term("field1", parentValue));
Query query;
boolean applyAcceptedDocs = random().nextBoolean();

View File

@@ -53,6 +53,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
@BeforeClass
public static void before() throws IOException {
forceDefaultCodec();
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@@ -87,7 +88,12 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
}
indexWriter.addDocument(document);
int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
int numChildDocs;
if (rarely()) {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
} else {
numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
}
for (int i = 0; i < numChildDocs; i++) {
String child = Integer.toString(childDocId++);
boolean markChildAsDeleted = rarely();
@@ -127,7 +133,9 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
TermFilter childFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
for (String parentValue : parentValues) {
int max = numUniqueParentValues / 4;
for (int i = 0; i < max; i++) {
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
Query query = new ParentQuery(parentQuery,"parent", childFilter);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());