force default codec
reduce p/c searches and num child docs being generated
parent c7f6c5266d
commit 8fda8aaabf
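Note: the forceDefaultCodec() call added to each @BeforeClass below pins the index codec instead of letting the Lucene test framework pick a randomized one. Its body is not part of this commit; the following is a minimal sketch of what such a helper might look like, assuming Lucene's Codec API (the codec name is illustrative, not taken from the diff):

    import org.apache.lucene.codecs.Codec;

    // Hypothetical sketch only -- the real helper lives in ElasticsearchLuceneTestCase
    // and is not shown in this commit.
    protected static void forceDefaultCodec() {
        // Pin a known codec so the randomized-codec machinery of the Lucene test
        // framework cannot select a slow or exotic implementation for these tests.
        // "Lucene46" is an assumed name; the actual default depends on the Lucene version in use.
        Codec.setDefault(Codec.forName("Lucene46"));
    }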
@@ -66,6 +66,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 
     @BeforeClass
     public static void before() throws IOException {
+        forceDefaultCodec();
         SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
     }
 
@@ -138,7 +139,12 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
             }
             indexWriter.addDocument(document);
 
-            int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            int numChildDocs;
+            if (rarely()) {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            } else {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
+            }
             for (int i = 0; i < numChildDocs; i++) {
                 boolean markChildAsDeleted = rarely();
                 String childValue = childValues[random().nextInt(childValues.length)];
@@ -179,7 +185,9 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
 
         TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
-        for (String childValue : childValues) {
+        int max = numUniqueChildValues / 4;
+        for (int i = 0; i < max; i++) {
+            String childValue = childValues[random().nextInt(numUniqueChildValues)];
             TermQuery childQuery = new TermQuery(new Term("field1", childValue));
             int shortCircuitParentDocSet = random().nextInt(numParentDocs);
             Query query;
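The same two reductions repeat in every test class below: fewer child docs per parent (the rarely()/else branch) and a sampled search loop in place of one search per unique value. A self-contained sketch of that sampling pattern follows; everything except the max and numUniqueChildValues names is illustrative and not taken from the diff:

    import java.util.Random;

    public class SamplingSketch {
        public static void main(String[] args) {
            Random random = new Random();
            String[] childValues = {"red", "green", "blue", "yellow", "cyan", "magenta", "black", "white"};
            int numUniqueChildValues = childValues.length;

            // Run only a quarter as many searches as there are unique values,
            // each against a randomly picked value (duplicates are possible).
            int max = numUniqueChildValues / 4;
            for (int i = 0; i < max; i++) {
                String childValue = childValues[random.nextInt(numUniqueChildValues)];
                System.out.println("would search for child value: " + childValue);
            }
        }
    }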
@@ -53,6 +53,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
 
     @BeforeClass
     public static void before() throws IOException {
+        forceDefaultCodec();
         SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
     }
 
@@ -85,7 +86,12 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
             }
             indexWriter.addDocument(document);
 
-            int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            int numChildDocs;
+            if (rarely()) {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            } else {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
+            }
             for (int i = 0; i < numChildDocs; i++) {
                 boolean markChildAsDeleted = rarely();
                 String childValue = childValues[random().nextInt(childValues.length)];
@@ -130,7 +136,9 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
 
         TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
-        for (String childValue : childValues) {
+        int max = numUniqueChildValues / 4;
+        for (int i = 0; i < max; i++) {
+            String childValue = childValues[random().nextInt(numUniqueChildValues)];
             Query childQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", childValue)));
             int shortCircuitParentDocSet = random().nextInt(numParentDocs);
             ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
@@ -55,6 +55,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
 
     @BeforeClass
     public static void before() throws IOException {
+        forceDefaultCodec();
         SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
     }
 
@@ -89,7 +90,12 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
             }
             indexWriter.addDocument(document);
 
-            int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            int numChildDocs;
+            if (rarely()) {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            } else {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
+            }
             for (int i = 0; i < numChildDocs; i++) {
                 boolean markChildAsDeleted = rarely();
                 String child = Integer.toString(childDocId++);
@@ -129,7 +135,9 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
 
         TermFilter childrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
-        for (String parentValue : parentValues) {
+        int max = numUniqueParentValues / 4;
+        for (int i = 0; i < max; i++) {
+            String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
             TermQuery parentQuery = new TermQuery(new Term("field1", parentValue));
             Query query;
             boolean applyAcceptedDocs = random().nextBoolean();
@@ -53,6 +53,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
 
     @BeforeClass
    public static void before() throws IOException {
+        forceDefaultCodec();
         SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
     }
 
@@ -87,7 +88,12 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
             }
             indexWriter.addDocument(document);
 
-            int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            int numChildDocs;
+            if (rarely()) {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
+            } else {
+                numChildDocs = random().nextInt(TEST_NIGHTLY ? 40 : 10);
+            }
             for (int i = 0; i < numChildDocs; i++) {
                 String child = Integer.toString(childDocId++);
                 boolean markChildAsDeleted = rarely();
@@ -127,7 +133,9 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
 
         TermFilter childFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
-        for (String parentValue : parentValues) {
+        int max = numUniqueParentValues / 4;
+        for (int i = 0; i < max; i++) {
+            String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
             Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
             Query query = new ParentQuery(parentQuery,"parent", childFilter);
             BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());