Added random filtered query logic to the random parent query tests.

Fixed a bug in ParentQuery.ChildScorer#advance(): the parent value was resolved via the _uid field, while the _parent field should have been checked.
Martijn van Groningen 2013-12-02 00:18:44 +01:00
parent 292e53fe77
commit 079ac79617
3 changed files with 42 additions and 8 deletions
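
Before the diffs, a minimal sketch of what the corrected ChildScorer#advance(..) boils down to. Only the two diffed lines below are verbatim; the surrounding iterator and score-map scaffolding is assumed for illustration:

// Sketch, not the verbatim method: assumed scaffolding around the fixed lookup.
public int advance(int target) throws IOException {
    currentChildDoc = childrenIterator.advance(target);
    if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
        return currentChildDoc;
    }
    // Bug: idByDoc(..) resolved the child's own _uid, which was then matched
    // against a map keyed by parent uid. parentIdByDoc(..) resolves the child's
    // _parent value, which is what the map actually contains.
    HashedBytesArray uid = typeCache.parentIdByDoc(currentChildDoc);
    if (uid == null) {
        return nextDoc();
    }
    if (uidToScore.containsKey(uid)) {     // assumed HPPC map of parent uid -> score
        currentScore = uidToScore.lget();  // lget(): value from the last containsKey hit
        return currentChildDoc;
    }
    return nextDoc();
}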

ParentQuery.java

@@ -58,7 +58,7 @@ public class ParentQuery extends Query {
     public ParentQuery(Query parentQuery, String parentType, Filter childrenFilter) {
         this.originalParentQuery = parentQuery;
         this.parentType = parentType;
-        this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
+        this.childrenFilter = childrenFilter;
     }

     @Override
@@ -137,7 +137,7 @@ public class ParentQuery extends Query {
             return Queries.newMatchNoDocsQuery().createWeight(searcher);
         }
-        ChildWeight childWeight = new ChildWeight(parentQuery.createWeight(searcher), searchContext, uidToScore);
+        ChildWeight childWeight = new ChildWeight(parentQuery.createWeight(searcher), childrenFilter, searchContext, uidToScore);
         searchContext.addReleasable(childWeight);
         return childWeight;
     }
@@ -181,11 +181,13 @@ public class ParentQuery extends Query {
     private class ChildWeight extends Weight implements Releasable {

         private final Weight parentWeight;
+        private final Filter childrenFilter;
         private final SearchContext searchContext;
         private final Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;

-        private ChildWeight(Weight parentWeight, SearchContext searchContext, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore) {
+        private ChildWeight(Weight parentWeight, Filter childrenFilter, SearchContext searchContext, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore) {
             this.parentWeight = parentWeight;
+            this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
             this.searchContext = searchContext;
             this.uidToScore = uidToScore;
         }
@@ -291,7 +293,7 @@
             if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
                 return currentChildDoc;
             }
-            HashedBytesArray uid = typeCache.idByDoc(currentChildDoc);
+            HashedBytesArray uid = typeCache.parentIdByDoc(currentChildDoc);
             if (uid == null) {
                 return nextDoc();
             }
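
Also worth noting from the hunks above: the ApplyAcceptedDocsFilter wrapping moves out of the ParentQuery constructor and into ChildWeight, which now receives the raw children filter. A hedged sketch of how the weight side might consume it; apart from the field and names visible in the diff, the method shape and helpers here are assumptions:

// Hypothetical excerpt of ChildWeight#scorer(..) under Lucene 4.x (assumed, not
// the verbatim class). Wrapping with ApplyAcceptedDocsFilter at this level means
// a cached children filter still honors the acceptDocs passed to the weight.
@Override
public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
    DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, acceptDocs);
    if (DocIdSets.isEmpty(childrenDocSet)) {
        return null;
    }
    // assumed lookup path for the per-segment id cache
    IdReaderTypeCache typeCache = searchContext.idCache().reader(context.reader()).type(parentType);
    return new ChildScorer(this, uidToScore.v(), childrenDocSet.iterator(), typeCache);
}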

ParentConstantScoreQueryTests.java

@@ -31,7 +31,9 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.search.NotFilter;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
+import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
@@ -101,6 +103,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
         }
         for (int i = 0; i < numChildDocs; i++) {
             boolean markChildAsDeleted = rarely();
+            boolean filterMe = rarely();
             String child = Integer.toString(childDocId++);
             document = new Document();
@@ -110,16 +113,19 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
             if (markChildAsDeleted) {
                 document.add(new StringField("delete", "me", Field.Store.NO));
             }
+            if (filterMe) {
+                document.add(new StringField("filter", "me", Field.Store.NO));
+            }
             indexWriter.addDocument(document);
-            if (!markChildAsDeleted) {
+            if (!markParentAsDeleted) {
                 NavigableSet<String> childIds;
                 if (parentValueToChildDocIds.containsKey(parentValue)) {
                     childIds = parentValueToChildDocIds.lget();
                 } else {
                     parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<String>());
                 }
-                if (!markParentAsDeleted) {
+                if (!markChildAsDeleted && !filterMe) {
                     childIdToParentId.put(Integer.valueOf(child), parentDocId);
                     childIds.add(child);
                 }
@@ -139,6 +145,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
         TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
+        Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
         int max = numUniqueParentValues / 4;
         for (int i = 0; i < max; i++) {
             // Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
@@ -150,6 +157,14 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
                 childrenFilter = rawChildrenFilter;
             }
+            // Using this in FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
+            Filter filterMe;
+            if (random().nextBoolean()) {
+                filterMe = SearchContext.current().filterCache().cache(rawFilterMe);
+            } else {
+                filterMe = rawFilterMe;
+            }
             // Simulate a child update
             if (random().nextBoolean()) {
                 int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
@@ -190,6 +205,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
                 )
             );
         }
+            query = new XFilteredQuery(query, filterMe);
             BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
             searcher.search(query, collector);
             FixedBitSet actualResult = collector.getResult();
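
The test-side counterpart of the fix: wrapping the query in XFilteredQuery makes the searcher leapfrog between the filter's iterator and the query's scorer, which drives Scorer#advance(..), and the randomly cached filter variant additionally exercises a Weight#scorer call that does not receive live docs as acceptDocs. Condensed from the hunks above (same names as in the diff):

// Randomly exclude children tagged "filter":"me", then let the filter drive
// advance(..) on the wrapped query's scorer.
Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
Filter filterMe = random().nextBoolean()
        ? SearchContext.current().filterCache().cache(rawFilterMe)  // cached: tests the no-live-docs path
        : rawFilterMe;
query = new XFilteredQuery(query, filterMe);
searcher.search(query, collector);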

ParentQueryTests.java

@@ -29,6 +29,8 @@ import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.search.NotFilter;
+import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
@@ -99,6 +101,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
         for (int i = 0; i < numChildDocs; i++) {
             String child = Integer.toString(childDocId++);
             boolean markChildAsDeleted = rarely();
+            boolean filterMe = rarely();
             document = new Document();
             document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", child), Field.Store.YES));
             document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
@@ -106,16 +109,19 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
             if (markChildAsDeleted) {
                 document.add(new StringField("delete", "me", Field.Store.NO));
             }
+            if (filterMe) {
+                document.add(new StringField("filter", "me", Field.Store.NO));
+            }
             indexWriter.addDocument(document);
-            if (!markChildAsDeleted) {
+            if (!markParentAsDeleted) {
                 NavigableMap<String, Float> childIdToScore;
                 if (parentValueToChildIds.containsKey(parentValue)) {
                     childIdToScore = parentValueToChildIds.lget();
                 } else {
                     parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<String, Float>());
                 }
-                if (!markParentAsDeleted) {
+                if (!markChildAsDeleted && !filterMe) {
                     assert !childIdToScore.containsKey(child);
                     childIdToScore.put(child, 1f);
                     childIdToParentId.put(Integer.valueOf(child), parentDocId);
@@ -136,6 +142,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
         TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
+        Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
         int max = numUniqueParentValues / 4;
         for (int i = 0; i < max; i++) {
             // Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
@@ -147,6 +154,14 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
                 childrenFilter = rawChildrenFilter;
             }
+            // Using this in FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
+            Filter filterMe;
+            if (random().nextBoolean()) {
+                filterMe = SearchContext.current().filterCache().cache(rawFilterMe);
+            } else {
+                filterMe = rawFilterMe;
+            }
             // Simulate a child update
             if (random().nextBoolean()) {
                 int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
@@ -176,6 +191,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
             String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
             Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
             Query query = new ParentQuery(parentQuery,"parent", childrenFilter);
+            query = new XFilteredQuery(query, filterMe);
             BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
             int numHits = 1 + random().nextInt(25);
             TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
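
A closing note on why the expected-score bookkeeping in this test needs no adjustment: a filtered query only narrows the matching documents, while the scores still come from the wrapped ParentQuery. A tiny hedged illustration with plain Lucene 4.x types (not part of the commit; XFilteredQuery is assumed to mirror FilteredQuery here):

// For any doc that survives the filter, the filtered query's score equals the
// inner query's score, since FilteredQuery delegates scoring to the inner scorer.
Query inner = new TermQuery(new Term("field1", "value0"));
Query filtered = new FilteredQuery(inner, new TermFilter(new Term("filter", "no")));
// searcher.explain(filtered, doc).getValue() == searcher.explain(inner, doc).getValue()
// whenever doc matches both the inner query and the filter.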