The doc id set that drives the short circuit mechanism always needs to be wrapped with the live docs, because in certain scenarios the live docs aren't passed down via acceptDocs. For example, when a has_child query is used as the query part of a filtered query and the wrapped filter is cached, the short circuit mechanism counts down for deleted docs as well, which yields fewer results than expected. Relates to #4306
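
The fix relies on Lucene's BitsFilteredDocIdSet to re-apply the live docs defensively. A minimal sketch of the pattern in plain Lucene 4.x (the helper class and method names are ours, not part of this commit):

    import org.apache.lucene.index.AtomicReaderContext;
    import org.apache.lucene.search.BitsFilteredDocIdSet;
    import org.apache.lucene.search.DocIdSet;
    import org.apache.lucene.util.Bits;

    class LiveDocsSketch {
        // Re-apply deletions to a DocIdSet that may or may not have had them applied.
        // getLiveDocs() returns null when the segment has no deletions; wrap() treats
        // a null Bits as "all documents live" and returns the set unchanged.
        static DocIdSet applyLiveDocs(DocIdSet set, AtomicReaderContext context) {
            Bits liveDocs = context.reader().getLiveDocs();
            return BitsFilteredDocIdSet.wrap(set, liveDocs);
        }
    }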

commit 292e53fe77 (parent 27f9f9bdb4)

@@ -173,16 +173,17 @@ public class ChildrenConstantScoreQuery extends Query {
             }

             DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
-            if (DocIdSets.isEmpty(parentDocIdSet)) {
-                return null;
-            }
-            IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
-            if (idReaderTypeCache != null) {
-                DocIdSetIterator innerIterator = parentDocIdSet.iterator();
-                if (innerIterator != null) {
-                    ParentDocIdIterator parentDocIdIterator = new ParentDocIdIterator(innerIterator, collectedUids.v(), idReaderTypeCache);
-                    return ConstantScorer.create(parentDocIdIterator, this, queryWeight);
+            if (!DocIdSets.isEmpty(parentDocIdSet)) {
+                IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
+                // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
+                // count down (short circuit) logic will then work as expected.
+                parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());
+                if (idReaderTypeCache != null) {
+                    DocIdSetIterator innerIterator = parentDocIdSet.iterator();
+                    if (innerIterator != null) {
+                        ParentDocIdIterator parentDocIdIterator = new ParentDocIdIterator(innerIterator, collectedUids.v(), idReaderTypeCache);
+                        return ConstantScorer.create(parentDocIdIterator, this, queryWeight);
+                    }
                 }
             }
             return null;
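
Why the wrap is needed (the same reasoning applies to the ChildrenQuery hunk further down): a cached filter applies deletions only through the acceptDocs it is handed, so a DocIdSet obtained with acceptDocs == null can still contain deleted parents, and a countdown sized from that set overshoots. A hypothetical illustration in plain Lucene 4.x (the field name and setup are ours):

    import java.io.IOException;

    import org.apache.lucene.index.AtomicReaderContext;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.queries.TermFilter;
    import org.apache.lucene.search.CachingWrapperFilter;
    import org.apache.lucene.search.DocIdSet;
    import org.apache.lucene.search.Filter;

    class CachedFilterSketch {
        // The cached per-segment DocIdSet is built from the raw filter; deletions
        // only get intersected in when non-null acceptDocs are passed.
        static DocIdSet parentsIgnoringDeletes(AtomicReaderContext context) throws IOException {
            Filter cached = new CachingWrapperFilter(new TermFilter(new Term("_type", "parent")));
            return cached.getDocIdSet(context, null); // null acceptDocs: live docs not applied
        }
    }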

@@ -69,7 +69,7 @@ public class ChildrenQuery extends Query {
     public ChildrenQuery(String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet) {
         this.parentType = parentType;
         this.childType = childType;
-        this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
+        this.parentFilter = parentFilter;
         this.originalChildQuery = childQuery;
         this.scoreType = scoreType;
         this.shortCircuitParentDocSet = shortCircuitParentDocSet;

@@ -190,7 +190,7 @@ public class ChildrenQuery extends Query {

         private ParentWeight(Weight childWeight, Filter parentFilter, SearchContext searchContext, int remaining, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore, Recycler.V<ObjectIntOpenHashMap<HashedBytesArray>> uidToCount) {
             this.childWeight = childWeight;
-            this.parentFilter = parentFilter;
+            this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
             this.searchContext = searchContext;
             this.remaining = remaining;
             this.uidToScore = uidToScore;

@@ -226,7 +226,9 @@ public class ChildrenQuery extends Query {
             }

             IdReaderTypeCache idTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
-            DocIdSetIterator parentsIterator = parentsSet.iterator();
+            // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
+            // count down (short circuit) logic will then work as expected.
+            DocIdSetIterator parentsIterator = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();
             switch (scoreType) {
             case AVG:
                 return new AvgParentScorer(this, idTypeCache, uidToScore.v(), uidToCount.v(), parentsIterator);

@@ -247,7 +249,6 @@ public class ChildrenQuery extends Query {
        final IdReaderTypeCache idTypeCache;
        final DocIdSetIterator parentsIterator;

-        int remaining;
        int currentDocId = -1;
        float currentScore;

@@ -256,7 +257,6 @@ public class ChildrenQuery extends Query {
            this.idTypeCache = idTypeCache;
            this.parentsIterator = parentsIterator;
            this.uidToScore = uidToScore;
-            this.remaining = uidToScore.size();
        }

        @Override

@@ -279,8 +279,7 @@ public class ChildrenQuery extends Query {
        @Override
        public int nextDoc() throws IOException {
            if (remaining == 0) {
-                currentDocId = NO_MORE_DOCS;
-                return NO_MORE_DOCS;
+                return currentDocId = NO_MORE_DOCS;
            }

            while (true) {

@@ -303,8 +302,7 @@ public class ChildrenQuery extends Query {
        @Override
        public int advance(int target) throws IOException {
            if (remaining == 0) {
-                currentDocId = NO_MORE_DOCS;
-                return NO_MORE_DOCS;
+                return currentDocId = NO_MORE_DOCS;
            }

            currentDocId = parentsIterator.advance(target);

@@ -341,8 +339,7 @@ public class ChildrenQuery extends Query {
        @Override
        public int nextDoc() throws IOException {
            if (remaining == 0) {
-                currentDocId = NO_MORE_DOCS;
-                return NO_MORE_DOCS;
+                return currentDocId = NO_MORE_DOCS;
            }

            while (true) {

@@ -365,8 +362,7 @@ public class ChildrenQuery extends Query {
        @Override
        public int advance(int target) throws IOException {
            if (remaining == 0) {
-                currentDocId = NO_MORE_DOCS;
-                return NO_MORE_DOCS;
+                return currentDocId = NO_MORE_DOCS;
            }

            currentDocId = parentsIterator.advance(target);
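
The four nextDoc()/advance(..) hunks above all make the same cosmetic change. In Java an assignment expression evaluates to the assigned value, so both forms below set the field and return the sentinel; the collapsed form just avoids restating it (a standalone sketch, not code from the commit):

    class ShortCircuitSketch {
        static final int NO_MORE_DOCS = Integer.MAX_VALUE; // the sentinel DocIdSetIterator uses
        int currentDocId = -1;

        int exhaustedTwoStatements() {
            currentDocId = NO_MORE_DOCS;
            return NO_MORE_DOCS;
        }

        int exhaustedOneStatement() {
            return currentDocId = NO_MORE_DOCS; // same effect in a single statement
        }
    }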

@@ -425,7 +421,6 @@ public class ChildrenQuery extends Query {
                    break;
                case AVG:
                    assert false : "AVG has its own collector";
-
                default:
                    assert false : "Are we missing a score type here? -- " + scoreType;
                    break;

@@ -31,7 +31,9 @@ import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.lucene.search.NotFilter;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
+import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;

@@ -129,20 +131,25 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
            childValues[i] = Integer.toString(i);
        }

-        IntOpenHashSet initialDeletedParentIds = new IntOpenHashSet();
+        IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
        int childDocId = 0;
        int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
        ObjectObjectOpenHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectOpenHashMap<String, NavigableSet<String>>();
        for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
            boolean markParentAsDeleted = rarely();
+            boolean filterMe = rarely();
            String parent = Integer.toString(parentDocId);
            Document document = new Document();
            document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
            document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
            if (markParentAsDeleted) {
-                initialDeletedParentIds.add(parentDocId);
+                filteredOrDeletedDocs.add(parentDocId);
                document.add(new StringField("delete", "me", Field.Store.NO));
            }
+            if (filterMe) {
+                filteredOrDeletedDocs.add(parentDocId);
+                document.add(new StringField("filter", "me", Field.Store.NO));
+            }
            indexWriter.addDocument(document);

            int numChildDocs;

@@ -172,7 +179,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
            } else {
                childValueToParentIds.put(childValue, parentIds = new TreeSet<String>());
            }
-            if (!markParentAsDeleted) {
+            if (!markParentAsDeleted && !filterMe) {
                parentIds.add(parent);
            }
        }

@@ -191,6 +198,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));

        Filter rawParentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
+        Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
        int max = numUniqueChildValues / 4;
        for (int i = 0; i < max; i++) {
            // Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact

@@ -202,6 +210,14 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
                parentFilter = rawParentFilter;
            }

+            // Using this in FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
+            Filter filterMe;
+            if (random().nextBoolean()) {
+                filterMe = SearchContext.current().filterCache().cache(rawFilterMe);
+            } else {
+                filterMe = rawFilterMe;
+            }
+
            // Simulate a parent update
            if (random().nextBoolean()) {
                int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);

@@ -209,7 +225,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
                int parentId;
                do {
                    parentId = random().nextInt(numParentDocs);
-                } while (initialDeletedParentIds.contains(parentId));
+                } while (filteredOrDeletedDocs.contains(parentId));

                String parentUid = Uid.createUid("parent", Integer.toString(parentId));
                indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, parentUid));

@@ -244,6 +260,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
                )
            );
            }
+            query = new XFilteredQuery(query, filterMe);
            BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
            searcher.search(query, collector);
            FixedBitSet actualResult = collector.getResult();
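
The added XFilteredQuery wrapper is what recreates the bug scenario from the commit message: the has_child query sits on the query side of a filtered query, while the (randomly cached) filter side is the one that receives the accepted docs. Sketched with plain Lucene types, using MatchAllDocsQuery as a stand-in for the has_child query (the setup is illustrative, not from the test):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.queries.TermFilter;
    import org.apache.lucene.search.FilteredQuery;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Query;

    class FilteredQuerySketch {
        static Query scenario() {
            Query hasChildStandIn = new MatchAllDocsQuery();
            // Depending on the filter strategy, the wrapped query's Weight#scorer can be
            // created with acceptDocs == null because the filter side handles them instead.
            return new FilteredQuery(hasChildStandIn, new TermFilter(new Term("filter", "me")));
        }
    }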

@@ -158,7 +158,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
            parentFilter = rawParentFilter;
            }

-            // Using this in FQ, will invoke / test the Scorer#advance(..)
+            // Using this in FQ, will invoke / test the Scorer#advance(..) and also let the Weight#scorer not get live docs as acceptedDocs
            Filter filterMe;
            if (random().nextBoolean()) {
                filterMe = SearchContext.current().filterCache().cache(rawFilterMe);