lucene 4: upgraded o.e.index.search.nested package. Also fixed an issue with liveDocs handling in the child package.

Martijn van Groningen, 2012-10-28 17:04:10 +01:00 (committed by Shay Banon)
parent a49078dfc1
commit cdf1fc8981
8 changed files with 165 additions and 120 deletions
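
Note on the liveDocs fixes below: in Lucene 4 a set bit in liveDocs/acceptDocs means the document is live and may be returned, so the negated checks that the HasChildFilter and HasParentFilter hunks correct were dropping every live document and keeping deleted ones. A minimal sketch of that contract (illustration only, not part of this commit; the class and method names are hypothetical):

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.util.Bits;

    public class AcceptDocsContract {

        // A doc may match only if it is live AND accepted by the caller's filter.
        static boolean mayMatch(AtomicReader reader, Bits acceptDocs, int doc) {
            // getLiveDocs() returns null when a segment has no deletions,
            // meaning every doc is live.
            Bits liveDocs = reader.getLiveDocs();
            boolean live = liveDocs == null || liveDocs.get(doc);
            // A null acceptDocs means "no external filtering".
            boolean accepted = acceptDocs == null || acceptDocs.get(doc);
            return live && accepted;
        }
    }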

HasChildFilter.java

@@ -178,7 +178,7 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
         }

         public boolean get(int doc) {
-            return !acceptDocs.get(doc) && parents.contains(typeCache.idByDoc(doc));
+            return acceptDocs.get(doc) && parents.contains(typeCache.idByDoc(doc));
         }
     }

HasParentFilter.java

@@ -143,7 +143,7 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
         }

         public boolean get(int doc) {
-            return !acceptDocs.get(doc) && parents.contains(idReaderTypeCache.parentIdByDoc(doc));
+            return acceptDocs.get(doc) && parents.contains(idReaderTypeCache.parentIdByDoc(doc));
         }
     }

@@ -229,7 +229,7 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
         }

         public boolean get(int doc) {
-            if (acceptDocs.get(doc) || doc == -1) {
+            if (!acceptDocs.get(doc) || doc == -1) {
                 return false;
             }

TopChildrenQuery.java

@@ -138,7 +138,7 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
         for (AtomicReaderContext atomicReaderContext : context.searcher().getIndexReader().leaves()) {
             AtomicReader indexReader = atomicReaderContext.reader();
             int parentDocId = context.idCache().reader(indexReader).docById(parentType, parentId);
-            if (parentDocId != -1 && !indexReader.getLiveDocs().get(parentDocId)) {
+            if (parentDocId != -1 && indexReader.getLiveDocs().get(parentDocId)) {
                 // we found a match, add it and break
                 TIntObjectHashMap<ParentDoc> readerParentDocs = parentDocsPerReader.get(indexReader.getCoreCacheKey());
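
A caveat on the corrected condition: in Lucene 4, AtomicReader.getLiveDocs() returns null for a segment with no deletions, so calling get() on it unconditionally can throw a NullPointerException. A hedged, null-safe sketch of the same check (an assumption, not what this commit ships):

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.util.Bits;

    public class LiveParentCheck {

        // Hypothetical null-safe variant of the condition above: a null
        // liveDocs means the segment has no deletions, i.e. every doc is live.
        static boolean isLiveParent(AtomicReader indexReader, int parentDocId) {
            Bits liveDocs = indexReader.getLiveDocs();
            return parentDocId != -1 && (liveDocs == null || liveDocs.get(parentDocId));
        }
    }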

BlockJoinQuery.java

@@ -19,15 +19,20 @@
 package org.elasticsearch.index.search.nested;

+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.common.lucene.docset.FixedBitDocSet;
 import org.elasticsearch.common.lucene.search.NoopCollector;

 import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Locale;
 import java.util.Set;

 /**
@@ -75,8 +80,6 @@ public class BlockJoinQuery extends Query {

     public static enum ScoreMode {None, Avg, Max, Total}
-    ;

     private final Filter parentsFilter;
     private final Query childQuery;
@@ -112,7 +115,7 @@ public class BlockJoinQuery extends Query {
     }

     @Override
-    public Weight createWeight(Searcher searcher) throws IOException {
+    public Weight createWeight(IndexSearcher searcher) throws IOException {
         return new BlockJoinWeight(this, childQuery.createWeight(searcher), parentsFilter, scoreMode, childCollector);
     }
@@ -138,24 +141,19 @@ public class BlockJoinQuery extends Query {
         }

         @Override
-        public float getValue() {
-            return childWeight.getValue();
+        public float getValueForNormalization() throws IOException {
+            return childWeight.getValueForNormalization() * joinQuery.getBoost() * joinQuery.getBoost();
         }

         @Override
-        public float sumOfSquaredWeights() throws IOException {
-            return childWeight.sumOfSquaredWeights() * joinQuery.getBoost() * joinQuery.getBoost();
+        public void normalize(float norm, float topLevelBoost) {
+            childWeight.normalize(norm, topLevelBoost * joinQuery.getBoost());
         }

         @Override
-        public void normalize(float norm) {
-            childWeight.normalize(norm * joinQuery.getBoost());
-        }
-
-        @Override
-        public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder, boolean topScorer) throws IOException {
+        public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
             // Pass scoreDocsInOrder true, topScorer false to our sub:
-            final Scorer childScorer = childWeight.scorer(reader, true, false);
+            final Scorer childScorer = childWeight.scorer(context, true, false, null);
             if (childScorer == null) {
                 // No matches
@@ -168,7 +166,7 @@ public class BlockJoinQuery extends Query {
                 return null;
             }

-            DocIdSet parents = parentsFilter.getDocIdSet(reader);
+            DocIdSet parents = parentsFilter.getDocIdSet(context, null);
             // TODO NESTED: We have random access in ES, not sure I understand what can be gain?
             // TODO: once we do random-access filters we can
             // generalize this:
@@ -186,18 +184,22 @@ public class BlockJoinQuery extends Query {
             // CHANGE:
             if (childCollector != null) {
-                childCollector.setNextReader(reader, 0);
+                childCollector.setNextReader(context);
                 childCollector.setScorer(childScorer);
             }

-            return new BlockJoinScorer(this, childScorer, (FixedBitSet) parents, firstChildDoc, scoreMode, childCollector);
+            return new BlockJoinScorer(this, childScorer, (FixedBitSet) parents, firstChildDoc, scoreMode, childCollector, acceptDocs);
         }

         @Override
-        public Explanation explain(IndexReader reader, int doc) throws IOException {
-            // TODO
-            throw new UnsupportedOperationException(getClass().getName() +
-                    " cannot explain match on parent document");
+        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
+            BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, true, false, context.reader().getLiveDocs());
+            if (scorer != null) {
+                if (scorer.advance(doc) == doc) {
+                    return scorer.explain(context.docBase);
+                }
+            }
+            return new ComplexExplanation(false, 0.0f, "Not a match");
         }
@Override @Override
@@ -210,21 +212,25 @@ public class BlockJoinQuery extends Query {
         private final Scorer childScorer;
         private final FixedBitSet parentBits;
         private final ScoreMode scoreMode;
+        private final Bits acceptDocs; // LUCENE 4 UPGRADE: Why not make the parentBits already be filtered by acceptDocs?
         private final Collector childCollector;
         private int parentDoc = -1;
+        private int prevParentDoc;
         private float parentScore;
+        private float parentFreq;
         private int nextChildDoc;
         private int[] pendingChildDocs = new int[5];
         private float[] pendingChildScores;
         private int childDocUpto;

-        public BlockJoinScorer(Weight weight, Scorer childScorer, FixedBitSet parentBits, int firstChildDoc, ScoreMode scoreMode, Collector childCollector) {
+        public BlockJoinScorer(Weight weight, Scorer childScorer, FixedBitSet parentBits, int firstChildDoc, ScoreMode scoreMode, Collector childCollector, Bits acceptDocs) {
             super(weight);
             //System.out.println("Q.init firstChildDoc=" + firstChildDoc);
             this.parentBits = parentBits;
             this.childScorer = childScorer;
             this.scoreMode = scoreMode;
+            this.acceptDocs = acceptDocs;
             this.childCollector = childCollector;
             if (scoreMode != ScoreMode.None) {
                 pendingChildScores = new float[5];
@@ -233,11 +239,8 @@ public class BlockJoinQuery extends Query {
         }

         @Override
-        public void visitSubScorers(Query parent, BooleanClause.Occur relationship,
-                                    ScorerVisitor<Query, Query, Scorer> visitor) {
-            super.visitSubScorers(parent, relationship, visitor);
-            //childScorer.visitSubScorers(weight.getQuery(), BooleanClause.Occur.MUST, visitor);
-            childScorer.visitScorers(visitor);
+        public Collection<ChildScorer> getChildren() {
+            return Collections.singleton(new ChildScorer(childScorer, "BLOCK_JOIN"));
         }

         int getChildCount() {
@@ -271,35 +274,56 @@ public class BlockJoinQuery extends Query {
         public int nextDoc() throws IOException {
             //System.out.println("Q.nextDoc() nextChildDoc=" + nextChildDoc);

+            // Loop until we hit a parentDoc that's accepted
+            while (true) {
             if (nextChildDoc == NO_MORE_DOCS) {
                 //System.out.println("  end");
                 return parentDoc = NO_MORE_DOCS;
             }

-            // Gather all children sharing the same parent as nextChildDoc
+            // Gather all children sharing the same parent as
+            // nextChildDoc
             parentDoc = parentBits.nextSetBit(nextChildDoc);
             //System.out.println("  parentDoc=" + parentDoc);
             assert parentDoc != -1;

+            //System.out.println("  nextChildDoc=" + nextChildDoc);
+            if (acceptDocs != null && !acceptDocs.get(parentDoc)) {
+                // Parent doc not accepted; skip child docs until
+                // we hit a new parent doc:
+                do {
+                    nextChildDoc = childScorer.nextDoc();
+                } while (nextChildDoc < parentDoc);
+                continue;
+            }
+
             float totalScore = 0;
+            float totalFreq = 0;
             float maxScore = Float.NEGATIVE_INFINITY;
+            float maxFreq = 0;

             childDocUpto = 0;
             do {
                 //System.out.println("  c=" + nextChildDoc);
                 if (pendingChildDocs.length == childDocUpto) {
                     pendingChildDocs = ArrayUtil.grow(pendingChildDocs);
-                    if (scoreMode != ScoreMode.None) {
-                        pendingChildScores = ArrayUtil.grow(pendingChildScores);
-                    }
+                }
+                if (scoreMode != ScoreMode.None && pendingChildScores.length == childDocUpto) {
+                    pendingChildScores = ArrayUtil.grow(pendingChildScores);
                 }
                 pendingChildDocs[childDocUpto] = nextChildDoc;
                 if (scoreMode != ScoreMode.None) {
                     // TODO: specialize this into dedicated classes per-scoreMode
                     final float childScore = childScorer.score();
+                    final float childFreq = childScorer.freq();
                     pendingChildScores[childDocUpto] = childScore;
                     maxScore = Math.max(childScore, maxScore);
+                    maxFreq = Math.max(childFreq, maxFreq);
                     totalScore += childScore;
+                    totalFreq += childFreq;
                 }
// CHANGE: // CHANGE:
@@ -308,7 +332,6 @@ public class BlockJoinQuery extends Query {
                 childDocUpto++;
                 nextChildDoc = childScorer.nextDoc();
             } while (nextChildDoc < parentDoc);
-            //System.out.println("  nextChildDoc=" + nextChildDoc);

             // Parent & child docs are supposed to be orthogonal:
             assert nextChildDoc != parentDoc;
@@ -316,12 +339,15 @@ public class BlockJoinQuery extends Query {
             switch(scoreMode) {
             case Avg:
                 parentScore = totalScore / childDocUpto;
+                parentFreq = totalFreq / childDocUpto;
                 break;
             case Max:
                 parentScore = maxScore;
+                parentFreq = maxFreq;
                 break;
             case Total:
                 parentScore = totalScore;
+                parentFreq = totalFreq;
                 break;
             case None:
                 break;
@@ -330,6 +356,7 @@ public class BlockJoinQuery extends Query {
             //System.out.println("  return parentDoc=" + parentDoc);
             return parentDoc;
             }
+        }

         @Override
         public int docID() {
@@ -341,6 +368,11 @@ public class BlockJoinQuery extends Query {
             return parentScore;
         }

+        @Override
+        public float freq() throws IOException {
+            return parentFreq;
+        }
+
         @Override
         public int advance(int parentTarget) throws IOException {
@@ -359,7 +391,7 @@ public class BlockJoinQuery extends Query {
                 return nextDoc();
             }

-            final int prevParentDoc = parentBits.prevSetBit(parentTarget - 1);
+            prevParentDoc = parentBits.prevSetBit(parentTarget - 1);
             //System.out.println("  rolled back to prevParentDoc=" + prevParentDoc + " vs parentDoc=" + parentDoc);
             assert prevParentDoc >= parentDoc;
@@ -377,6 +409,15 @@ public class BlockJoinQuery extends Query {
             //System.out.println("  return nextParentDoc=" + nd);
             return nd;
         }

+        public Explanation explain(int docBase) throws IOException {
+            int start = docBase + prevParentDoc + 1; // +1 b/c prevParentDoc is previous parent doc
+            int end = docBase + parentDoc - 1; // -1 b/c parentDoc is parent doc
+            return new ComplexExplanation(
+                    true, score(), String.format(Locale.ROOT, "Score based on child doc range from %d to %d", start, end)
+            );
+        }
     }

     @Override
@@ -427,8 +468,8 @@ public class BlockJoinQuery extends Query {
     }

     @Override
-    public Object clone() {
-        return new BlockJoinQuery((Query) origChildQuery.clone(),
+    public Query clone() {
+        return new BlockJoinQuery(origChildQuery.clone(),
                 parentsFilter,
                 scoreMode).setCollector(childCollector);
     }
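
For orientation, a minimal usage sketch of the upgraded query, assuming only the constructor visible in clone() above; the field name, value, and filter are placeholders:

    import java.io.IOException;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.TopDocs;
    import org.elasticsearch.index.search.nested.BlockJoinQuery;

    public class BlockJoinUsageSketch {

        // parentsFilter must yield a FixedBitSet over parent docs: the scorer
        // above casts the filter's DocIdSet straight to FixedBitSet.
        static TopDocs searchParentsByChildren(IndexSearcher searcher, Filter parentsFilter) throws IOException {
            Query childQuery = new TermQuery(new Term("nested.field", "value")); // hypothetical nested field/value
            Query joinQuery = new BlockJoinQuery(childQuery, parentsFilter, BlockJoinQuery.ScoreMode.Avg);
            return searcher.search(joinQuery, 10);
        }
    }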

IncludeNestedDocsQuery.java

@@ -1,12 +1,15 @@
 package org.elasticsearch.index.search.nested;

+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.common.lucene.docset.FixedBitDocSet;

 import java.io.IOException;
+import java.util.Collection;
 import java.util.Set;

 /**
@@ -48,7 +51,7 @@ public class IncludeNestedDocsQuery extends Query {
     }

     @Override
-    public Weight createWeight(Searcher searcher) throws IOException {
+    public Weight createWeight(IndexSearcher searcher) throws IOException {
         return new IncludeNestedDocsWeight(parentQuery, parentQuery.createWeight(searcher), parentFilter);
     }
@@ -70,30 +73,25 @@ public class IncludeNestedDocsQuery extends Query {
         }

         @Override
-        public float getValue() {
-            return parentWeight.getValue();
+        public void normalize(float norm, float topLevelBoost) {
+            parentWeight.normalize(norm, topLevelBoost);
         }

         @Override
-        public float sumOfSquaredWeights() throws IOException {
-            return parentWeight.sumOfSquaredWeights() * parentQuery.getBoost() * parentQuery.getBoost();
+        public float getValueForNormalization() throws IOException {
+            return parentWeight.getValueForNormalization(); // this query is never boosted so just delegate...
         }

         @Override
-        public void normalize(float norm) {
-            parentWeight.normalize(norm * parentQuery.getBoost());
-        }
-
-        @Override
-        public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder, boolean topScorer) throws IOException {
-            final Scorer parentScorer = parentWeight.scorer(reader, true, false);
+        public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
+            final Scorer parentScorer = parentWeight.scorer(context, true, false, acceptDocs);
             // no matches
             if (parentScorer == null) {
                 return null;
             }

-            DocIdSet parents = parentsFilter.getDocIdSet(reader);
+            DocIdSet parents = parentsFilter.getDocIdSet(context, acceptDocs);
             if (parents == null) {
                 // No matches
                 return null;
@@ -114,8 +112,8 @@ public class IncludeNestedDocsQuery extends Query {
         }

         @Override
-        public Explanation explain(IndexReader reader, int doc) throws IOException {
-            return null;
+        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
+            return null; // Query is used internally and not by users, so explain can be empty
         }

         @Override
@@ -154,12 +152,10 @@ public class IncludeNestedDocsQuery extends Query {
         }

         @Override
-        public void visitSubScorers(Query parent, BooleanClause.Occur relationship, ScorerVisitor<Query, Query, Scorer> visitor) {
-            super.visitSubScorers(parent, relationship, visitor);
-            parentScorer.visitScorers(visitor);
+        public Collection<ChildScorer> getChildren() {
+            return parentScorer.getChildren();
         }

-        @Override
         public int nextDoc() throws IOException {
             if (currentParentPointer == NO_MORE_DOCS) {
                 return (currentDoc = NO_MORE_DOCS);
@@ -187,7 +183,6 @@ public class IncludeNestedDocsQuery extends Query {
             return currentDoc;
         }

-        @Override
         public int advance(int target) throws IOException {
             if (target == NO_MORE_DOCS) {
                 return (currentDoc = NO_MORE_DOCS);
@@ -224,6 +219,10 @@ public class IncludeNestedDocsQuery extends Query {
             return parentScorer.score();
         }

+        public float freq() throws IOException {
+            return parentScorer.freq();
+        }
+
         public int docID() {
             return currentDoc;
         }
@@ -269,8 +268,8 @@ public class IncludeNestedDocsQuery extends Query {
     }

     @Override
-    public Object clone() {
-        Query clonedQuery = (Query) origParentQuery.clone();
+    public Query clone() {
+        Query clonedQuery = origParentQuery.clone();
         return new IncludeNestedDocsQuery(clonedQuery, this);
     }
 }
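
Both Weight implementations in this commit follow Lucene 4's two-phase normalization: sumOfSquaredWeights() became getValueForNormalization(), and normalize(float) became normalize(float norm, float topLevelBoost), with boosts folded in as the norm is pushed down the Weight tree. A simplified sketch of what the searcher does with these methods (an illustration under assumptions; the real IndexSearcher routes the norm through the Similarity):

    import java.io.IOException;

    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Weight;

    public class NormalizationSketch {

        // Simplified mirror of weight normalization: collect the value for
        // normalization bottom-up, then push the norm plus accumulated boosts
        // back down via normalize(norm, topLevelBoost).
        static Weight normalizedWeight(IndexSearcher searcher, Query query) throws IOException {
            Weight weight = query.createWeight(searcher);
            float v = weight.getValueForNormalization();
            float norm = (float) (1.0 / Math.sqrt(v)); // stand-in for Similarity#queryNorm
            if (Float.isInfinite(norm) || Float.isNaN(norm)) {
                norm = 1.0f;
            }
            weight.normalize(norm, 1.0f); // top-level boost starts at 1
            return weight;
        }
    }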

NestedChildrenCollector.java

@@ -19,6 +19,8 @@
 package org.elasticsearch.index.search.nested;

+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
@@ -47,8 +49,6 @@ public class NestedChildrenCollector extends FacetCollector {
     private FixedBitSet parentDocs;
-    private IndexReader currentReader;

     public NestedChildrenCollector(FacetCollector collector, Filter parentFilter, Filter childFilter) {
         this.collector = collector;
         this.parentFilter = parentFilter;
@@ -72,11 +72,12 @@ public class NestedChildrenCollector extends FacetCollector {
     }

     @Override
-    public void setNextReader(IndexReader reader, int docBase) throws IOException {
-        collector.setNextReader(reader, docBase);
-        currentReader = reader;
-        childDocs = DocSets.convert(reader, childFilter.getDocIdSet(reader));
-        DocIdSet docIdSet = parentFilter.getDocIdSet(reader);
+    public void setNextReader(AtomicReaderContext context) throws IOException {
+        collector.setNextReader(context);
+        // Can use null as acceptedDocs here, since only live doc ids are pushed to the collect method.
+        DocIdSet docIdSet = parentFilter.getDocIdSet(context, null);
+        // In ES, if a parent doc is deleted its children are deleted too, so acceptedDocs can also be null here.
+        childDocs = DocSets.convert(context.reader(), childFilter.getDocIdSet(context, null));
         if (docIdSet == null) {
             parentDocs = null;
         } else if (docIdSet instanceof FixedBitDocSet) {
@@ -98,7 +99,7 @@ public class NestedChildrenCollector extends FacetCollector {
         }
         int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
         for (int i = (parentDoc - 1); i > prevParentDoc; i--) {
-            if (!currentReader.isDeleted(i) && childDocs.get(i)) {
+            if (childDocs.get(i)) {
                 collector.collect(i);
             }
         }
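
The collect() loop above relies on the block-join index layout: nested (child) docs are indexed contiguously, immediately before their parent, so the doc ids strictly between two set parent bits are exactly one parent's children. A small self-contained sketch of that invariant (class and method names are hypothetical):

    import org.apache.lucene.util.FixedBitSet;

    public class BlockLayoutSketch {

        // Children of parentDoc occupy the open range (prevParentDoc, parentDoc);
        // prevSetBit returns the previous parent's doc id, or -1 for the first block.
        static int childCount(FixedBitSet parentDocs, int parentDoc) {
            int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
            return parentDoc - prevParentDoc - 1;
        }
    }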

NestedDocsFilter.java

@@ -19,11 +19,13 @@
 package org.elasticsearch.index.search.nested;

+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.PrefixFilter;
+import org.apache.lucene.util.Bits;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;

 import java.io.IOException;
@@ -41,8 +43,8 @@ public class NestedDocsFilter extends Filter {
     }

     @Override
-    public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
-        return filter.getDocIdSet(reader);
+    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
+        return filter.getDocIdSet(context, acceptDocs);
     }

     @Override

NonNestedDocsFilter.java

@@ -19,11 +19,13 @@
 package org.elasticsearch.index.search.nested;

+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.PrefixFilter;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
@@ -42,14 +44,14 @@ public class NonNestedDocsFilter extends Filter {
     }

     @Override
-    public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
-        DocIdSet docSet = filter.getDocIdSet(reader);
+    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
+        DocIdSet docSet = filter.getDocIdSet(context, acceptDocs);
         if (docSet == null || docSet == DocIdSet.EMPTY_DOCIDSET) {
             // will almost never happen, and we need an OpenBitSet for the parent filter in
             // BlockJoinQuery, we cache it anyhow...
-            docSet = new FixedBitSet(reader.maxDoc());
+            docSet = new FixedBitSet(context.reader().maxDoc());
         }
-        ((FixedBitSet) docSet).flip(0, reader.maxDoc());
+        ((FixedBitSet) docSet).flip(0, context.reader().maxDoc());
         return docSet;
     }