LUCENE-4273: fix DocsEnum freq flag consistent with DPEnum flags

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1367623 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2012-07-31 15:51:24 +00:00
parent 72525cb1df
commit 5244693ebf
92 changed files with 307 additions and 500 deletions

View File

@ -83,6 +83,11 @@ API Changes
recognized flags are DocsAndPositionsEnum.FLAG_PAYLOADS and
DocsAndPositionsEnum.FLAG_OFFSETS (Robert Muir, Mike McCandless)
* LUCENE-4273: When pulling a DocsEnum, you can pass an int flags
instead of the previous boolean needsFreqs; consistent with the changes
for DocsAndPositionsEnum in LUCENE-4230. Currently the only flag
is DocsEnum.FLAG_FREQS. (Robert Muir, Mike McCandless)
Optimizations
* LUCENE-4171: Performance improvements to Packed64.

View File

@ -103,7 +103,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
new BytesRef("Q36"),
MultiFields.getLiveDocs(reader),
null,
false);
0);
assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
td = _TestUtil.docs(random(),
reader,
@ -111,7 +111,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
new BytesRef("Q37"),
MultiFields.getLiveDocs(reader),
null,
false);
0);
assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
}

View File

@ -500,7 +500,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
TermsEnum termsEnum = terms.iterator(null);
DocsEnum docs = null;
while(termsEnum.next() != null) {
docs = _TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(reader), docs, true);
docs = _TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(reader), docs, DocsEnum.FLAG_FREQS);
while(docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
totalTokenCount2 += docs.freq();
}

View File

@ -683,14 +683,11 @@ public class BlockTermsReader extends FieldsProducer {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
//System.out.println("BTR.docs this=" + this);
if (needsFreqs && fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY) {
return null;
}
decodeMetaData();
//System.out.println("BTR.docs: state.docFreq=" + state.docFreq);
return postingsReader.docs(fieldInfo, state, liveDocs, reuse, needsFreqs);
return postingsReader.docs(fieldInfo, state, liveDocs, reuse, flags);
}
@Override

View File

@ -893,12 +893,9 @@ public class BlockTreeTermsReader extends FieldsProducer {
}
@Override
public DocsEnum docs(Bits skipDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(Bits skipDocs, DocsEnum reuse, int flags) throws IOException {
currentFrame.decodeMetaData();
if (needsFreqs && fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY) {
return null;
}
return postingsReader.docs(fieldInfo, currentFrame.termState, skipDocs, reuse, needsFreqs);
return postingsReader.docs(fieldInfo, currentFrame.termState, skipDocs, reuse, flags);
}
@Override
@ -2115,10 +2112,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
}
@Override
public DocsEnum docs(Bits skipDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
if (needsFreqs && fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY) {
return null;
}
public DocsEnum docs(Bits skipDocs, DocsEnum reuse, int flags) throws IOException {
assert !eof;
//if (DEBUG) {
//System.out.println("BTTR.docs seg=" + segment);
@ -2127,7 +2121,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
//if (DEBUG) {
//System.out.println(" state=" + currentFrame.state);
//}
return postingsReader.docs(fieldInfo, currentFrame.state, skipDocs, reuse, needsFreqs);
return postingsReader.docs(fieldInfo, currentFrame.state, skipDocs, reuse, flags);
}
@Override

View File

@ -51,7 +51,7 @@ public abstract class PostingsReaderBase implements Closeable {
/** Must fully consume state, since after this call that
* TermState may be reused. */
public abstract DocsEnum docs(FieldInfo fieldInfo, BlockTermState state, Bits skipDocs, DocsEnum reuse, boolean needsFreqs) throws IOException;
public abstract DocsEnum docs(FieldInfo fieldInfo, BlockTermState state, Bits skipDocs, DocsEnum reuse, int flags) throws IOException;
/** Must fully consume state, since after this call that
* TermState may be reused. */

View File

@ -98,7 +98,7 @@ public abstract class TermsConsumer {
while((term = termsEnum.next()) != null) {
// We can pass null for liveDocs, because the
// mapping enum will skip the non-live docs:
docsEnumIn = (MultiDocsEnum) termsEnum.docs(null, docsEnumIn, false);
docsEnumIn = (MultiDocsEnum) termsEnum.docs(null, docsEnumIn, 0);
if (docsEnumIn != null) {
docsEnum.reset(docsEnumIn);
final PostingsConsumer postingsConsumer = startTerm(term);
@ -126,7 +126,7 @@ public abstract class TermsConsumer {
while((term = termsEnum.next()) != null) {
// We can pass null for liveDocs, because the
// mapping enum will skip the non-live docs:
docsAndFreqsEnumIn = (MultiDocsEnum) termsEnum.docs(null, docsAndFreqsEnumIn, true);
docsAndFreqsEnumIn = (MultiDocsEnum) termsEnum.docs(null, docsAndFreqsEnumIn);
assert docsAndFreqsEnumIn != null;
docsAndFreqsEnum.reset(docsAndFreqsEnumIn);
final PostingsConsumer postingsConsumer = startTerm(term);

View File

@ -18,6 +18,7 @@ package org.apache.lucene.codecs.lucene40;
*/
import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
@ -218,7 +219,7 @@ public class Lucene40PostingsReader extends PostingsReaderBase {
}
@Override
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
if (canReuse(reuse, liveDocs)) {
// if (DEBUG) System.out.println("SPR.docs ts=" + termState);
return ((SegmentDocsEnumBase) reuse).reset(fieldInfo, (StandardTermState)termState);
@ -351,13 +352,16 @@ public class Lucene40PostingsReader extends PostingsReaderBase {
start = -1;
count = 0;
freq = 1;
if (indexOmitsTF) {
Arrays.fill(freqs, 1);
}
maxBufferedDocId = -1;
return this;
}
@Override
public final int freq() {
assert !indexOmitsTF;
return freq;
}

View File

@ -478,7 +478,7 @@ public class Lucene40TermVectorsReader extends TermVectorsReader {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs /* ignored */) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags /* ignored */) throws IOException {
TVDocsEnum docsEnum;
if (reuse != null && reuse instanceof TVDocsEnum) {
docsEnum = (TVDocsEnum) reuse;

View File

@ -316,7 +316,7 @@ public class DirectPostingsFormat extends PostingsFormat {
if (hasPos) {
docsAndPositionsEnum = termsEnum.docsAndPositions(null, docsAndPositionsEnum);
} else {
docsEnum = termsEnum.docs(null, docsEnum, hasFreq);
docsEnum = termsEnum.docs(null, docsEnum);
}
final TermAndSkip ent;
@ -781,11 +781,7 @@ public class DirectPostingsFormat extends PostingsFormat {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) {
if (needsFreqs && !hasFreq) {
return null;
}
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
// TODO: implement reuse, something like Pulsing:
// it's hairy!
@ -1381,11 +1377,7 @@ public class DirectPostingsFormat extends PostingsFormat {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) {
if (needsFreqs && !hasFreq) {
return null;
}
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
// TODO: implement reuse, something like Pulsing:
// it's hairy!
@ -1501,7 +1493,6 @@ public class DirectPostingsFormat extends PostingsFormat {
@Override
public int freq() {
assert false;
return 1;
}
@ -1876,7 +1867,11 @@ public class DirectPostingsFormat extends PostingsFormat {
@Override
public int freq() {
return freqs[upto];
if (freqs == null) {
return 1;
} else {
return freqs[upto];
}
}
@Override

View File

@ -344,6 +344,7 @@ public class MemoryPostingsFormat extends PostingsFormat {
docID = -1;
accum = 0;
docUpto = 0;
freq = 1;
payloadLen = 0;
this.numDocs = numDocs;
return this;
@ -428,7 +429,6 @@ public class MemoryPostingsFormat extends PostingsFormat {
@Override
public int freq() {
assert indexOptions != IndexOptions.DOCS_ONLY;
return freq;
}
}
@ -696,13 +696,11 @@ public class MemoryPostingsFormat extends PostingsFormat {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
decodeMetaData();
FSTDocsEnum docsEnum;
if (needsFreqs && field.getIndexOptions() == IndexOptions.DOCS_ONLY) {
return null;
} else if (reuse == null || !(reuse instanceof FSTDocsEnum)) {
if (reuse == null || !(reuse instanceof FSTDocsEnum)) {
docsEnum = new FSTDocsEnum(field.getIndexOptions(), field.hasPayloads());
} else {
docsEnum = (FSTDocsEnum) reuse;

View File

@ -178,7 +178,7 @@ public class PulsingPostingsReader extends PostingsReaderBase {
}
@Override
public DocsEnum docs(FieldInfo field, BlockTermState _termState, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(FieldInfo field, BlockTermState _termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
PulsingTermState termState = (PulsingTermState) _termState;
if (termState.postingsSize != -1) {
PulsingDocsEnum postings;
@ -202,11 +202,11 @@ public class PulsingPostingsReader extends PostingsReaderBase {
return postings.reset(liveDocs, termState);
} else {
if (reuse instanceof PulsingDocsEnum) {
DocsEnum wrapped = wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, getOther(reuse), needsFreqs);
DocsEnum wrapped = wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, getOther(reuse), flags);
setOther(wrapped, reuse); // wrapped.other = reuse
return wrapped;
} else {
return wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, reuse, needsFreqs);
return wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, reuse, flags);
}
}
}
@ -282,6 +282,7 @@ public class PulsingPostingsReader extends PostingsReaderBase {
postings.reset(postingsBytes, 0, termState.postingsSize);
docID = -1;
accum = 0;
freq = 1;
payloadLength = 0;
this.liveDocs = liveDocs;
return this;
@ -349,7 +350,6 @@ public class PulsingPostingsReader extends PostingsReaderBase {
@Override
public int freq() throws IOException {
assert indexOptions != IndexOptions.DOCS_ONLY;
return freq;
}

View File

@ -258,7 +258,7 @@ public class SepPostingsReader extends PostingsReaderBase {
}
@Override
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState _termState, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState _termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
final SepTermState termState = (SepTermState) _termState;
SepDocsEnum docsEnum;
if (reuse == null || !(reuse instanceof SepDocsEnum)) {
@ -366,6 +366,7 @@ public class SepPostingsReader extends PostingsReaderBase {
count = 0;
doc = -1;
accum = 0;
freq = 1;
skipped = false;
return this;
@ -399,7 +400,6 @@ public class SepPostingsReader extends PostingsReaderBase {
@Override
public int freq() throws IOException {
assert !omitTF;
return freq;
}

View File

@ -194,17 +194,14 @@ class SimpleTextFieldsReader extends FieldsProducer {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
if (needsFreqs && indexOptions == IndexOptions.DOCS_ONLY) {
return null;
}
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
SimpleTextDocsEnum docsEnum;
if (reuse != null && reuse instanceof SimpleTextDocsEnum && ((SimpleTextDocsEnum) reuse).canReuse(SimpleTextFieldsReader.this.in)) {
docsEnum = (SimpleTextDocsEnum) reuse;
} else {
docsEnum = new SimpleTextDocsEnum();
}
return docsEnum.reset(docsStart, liveDocs, !needsFreqs);
return docsEnum.reset(docsStart, liveDocs, indexOptions == IndexOptions.DOCS_ONLY);
}
@Override
@ -254,6 +251,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
in.seek(fp);
this.omitTF = omitTF;
docID = -1;
tf = 1;
return this;
}
@ -264,7 +262,6 @@ class SimpleTextFieldsReader extends FieldsProducer {
@Override
public int freq() throws IOException {
assert !omitTF;
return tf;
}

View File

@ -357,10 +357,10 @@ public class SimpleTextTermVectorsReader extends TermVectorsReader {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
// TODO: reuse
SimpleTVDocsEnum e = new SimpleTVDocsEnum();
e.reset(liveDocs, needsFreqs ? current.getValue().freq : -1);
e.reset(liveDocs, (flags & DocsEnum.FLAG_FREQS) == 0 ? 1 : current.getValue().freq);
return e;
}

View File

@ -116,9 +116,19 @@ public abstract class AtomicReader extends IndexReader {
}
/** Returns {@link DocsEnum} for the specified field &
* term. This may return null, if either the field or
* term. This will return null if either the field or
* term does not exist. */
public final DocsEnum termDocsEnum(Bits liveDocs, String field, BytesRef term, boolean needsFreqs) throws IOException {
public final DocsEnum termDocsEnum(Bits liveDocs, String field, BytesRef term) throws IOException {
return termDocsEnum(liveDocs, field, term, DocsEnum.FLAG_FREQS);
}
/** Returns {@link DocsEnum} for the specified field &
* term, with control over whether freqs are required.
* Some codecs may be able to optimize their
* implementation when freqs are not required. This will
* return null if the field or term does not
* exist. See {@link TermsEnum#docs(Bits,DocsEnum,int)}. */
public final DocsEnum termDocsEnum(Bits liveDocs, String field, BytesRef term, int flags) throws IOException {
assert field != null;
assert term != null;
final Fields fields = fields();
@ -127,7 +137,7 @@ public abstract class AtomicReader extends IndexReader {
if (terms != null) {
final TermsEnum termsEnum = terms.iterator(null);
if (termsEnum.seekExact(term, true)) {
return termsEnum.docs(liveDocs, null, needsFreqs);
return termsEnum.docs(liveDocs, null, flags);
}
}
}
@ -135,7 +145,7 @@ public abstract class AtomicReader extends IndexReader {
}
/** Returns {@link DocsAndPositionsEnum} for the specified
* field & term. This will return null if either the
* field & term. This will return null if the
* field or term does not exist or positions weren't indexed.
* @see #termPositionsEnum(Bits, String, BytesRef, int) */
public final DocsAndPositionsEnum termPositionsEnum(Bits liveDocs, String field, BytesRef term) throws IOException {
@ -147,7 +157,7 @@ public abstract class AtomicReader extends IndexReader {
* field & term, with control over whether offsets and payloads are
* required. Some codecs may be able to optimize their
* implementation when offsets and/or payloads are not required.
* This will return null, if either the field or term
* This will return null if the field or term
* does not exist or positions weren't indexed. See
* {@link TermsEnum#docsAndPositions(Bits,DocsAndPositionsEnum,int)}. */
public final DocsAndPositionsEnum termPositionsEnum(Bits liveDocs, String field, BytesRef term, int flags) throws IOException {

View File

@ -396,7 +396,8 @@ class BufferedDeletesStream {
// System.out.println(" term=" + term);
if (termsEnum.seekExact(term.bytes(), false)) {
DocsEnum docsEnum = termsEnum.docs(rld.getLiveDocs(), docs, false);
// we don't need term frequencies for this
DocsEnum docsEnum = termsEnum.docs(rld.getLiveDocs(), docs, 0);
//System.out.println("BDS: got docsEnum=" + docsEnum);
if (docsEnum != null) {

View File

@ -755,8 +755,7 @@ public class CheckIndex {
status.totFreq += docFreq;
sumDocFreq += docFreq;
docs = termsEnum.docs(liveDocs, docs, false);
docsAndFreqs = termsEnum.docs(liveDocs, docsAndFreqs, true);
docs = termsEnum.docs(liveDocs, docs);
postings = termsEnum.docsAndPositions(liveDocs, postings);
if (hasOrd) {
@ -778,25 +777,17 @@ public class CheckIndex {
status.termCount++;
final DocsEnum docs2;
final DocsEnum docsAndFreqs2;
final boolean hasPositions;
final boolean hasFreqs;
final boolean hasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
// if we are checking vectors, we have freqs implicitly
final boolean hasFreqs = isVectors || fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
// if we are checking vectors, offsets are a free-for-all anyway
final boolean hasOffsets = isVectors || fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
if (postings != null) {
docs2 = postings;
docsAndFreqs2 = postings;
hasPositions = true;
hasFreqs = true;
} else if (docsAndFreqs != null) {
docs2 = docsAndFreqs;
docsAndFreqs2 = docsAndFreqs;
hasPositions = false;
hasFreqs = true;
} else {
docs2 = docs;
docsAndFreqs2 = null;
hasPositions = false;
hasFreqs = false;
}
int lastDoc = -1;
@ -810,7 +801,7 @@ public class CheckIndex {
visitedDocs.set(doc);
int freq = -1;
if (hasFreqs) {
freq = docsAndFreqs2.freq();
freq = docs2.freq();
if (freq <= 0) {
throw new RuntimeException("term " + term + ": doc " + doc + ": freq " + freq + " is out of bounds");
}
@ -876,12 +867,12 @@ public class CheckIndex {
}
final long totalTermFreq2 = termsEnum.totalTermFreq();
final boolean hasTotalTermFreq = postings != null && totalTermFreq2 != -1;
final boolean hasTotalTermFreq = hasFreqs && totalTermFreq2 != -1;
// Re-count if there are deleted docs:
if (liveDocs != null) {
if (hasFreqs) {
final DocsEnum docsNoDel = termsEnum.docs(null, docsAndFreqs, true);
final DocsEnum docsNoDel = termsEnum.docs(null, docsAndFreqs);
docCount = 0;
totalTermFreq = 0;
while(docsNoDel.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
@ -890,7 +881,7 @@ public class CheckIndex {
totalTermFreq += docsNoDel.freq();
}
} else {
final DocsEnum docsNoDel = termsEnum.docs(null, docs, false);
final DocsEnum docsNoDel = termsEnum.docs(null, docs, 0);
docCount = 0;
totalTermFreq = -1;
while(docsNoDel.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
@ -982,7 +973,7 @@ public class CheckIndex {
} else {
for(int idx=0;idx<7;idx++) {
final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
docs = termsEnum.docs(liveDocs, docs, false);
docs = termsEnum.docs(liveDocs, docs, 0);
final int docID = docs.advance(skipDocID);
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
@ -1052,7 +1043,7 @@ public class CheckIndex {
}
int expectedDocFreq = termsEnum.docFreq();
DocsEnum d = termsEnum.docs(null, null, false);
DocsEnum d = termsEnum.docs(null, null, 0);
int docFreq = 0;
while (d.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
docFreq++;
@ -1093,7 +1084,7 @@ public class CheckIndex {
throw new RuntimeException("seek to existing term " + seekTerms[i] + " failed");
}
docs = termsEnum.docs(liveDocs, docs, false);
docs = termsEnum.docs(liveDocs, docs, 0);
if (docs == null) {
throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
}
@ -1111,7 +1102,7 @@ public class CheckIndex {
}
totDocFreq += termsEnum.docFreq();
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
if (docs == null) {
throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
}
@ -1440,6 +1431,7 @@ public class CheckIndex {
if (crossCheckTermVectors) {
Terms terms = tfv.terms(field);
termsEnum = terms.iterator(termsEnum);
final boolean postingsHasFreq = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
Terms postingsTerms = postingsFields.terms(field);
if (postingsTerms == null) {
@ -1451,30 +1443,15 @@ public class CheckIndex {
while ((term = termsEnum.next()) != null) {
final boolean hasProx;
final boolean hasFreqs;
// TODO: really we need a reflection/query
// API so we can just ask what was indexed
// instead of "probing"...
// TODO: cleanup:
// Try offsets:
// Try positions:
postings = termsEnum.docsAndPositions(null, postings);
if (postings == null) {
hasProx = false;
// Try docIDs & freqs:
docs = termsEnum.docs(null, docs, true);
if (docs == null) {
// OK, only docIDs:
hasFreqs = false;
docs = termsEnum.docs(null, docs, false);
} else {
hasFreqs = true;
}
docs = termsEnum.docs(null, docs);
} else {
hasProx = true;
hasFreqs = true;
}
final DocsEnum docs2;
@ -1487,25 +1464,16 @@ public class CheckIndex {
}
final DocsEnum postingsDocs2;
final boolean postingsHasFreq;
if (!postingsTermsEnum.seekExact(term, true)) {
throw new RuntimeException("vector term=" + term + " field=" + field + " does not exist in postings; doc=" + j);
}
postingsPostings = postingsTermsEnum.docsAndPositions(null, postingsPostings);
if (postingsPostings == null) {
// Term vectors were indexed w/ pos but postings were not
postingsDocs = postingsTermsEnum.docs(null, postingsDocs, true);
postingsDocs = postingsTermsEnum.docs(null, postingsDocs);
if (postingsDocs == null) {
postingsHasFreq = false;
postingsDocs = postingsTermsEnum.docs(null, postingsDocs, false);
if (postingsDocs == null) {
throw new RuntimeException("vector term=" + term + " field=" + field + " does not exist in postings; doc=" + j);
}
} else {
postingsHasFreq = true;
throw new RuntimeException("vector term=" + term + " field=" + field + " does not exist in postings; doc=" + j);
}
} else {
postingsHasFreq = true;
}
if (postingsPostings != null) {
@ -1525,7 +1493,7 @@ public class CheckIndex {
throw new RuntimeException("vector for doc " + j + " didn't return docID=0: got docID=" + doc);
}
if (hasFreqs) {
if (postingsHasFreq) {
final int tf = docs2.freq();
if (postingsHasFreq && postingsDocs2.freq() != tf) {
throw new RuntimeException("vector term=" + term + " field=" + field + " doc=" + j + ": freq=" + tf + " differs from postings freq=" + postingsDocs2.freq());

View File

@ -334,7 +334,7 @@ public class DocTermOrds {
final int df = te.docFreq();
if (df <= maxTermDocFreq) {
docsEnum = te.docs(liveDocs, docsEnum, false);
docsEnum = te.docs(liveDocs, docsEnum, 0);
// dF, but takes deletions into account
int actualDF = 0;
@ -668,8 +668,8 @@ public class DocTermOrds {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
return termsEnum.docs(liveDocs, reuse, needsFreqs);
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
return termsEnum.docs(liveDocs, reuse, flags);
}
@Override

View File

@ -27,6 +27,10 @@ import org.apache.lucene.util.AttributeSource;
* any of the per-doc methods. */
public abstract class DocsEnum extends DocIdSetIterator {
/** Flag to pass to {@link TermsEnum#docs(Bits,DocsEnum,int)}
* if you require term frequencies in the returned enum. */
public static final int FLAG_FREQS = 0x1;
private AttributeSource atts = null;
/** Returns term frequency in the current document. Do

View File

@ -181,8 +181,8 @@ public class FilterAtomicReader extends AtomicReader {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
return in.docs(liveDocs, reuse, needsFreqs);
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
return in.docs(liveDocs, reuse, flags);
}
@Override

View File

@ -166,8 +166,8 @@ public abstract class FilteredTermsEnum extends TermsEnum {
}
@Override
public DocsEnum docs(Bits bits, DocsEnum reuse, boolean needsFreqs) throws IOException {
return tenum.docs(bits, reuse, needsFreqs);
public DocsEnum docs(Bits bits, DocsEnum reuse, int flags) throws IOException {
return tenum.docs(bits, reuse, flags);
}
@Override

View File

@ -122,24 +122,34 @@ public final class MultiFields extends Fields {
}
/** Returns {@link DocsEnum} for the specified field &
* term. This may return null if the term does not
* exist. */
public static DocsEnum getTermDocsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term, boolean needsFreqs) throws IOException {
* term. This will return null if the field or term does
* not exist. */
/** Returns {@link DocsEnum} for the specified field &
 *  term, requesting freqs ({@link DocsEnum#FLAG_FREQS}) by
 *  default. This will return null if the field or term does
 *  not exist. */
public static DocsEnum getTermDocsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term) throws IOException {
  // Delegate to the int-flags overload. Calling the 4-arg form here
  // would recurse into this same method forever (StackOverflowError);
  // mirror AtomicReader.termDocsEnum, which passes FLAG_FREQS.
  return getTermDocsEnum(r, liveDocs, field, term, DocsEnum.FLAG_FREQS);
}
/** Returns {@link DocsEnum} for the specified field &
* term, with control over whether freqs are required.
* Some codecs may be able to optimize their
* implementation when freqs are not required. This will
* return null if the field or term does not exist. See {@link
* TermsEnum#docs(Bits,DocsEnum,int)}.*/
public static DocsEnum getTermDocsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term, int flags) throws IOException {
assert field != null;
assert term != null;
final Terms terms = getTerms(r, field);
if (terms != null) {
final TermsEnum termsEnum = terms.iterator(null);
if (termsEnum.seekExact(term, true)) {
return termsEnum.docs(liveDocs, null, needsFreqs);
return termsEnum.docs(liveDocs, null, flags);
}
}
return null;
}
/** Returns {@link DocsAndPositionsEnum} for the specified
* field & term. This may return null if the term does
* not exist or positions were not indexed.
* field & term. This will return null if the field or
* term does not exist or positions were not indexed.
* @see #getTermPositionsEnum(IndexReader, Bits, String, BytesRef, int) */
public static DocsAndPositionsEnum getTermPositionsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term) throws IOException {
return getTermPositionsEnum(r, liveDocs, field, term, DocsAndPositionsEnum.FLAG_OFFSETS | DocsAndPositionsEnum.FLAG_PAYLOADS);
@ -149,7 +159,7 @@ public final class MultiFields extends Fields {
* field & term, with control over whether offsets and payloads are
* required. Some codecs may be able to optimize
* their implementation when offsets and/or payloads are not
* required. This will return null if the term does not
* required. This will return null if the field or term does not
* exist or positions were not indexed. See {@link
* TermsEnum#docsAndPositions(Bits,DocsAndPositionsEnum,int)}. */
public static DocsAndPositionsEnum getTermPositionsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term, int flags) throws IOException {

View File

@ -344,7 +344,7 @@ public final class MultiTermsEnum extends TermsEnum {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
MultiDocsEnum docsEnum;
// Can only reuse if incoming enum is also a MultiDocsEnum
if (reuse != null && reuse instanceof MultiDocsEnum) {
@ -394,16 +394,15 @@ public final class MultiTermsEnum extends TermsEnum {
}
assert entry.index < docsEnum.subDocsEnum.length: entry.index + " vs " + docsEnum.subDocsEnum.length + "; " + subs.length;
final DocsEnum subDocsEnum = entry.terms.docs(b, docsEnum.subDocsEnum[entry.index], needsFreqs);
final DocsEnum subDocsEnum = entry.terms.docs(b, docsEnum.subDocsEnum[entry.index], flags);
if (subDocsEnum != null) {
docsEnum.subDocsEnum[entry.index] = subDocsEnum;
subDocs[upto].docsEnum = subDocsEnum;
subDocs[upto].slice = entry.subSlice;
upto++;
} else {
// One of our subs cannot provide freqs:
assert needsFreqs;
return null;
// One of our subs cannot provide a docsenum:
assert false;
}
}
@ -474,7 +473,7 @@ public final class MultiTermsEnum extends TermsEnum {
subDocsAndPositions[upto].slice = entry.subSlice;
upto++;
} else {
if (entry.terms.docs(b, null, false) != null) {
if (entry.terms.docs(b, null, 0) != null) {
// At least one of our subs does not store
// offsets or positions -- we can't correctly
// produce a MultiDocsAndPositions enum

View File

@ -138,20 +138,29 @@ public abstract class TermsEnum implements BytesRefIterator {
* deleted documents into account. */
public abstract long totalTermFreq() throws IOException;
// TODO: cutover to flags / make needsFreqs "a hint" / add
// default w/ needsFreqs=true
/** Get {@link DocsEnum} for the current term. Do not
* call this when the enum is unpositioned. This method
* may return null (if needsFreqs is true but freqs were
* not indexed for this field).
* will not return null.
*
* @param liveDocs unset bits are documents that should not
* be returned
* @param reuse pass a prior DocsEnum for possible reuse */
public final DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
return docs(liveDocs, reuse, DocsEnum.FLAG_FREQS);
}
/** Get {@link DocsEnum} for the current term, with
* control over whether freqs are required. Do not
* call this when the enum is unpositioned. This method
* will not return null.
*
* @param liveDocs unset bits are documents that should not
* be returned
* @param reuse pass a prior DocsEnum for possible reuse
* @param needsFreqs true if the caller intends to call
* {@link DocsEnum#freq}. If you pass false you must not
* call {@link DocsEnum#freq} in the returned DocsEnum. */
public abstract DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException;
* @param flags specifies which optional per-document values
* you require; see {@link DocsEnum#FLAG_FREQS}
* @see #docs(Bits, DocsEnum, int) */
public abstract DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException;
/** Get {@link DocsAndPositionsEnum} for the current term.
* Do not call this when the enum is unpositioned. This
@ -240,7 +249,7 @@ public abstract class TermsEnum implements BytesRefIterator {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
throw new IllegalStateException("this method should never be called");
}

View File

@ -361,11 +361,9 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
final Scorer scorer = weight.scorer(context, true, false, acceptDocs);
if (scorer == null) {
return null;
}
if (scorer instanceof TermScorer) {
docsAndFreqs[i] = new DocsAndFreqs((TermScorer) scorer);
} else {
docsAndFreqs[i] = new DocsAndFreqs((MatchOnlyTermScorer) scorer);
assert scorer instanceof TermScorer;
docsAndFreqs[i] = new DocsAndFreqs((TermScorer) scorer);
}
}
return new ConjunctionTermScorer(this, disableCoord ? 1.0f : coord(

View File

@ -121,10 +121,6 @@ class ConjunctionTermScorer extends Scorer {
this(termScorer, termScorer.getDocsEnum(), termScorer.getDocFreq());
}
DocsAndFreqs(MatchOnlyTermScorer termScorer) {
this(termScorer, termScorer.getDocsEnum(), termScorer.getDocFreq());
}
DocsAndFreqs(Scorer scorer, DocsEnum docs, int docFreq) {
this.docs = docs;
this.docFreq = docFreq;

View File

@ -364,7 +364,7 @@ class FieldCacheImpl implements FieldCache {
break;
}
final byte termval = parser.parseByte(term);
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -437,7 +437,7 @@ class FieldCacheImpl implements FieldCache {
break;
}
final short termval = parser.parseShort(term);
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -541,7 +541,7 @@ class FieldCacheImpl implements FieldCache {
retArray = new int[maxDoc];
}
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -608,7 +608,7 @@ class FieldCacheImpl implements FieldCache {
res = new FixedBitSet(maxDoc);
}
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
// TODO: use bulk API
while (true) {
final int docID = docs.nextDoc();
@ -691,7 +691,7 @@ class FieldCacheImpl implements FieldCache {
retArray = new float[maxDoc];
}
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -779,7 +779,7 @@ class FieldCacheImpl implements FieldCache {
retArray = new long[maxDoc];
}
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -868,7 +868,7 @@ class FieldCacheImpl implements FieldCache {
retArray = new double[maxDoc];
}
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -1042,7 +1042,7 @@ class FieldCacheImpl implements FieldCache {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
throw new UnsupportedOperationException();
}
@ -1167,7 +1167,7 @@ class FieldCacheImpl implements FieldCache {
termOrdToBytesOffset = termOrdToBytesOffset.resize(ArrayUtil.oversize(1+termOrd, 1));
}
termOrdToBytesOffset.set(termOrd, bytes.copyUsingLengthPrefix(term));
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -1288,7 +1288,7 @@ class FieldCacheImpl implements FieldCache {
break;
}
final long pointer = bytes.copyUsingLengthPrefix(term);
docs = termsEnum.docs(null, docs, false);
docs = termsEnum.docs(null, docs, 0);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -272,8 +272,8 @@ public class FuzzyTermsEnum extends TermsEnum {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
return actualEnum.docs(liveDocs, reuse, needsFreqs);
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
return actualEnum.docs(liveDocs, reuse, flags);
}
@Override

View File

@ -1,112 +0,0 @@
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.search.similarities.Similarity;
/** Expert: A <code>Scorer</code> for documents matching a
 * <code>Term</code>. It treats all documents as having
 * one occurrence (tf=1) for the term, so it can be used when
 * the index does not store term frequencies.
 */
final class MatchOnlyTermScorer extends Scorer {
// Postings iterator for the term (docs only; no freq information available).
private final DocsEnum docsEnum;
// Per-segment similarity scorer; always invoked with freq=1.
private final Similarity.ExactSimScorer docScorer;
// Per-segment document frequency of the term; an estimation hint, not used for scoring.
private final int docFreq;
/**
* Construct a <code>MatchOnlyTermScorer</code>.
*
* @param weight
* The weight of the <code>Term</code> in the query.
* @param td
* An iterator over the documents matching the <code>Term</code>.
* @param docScorer
* The <code>Similarity.ExactSimScorer</code> implementation
* to be used for score computations.
* @param docFreq
* per-segment docFreq of this term
*/
MatchOnlyTermScorer(Weight weight, DocsEnum td, Similarity.ExactSimScorer docScorer, int docFreq) {
super(weight);
this.docScorer = docScorer;
this.docsEnum = td;
this.docFreq = docFreq;
}
@Override
public int docID() {
return docsEnum.docID();
}
@Override
public float freq() {
// By contract of this scorer every matching doc is treated as a single occurrence.
return 1.0f;
}
/**
* Advances to the next document matching the query. <br>
*
* @return the document matching the query or NO_MORE_DOCS if there are no more documents.
*/
@Override
public int nextDoc() throws IOException {
return docsEnum.nextDoc();
}
@Override
public float score() {
assert docID() != NO_MORE_DOCS;
// Score with a fixed term frequency of 1 (freqs are not stored/needed).
return docScorer.score(docsEnum.docID(), 1);
}
/**
* Advances to the first match beyond the current whose document number is
* greater than or equal to a given target. <br>
* The implementation uses {@link DocsEnum#advance(int)}.
*
* @param target
* The target document number.
* @return the matching document or NO_MORE_DOCS if none exist.
*/
@Override
public int advance(int target) throws IOException {
return docsEnum.advance(target);
}
/** Returns a string representation of this <code>MatchOnlyTermScorer</code>. */
@Override
public String toString() { return "scorer(" + weight + ")"; }
// TODO: benchmark if the specialized conjunction really benefits
// from these, or if instead its from sorting by docFreq, or both
DocsEnum getDocsEnum() {
return docsEnum;
}
// TODO: generalize something like this for scorers?
// even this is just an estimation...
int getDocFreq() {
return docFreq;
}
}

View File

@ -230,7 +230,7 @@ public class MultiPhraseQuery extends Query {
if (postingsEnum == null) {
// term does exist, but has no positions
assert termsEnum.docs(liveDocs, null, false) != null: "termstate found but no term exists in reader";
assert termsEnum.docs(liveDocs, null, 0) != null: "termstate found but no term exists in reader";
throw new IllegalStateException("field \"" + term.field() + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.text() + ")");
}

View File

@ -106,7 +106,7 @@ public class MultiTermQueryWrapperFilter<Q extends MultiTermQuery> extends Filte
do {
// System.out.println(" iter termCount=" + termCount + " term=" +
// enumerator.term().toBytesString());
docsEnum = termsEnum.docs(acceptDocs, docsEnum, false);
docsEnum = termsEnum.docs(acceptDocs, docsEnum, 0);
int docid;
while ((docid = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
bitSet.set(docid);

View File

@ -83,15 +83,9 @@ public class TermQuery extends Query {
if (termsEnum == null) {
return null;
}
DocsEnum docs = termsEnum.docs(acceptDocs, null, true);
if (docs != null) {
return new TermScorer(this, docs, similarity.exactSimScorer(stats, context), termsEnum.docFreq());
} else {
// Index does not store freq info
docs = termsEnum.docs(acceptDocs, null, false);
assert docs != null;
return new MatchOnlyTermScorer(this, docs, similarity.exactSimScorer(stats, context), termsEnum.docFreq());
}
DocsEnum docs = termsEnum.docs(acceptDocs, null);
assert docs != null;
return new TermScorer(this, docs, similarity.exactSimScorer(stats, context), termsEnum.docFreq());
}
/**

View File

@ -142,7 +142,7 @@ public class TestAppendingCodec extends LuceneTestCase {
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("lazy")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("dog")));
assertEquals(SeekStatus.FOUND, te.seekCeil(new BytesRef("the")));
DocsEnum de = te.docs(null, null, true);
DocsEnum de = te.docs(null, null);
assertTrue(de.advance(0) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(2, de.freq());
assertTrue(de.advance(1) != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -56,7 +56,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
MatchNoBits bits = new Bits.MatchNoBits(indexReader.maxDoc());
while ((iterator.next()) != null) {
DocsEnum docs = iterator.docs(random().nextBoolean() ? bits : new Bits.MatchNoBits(indexReader.maxDoc()), null, random().nextBoolean());
DocsEnum docs = iterator.docs(random().nextBoolean() ? bits : new Bits.MatchNoBits(indexReader.maxDoc()), null, random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
@ -83,7 +83,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
MatchNoBits bits = new Bits.MatchNoBits(open.maxDoc());
DocsEnum docs = null;
while ((iterator.next()) != null) {
docs = iterator.docs(bits, docs, random().nextBoolean());
docs = iterator.docs(bits, docs, random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
@ -92,7 +92,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
iterator = terms.iterator(null);
docs = null;
while ((iterator.next()) != null) {
docs = iterator.docs(new Bits.MatchNoBits(open.maxDoc()), docs, random().nextBoolean());
docs = iterator.docs(new Bits.MatchNoBits(open.maxDoc()), docs, random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@ -101,7 +101,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
iterator = terms.iterator(null);
docs = null;
while ((iterator.next()) != null) {
docs = iterator.docs(null, docs, random().nextBoolean());
docs = iterator.docs(null, docs, random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
assertEquals(1, enums.size());
@ -133,7 +133,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
DocsEnum docs = null;
BytesRef term = null;
while ((term = iterator.next()) != null) {
docs = iterator.docs(null, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean());
docs = iterator.docs(null, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@ -142,7 +142,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
enums.clear();
docs = null;
while ((term = iterator.next()) != null) {
docs = iterator.docs(bits, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean());
docs = iterator.docs(bits, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@ -155,7 +155,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
return null;
}
AtomicReader indexReader = (AtomicReader) readers.get(random().nextInt(readers.size()));
return indexReader.termDocsEnum(bits, field, term, random().nextBoolean());
return indexReader.termDocsEnum(bits, field, term, random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
}
/**

View File

@ -88,7 +88,7 @@ public class Test10KPulsings extends LuceneTestCase {
for (int i = 0; i < 10050; i++) {
String expected = df.format(i);
assertEquals(expected, te.next().utf8ToString());
de = _TestUtil.docs(random(), te, null, de, false);
de = _TestUtil.docs(random(), te, null, de, 0);
assertTrue(de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(DocIdSetIterator.NO_MORE_DOCS, de.nextDoc());
}
@ -146,7 +146,7 @@ public class Test10KPulsings extends LuceneTestCase {
for (int i = 0; i < 10050; i++) {
String expected = df.format(i);
assertEquals(expected, te.next().utf8ToString());
de = _TestUtil.docs(random(), te, null, de, false);
de = _TestUtil.docs(random(), te, null, de, 0);
assertTrue(de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(DocIdSetIterator.NO_MORE_DOCS, de.nextDoc());
}

View File

@ -60,7 +60,7 @@ public class TestPulsingReuse extends LuceneTestCase {
Map<DocsEnum,Boolean> allEnums = new IdentityHashMap<DocsEnum,Boolean>();
TermsEnum te = segment.terms("foo").iterator(null);
while (te.next() != null) {
reuse = te.docs(null, reuse, false);
reuse = te.docs(null, reuse, 0);
allEnums.put(reuse, true);
}
@ -102,7 +102,7 @@ public class TestPulsingReuse extends LuceneTestCase {
Map<DocsEnum,Boolean> allEnums = new IdentityHashMap<DocsEnum,Boolean>();
TermsEnum te = segment.terms("foo").iterator(null);
while (te.next() != null) {
reuse = te.docs(null, reuse, false);
reuse = te.docs(null, reuse, 0);
allEnums.put(reuse, true);
}

View File

@ -550,7 +550,7 @@ public class TestAddIndexes extends LuceneTestCase {
private void verifyTermDocs(Directory dir, Term term, int numDocs)
throws IOException {
IndexReader reader = DirectoryReader.open(dir);
DocsEnum docsEnum = _TestUtil.docs(random(), reader, term.field, term.bytes, null, null, false);
DocsEnum docsEnum = _TestUtil.docs(random(), reader, term.field, term.bytes, null, null, 0);
int count = 0;
while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS)
count++;

View File

@ -719,7 +719,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// should be found exactly
assertEquals(TermsEnum.SeekStatus.FOUND,
terms.seekCeil(aaaTerm));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, false)));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, 0)));
assertNull(terms.next());
// should hit end of field
@ -731,12 +731,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
assertEquals(TermsEnum.SeekStatus.NOT_FOUND,
terms.seekCeil(new BytesRef("a")));
assertTrue(terms.term().bytesEquals(aaaTerm));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, false)));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, 0)));
assertNull(terms.next());
assertEquals(TermsEnum.SeekStatus.FOUND,
terms.seekCeil(aaaTerm));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms,null, null, false)));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms,null, null, 0)));
assertNull(terms.next());
r.close();

View File

@ -275,7 +275,7 @@ public class TestCodecs extends LuceneTestCase {
// make sure it properly fully resets (rewinds) its
// internal state:
for(int iter=0;iter<2;iter++) {
docsEnum = _TestUtil.docs(random(), termsEnum, null, docsEnum, false);
docsEnum = _TestUtil.docs(random(), termsEnum, null, docsEnum, 0);
assertEquals(terms[i].docs[0], docsEnum.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsEnum.nextDoc());
}
@ -472,7 +472,7 @@ public class TestCodecs extends LuceneTestCase {
assertEquals(status, TermsEnum.SeekStatus.FOUND);
assertEquals(term.docs.length, termsEnum.docFreq());
if (field.omitTF) {
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random(), termsEnum, null, null, false), false);
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random(), termsEnum, null, null, 0), false);
} else {
this.verifyDocs(term.docs, term.positions, termsEnum.docsAndPositions(null, null), true);
}
@ -492,7 +492,7 @@ public class TestCodecs extends LuceneTestCase {
assertTrue(termsEnum.term().bytesEquals(new BytesRef(term.text2)));
assertEquals(term.docs.length, termsEnum.docFreq());
if (field.omitTF) {
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random(), termsEnum, null, null, false), false);
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random(), termsEnum, null, null, 0), false);
} else {
this.verifyDocs(term.docs, term.positions, termsEnum.docsAndPositions(null, null), true);
}
@ -550,12 +550,12 @@ public class TestCodecs extends LuceneTestCase {
if (postings != null) {
docs = docsAndFreqs = postings;
} else {
docs = docsAndFreqs = _TestUtil.docs(random(), termsEnum, null, null, true);
docs = docsAndFreqs = _TestUtil.docs(random(), termsEnum, null, null, DocsEnum.FLAG_FREQS);
}
} else {
postings = null;
docsAndFreqs = null;
docs = _TestUtil.docs(random(), termsEnum, null, null, false);
docs = _TestUtil.docs(random(), termsEnum, null, null, 0);
}
assertNotNull(docs);
int upto2 = -1;

View File

@ -97,13 +97,13 @@ public class TestDirectoryReader extends LuceneTestCase {
te2.term(),
MultiFields.getLiveDocs(mr2),
null,
false);
0);
TermsEnum te3 = MultiFields.getTerms(mr3, "body").iterator(null);
te3.seekCeil(new BytesRef("wow"));
td = _TestUtil.docs(random(), te3, MultiFields.getLiveDocs(mr3),
td,
false);
0);
int ret = 0;
@ -355,7 +355,7 @@ void assertTermDocsCount(String msg,
new BytesRef(term.text()),
MultiFields.getLiveDocs(reader),
null,
false);
0);
int count = 0;
if (tdocs != null) {
while(tdocs.nextDoc()!= DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -72,7 +72,7 @@ public class TestDocCount extends LuceneTestCase {
FixedBitSet visited = new FixedBitSet(ir.maxDoc());
TermsEnum te = terms.iterator(null);
while (te.next() != null) {
DocsEnum de = _TestUtil.docs(random(), te, null, null, false);
DocsEnum de = _TestUtil.docs(random(), te, null, null, 0);
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
visited.set(de.docID());
}

View File

@ -881,7 +881,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public int docId(AtomicReader reader, Term term) throws IOException {
int docFreq = reader.docFreq(term);
assertEquals(1, docFreq);
DocsEnum termDocsEnum = reader.termDocsEnum(null, term.field, term.bytes, false);
DocsEnum termDocsEnum = reader.termDocsEnum(null, term.field, term.bytes, 0);
int nextDoc = termDocsEnum.nextDoc();
assertEquals(DocIdSetIterator.NO_MORE_DOCS, termDocsEnum.nextDoc());
return nextDoc;

View File

@ -217,7 +217,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
IndexReaderContext topReaderContext = reader.getTopReaderContext();
for (AtomicReaderContext context : topReaderContext.leaves()) {
int maxDoc = context.reader().maxDoc();
DocsEnum docsEnum = _TestUtil.docs(random(), context.reader(), fieldName, bytes, null, null, true);
DocsEnum docsEnum = _TestUtil.docs(random(), context.reader(), fieldName, bytes, null, null, DocsEnum.FLAG_FREQS);
if (findNext(freqInDoc, context.docBase, context.docBase + maxDoc) == Integer.MAX_VALUE) {
assertNull(docsEnum);
continue;
@ -327,7 +327,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
AtomicReader r = getOnlySegmentReader(reader);
DocsEnum disi = _TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, null, false);
DocsEnum disi = _TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, null, 0);
int docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -335,7 +335,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
// now reuse and check again
TermsEnum te = r.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar"), true));
disi = _TestUtil.docs(random(), te, null, disi, false);
disi = _TestUtil.docs(random(), te, null, disi, 0);
docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -342,39 +342,39 @@ public class TestDuelingCodecs extends LuceneTestCase {
rightPositions = rightTermsEnum.docsAndPositions(randomBits, rightPositions));
// with freqs:
assertDocsEnum(leftDocs = leftTermsEnum.docs(null, leftDocs, true),
rightDocs = rightTermsEnum.docs(null, rightDocs, true),
assertDocsEnum(leftDocs = leftTermsEnum.docs(null, leftDocs),
rightDocs = rightTermsEnum.docs(null, rightDocs),
true);
assertDocsEnum(leftDocs = leftTermsEnum.docs(randomBits, leftDocs, true),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs, true),
assertDocsEnum(leftDocs = leftTermsEnum.docs(randomBits, leftDocs),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs),
true);
// w/o freqs:
assertDocsEnum(leftDocs = leftTermsEnum.docs(null, leftDocs, false),
rightDocs = rightTermsEnum.docs(null, rightDocs, false),
assertDocsEnum(leftDocs = leftTermsEnum.docs(null, leftDocs, 0),
rightDocs = rightTermsEnum.docs(null, rightDocs, 0),
false);
assertDocsEnum(leftDocs = leftTermsEnum.docs(randomBits, leftDocs, false),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs, false),
assertDocsEnum(leftDocs = leftTermsEnum.docs(randomBits, leftDocs, 0),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs, 0),
false);
// with freqs:
assertDocsSkipping(leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.docs(null, leftDocs, true),
rightDocs = rightTermsEnum.docs(null, rightDocs, true),
leftDocs = leftTermsEnum.docs(null, leftDocs),
rightDocs = rightTermsEnum.docs(null, rightDocs),
true);
assertDocsSkipping(leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.docs(randomBits, leftDocs, true),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs, true),
leftDocs = leftTermsEnum.docs(randomBits, leftDocs),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs),
true);
// w/o freqs:
assertDocsSkipping(leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.docs(null, leftDocs, false),
rightDocs = rightTermsEnum.docs(null, rightDocs, false),
leftDocs = leftTermsEnum.docs(null, leftDocs, 0),
rightDocs = rightTermsEnum.docs(null, rightDocs, 0),
false);
assertDocsSkipping(leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.docs(randomBits, leftDocs, false),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs, false),
leftDocs = leftTermsEnum.docs(randomBits, leftDocs, 0),
rightDocs = rightTermsEnum.docs(randomBits, rightDocs, 0),
false);
}
}

View File

@ -541,7 +541,7 @@ public class TestIndexWriter extends LuceneTestCase {
new BytesRef("a"),
MultiFields.getLiveDocs(reader),
null,
true);
DocsEnum.FLAG_FREQS);
td.nextDoc();
assertEquals(128*1024, td.freq());
reader.close();
@ -1182,12 +1182,12 @@ public class TestIndexWriter extends LuceneTestCase {
// test that the terms were indexed.
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc1field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc2field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc3field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc1field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc2field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc3field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc1field1"), null, null, 0).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc2field1"), null, null, 0).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc3field1"), null, null, 0).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc1field2"), null, null, 0).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc2field2"), null, null, 0).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc3field2"), null, null, 0).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
ir.close();
dir.close();
@ -1259,7 +1259,7 @@ public class TestIndexWriter extends LuceneTestCase {
TermsEnum t = r.fields().terms("field").iterator(null);
int count = 0;
while(t.next() != null) {
final DocsEnum docs = _TestUtil.docs(random(), t, null, null, false);
final DocsEnum docs = _TestUtil.docs(random(), t, null, null, 0);
assertEquals(0, docs.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docs.nextDoc());
count++;

View File

@ -507,7 +507,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
new BytesRef(t.text()),
MultiFields.getLiveDocs(reader),
null,
false);
0);
int count = 0;
while(tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -53,7 +53,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
t.field(), new BytesRef(t.text()),
MultiFields.getLiveDocs(r),
null,
false);
0);
if (td != null) {
while (td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
@ -987,7 +987,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
w.addDocument(doc);
SegmentReader r = getOnlySegmentReader(DirectoryReader.open(w, true));
try {
_TestUtil.docs(random(), r, "f", new BytesRef("val"), null, null, false);
_TestUtil.docs(random(), r, "f", new BytesRef("val"), null, null, 0);
fail("should have failed to seek since terms index was not loaded.");
} catch (IllegalStateException e) {
// expected - we didn't load the term index

View File

@ -224,7 +224,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
new BytesRef("aaa"),
MultiFields.getLiveDocs(reader),
null,
false);
0);
int count = 0;
while(tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
count++;

View File

@ -368,10 +368,10 @@ public class TestLongPostings extends LuceneTestCase {
final DocsEnum postings;
if (options == IndexOptions.DOCS_ONLY) {
docs = _TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, false);
docs = _TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, 0);
postings = null;
} else {
docs = postings = _TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, true);
docs = postings = _TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, DocsEnum.FLAG_FREQS);
assert postings != null;
}
assert docs != null;

View File

@ -121,7 +121,7 @@ public class TestMultiFields extends LuceneTestCase {
System.out.println("TEST: seek term="+ UnicodeUtil.toHexString(term.utf8ToString()) + " " + term);
}
DocsEnum docsEnum = _TestUtil.docs(random(), reader, "field", term, liveDocs, null, false);
DocsEnum docsEnum = _TestUtil.docs(random(), reader, "field", term, liveDocs, null, 0);
assertNotNull(docsEnum);
for(int docID : docs.get(term)) {
@ -162,8 +162,8 @@ public class TestMultiFields extends LuceneTestCase {
w.addDocument(d);
IndexReader r = w.getReader();
w.close();
DocsEnum d1 = _TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, false);
DocsEnum d2 = _TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, false);
DocsEnum d1 = _TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, 0);
DocsEnum d2 = _TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, 0);
assertEquals(0, d1.nextDoc());
assertEquals(0, d2.nextDoc());
r.close();

View File

@ -53,7 +53,7 @@ public class TestOmitPositions extends LuceneTestCase {
assertNull(MultiFields.getTermPositionsEnum(reader, null, "foo", new BytesRef("test")));
DocsEnum de = _TestUtil.docs(random(), reader, "foo", new BytesRef("test"), null, null, true);
DocsEnum de = _TestUtil.docs(random(), reader, "foo", new BytesRef("test"), null, null, DocsEnum.FLAG_FREQS);
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
assertEquals(2, de.freq());
}

View File

@ -81,7 +81,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
BytesRef b = te.next();
assertNotNull(b);
assertEquals(t, b.utf8ToString());
DocsEnum td = _TestUtil.docs(random(), te, liveDocs, null, false);
DocsEnum td = _TestUtil.docs(random(), te, liveDocs, null, 0);
assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(0, td.docID());
assertEquals(td.nextDoc(), DocIdSetIterator.NO_MORE_DOCS);

View File

@ -227,7 +227,7 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
Terms cterms = fields.terms(term.field);
TermsEnum ctermsEnum = cterms.iterator(null);
if (ctermsEnum.seekExact(new BytesRef(term.text()), false)) {
DocsEnum docsEnum = _TestUtil.docs(random(), ctermsEnum, bits, null, false);
DocsEnum docsEnum = _TestUtil.docs(random(), ctermsEnum, bits, null, 0);
return toArray(docsEnum);
}
return null;

View File

@ -557,7 +557,7 @@ public class TestPostingsFormat extends LuceneTestCase {
if (options.contains(Option.REUSE_ENUMS) && random().nextInt(10) < 9) {
prevDocsEnum = threadState.reuseDocsEnum;
}
threadState.reuseDocsEnum = termsEnum.docs(liveDocs, prevDocsEnum, doCheckFreqs);
threadState.reuseDocsEnum = termsEnum.docs(liveDocs, prevDocsEnum, doCheckFreqs ? DocsEnum.FLAG_FREQS : 0);
docsEnum = threadState.reuseDocsEnum;
docsAndPositionsEnum = null;
}

View File

@ -208,7 +208,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
// check that other fields (without offsets) work correctly
for (int i = 0; i < numDocs; i++) {
DocsEnum dp = MultiFields.getTermDocsEnum(reader, null, "id", new BytesRef("" + i), false);
DocsEnum dp = MultiFields.getTermDocsEnum(reader, null, "id", new BytesRef("" + i), 0);
assertEquals(i, dp.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dp.nextDoc());
}
@ -301,7 +301,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
for(String term : terms) {
//System.out.println(" term=" + term);
if (termsEnum.seekExact(new BytesRef(term), random().nextBoolean())) {
docs = termsEnum.docs(null, docs, true);
docs = termsEnum.docs(null, docs);
assertNotNull(docs);
int doc;
//System.out.println(" doc/freq");

View File

@ -109,7 +109,7 @@ public class TestSegmentMerger extends LuceneTestCase {
new BytesRef("field"),
MultiFields.getLiveDocs(mergedReader),
null,
false);
0);
assertTrue(termDocs != null);
assertTrue(termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -133,7 +133,7 @@ public class TestSegmentReader extends LuceneTestCase {
new BytesRef("field"),
MultiFields.getLiveDocs(reader),
null,
false);
0);
assertTrue(termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
termDocs = _TestUtil.docs(random(), reader,
@ -141,7 +141,7 @@ public class TestSegmentReader extends LuceneTestCase {
new BytesRef(DocHelper.NO_NORMS_TEXT),
MultiFields.getLiveDocs(reader),
null,
false);
0);
assertTrue(termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -63,7 +63,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
TermsEnum terms = reader.fields().terms(DocHelper.TEXT_FIELD_2_KEY).iterator(null);
terms.seekCeil(new BytesRef("field"));
DocsEnum termDocs = _TestUtil.docs(random(), terms, reader.getLiveDocs(), null, true);
DocsEnum termDocs = _TestUtil.docs(random(), terms, reader.getLiveDocs(), null, DocsEnum.FLAG_FREQS);
if (termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int docId = termDocs.docID();
assertTrue(docId == 0);
@ -87,7 +87,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef("bad"),
reader.getLiveDocs(),
null,
false);
0);
assertNull(termDocs);
reader.close();
@ -101,7 +101,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef("bad"),
reader.getLiveDocs(),
null,
false);
0);
assertNull(termDocs);
reader.close();
}
@ -138,7 +138,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(ta.text()),
MultiFields.getLiveDocs(reader),
null,
true);
DocsEnum.FLAG_FREQS);
// without optimization (assumption skipInterval == 16)
@ -163,7 +163,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(ta.text()),
MultiFields.getLiveDocs(reader),
null,
false);
0);
assertTrue(tdocs.advance(0) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(0, tdocs.docID());
@ -181,7 +181,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tb.text()),
MultiFields.getLiveDocs(reader),
null,
true);
DocsEnum.FLAG_FREQS);
assertTrue(tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(10, tdocs.docID());
@ -205,7 +205,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tb.text()),
MultiFields.getLiveDocs(reader),
null,
true);
DocsEnum.FLAG_FREQS);
assertTrue(tdocs.advance(5) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(10, tdocs.docID());
@ -225,7 +225,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tc.text()),
MultiFields.getLiveDocs(reader),
null,
true);
DocsEnum.FLAG_FREQS);
assertTrue(tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(26, tdocs.docID());
@ -251,7 +251,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tc.text()),
MultiFields.getLiveDocs(reader),
null,
false);
0);
assertTrue(tdocs.advance(5) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(26, tdocs.docID());
assertTrue(tdocs.advance(40) != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -82,11 +82,11 @@ public class TestStressAdvance extends LuceneTestCase {
System.out.println("\nTEST: iter=" + iter + " iter2=" + iter2);
}
assertEquals(TermsEnum.SeekStatus.FOUND, te.seekCeil(new BytesRef("a")));
de = _TestUtil.docs(random(), te, null, de, false);
de = _TestUtil.docs(random(), te, null, de, 0);
testOne(de, aDocIDs);
assertEquals(TermsEnum.SeekStatus.FOUND, te.seekCeil(new BytesRef("b")));
de = _TestUtil.docs(random(), te, null, de, false);
de = _TestUtil.docs(random(), te, null, de, 0);
testOne(de, bDocIDs);
}

View File

@ -336,7 +336,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
// deleted docs):
DocsEnum docs = null;
while(termsEnum.next() != null) {
docs = _TestUtil.docs(random(), termsEnum, null, docs, false);
docs = _TestUtil.docs(random(), termsEnum, null, docs, 0);
while(docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
fail("r1 is not empty but r2 is");
}
@ -356,9 +356,9 @@ public class TestStressIndexing2 extends LuceneTestCase {
break;
}
termDocs1 = _TestUtil.docs(random(), termsEnum, liveDocs1, termDocs1, false);
termDocs1 = _TestUtil.docs(random(), termsEnum, liveDocs1, termDocs1, 0);
if (termsEnum2.seekExact(term, false)) {
termDocs2 = _TestUtil.docs(random(), termsEnum2, liveDocs2, termDocs2, false);
termDocs2 = _TestUtil.docs(random(), termsEnum2, liveDocs2, termDocs2, 0);
} else {
termDocs2 = null;
}
@ -417,7 +417,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
System.out.println(" pos=" + dpEnum.nextPosition());
}
} else {
dEnum = _TestUtil.docs(random(), termsEnum3, null, dEnum, true);
dEnum = _TestUtil.docs(random(), termsEnum3, null, dEnum, DocsEnum.FLAG_FREQS);
assertNotNull(dEnum);
assertTrue(dEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
final int freq = dEnum.freq();
@ -451,7 +451,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
System.out.println(" pos=" + dpEnum.nextPosition());
}
} else {
dEnum = _TestUtil.docs(random(), termsEnum3, null, dEnum, true);
dEnum = _TestUtil.docs(random(), termsEnum3, null, dEnum, DocsEnum.FLAG_FREQS);
assertNotNull(dEnum);
assertTrue(dEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
final int freq = dEnum.freq();
@ -508,7 +508,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
}
//System.out.println("TEST: term1=" + term1);
docs1 = _TestUtil.docs(random(), termsEnum1, liveDocs1, docs1, true);
docs1 = _TestUtil.docs(random(), termsEnum1, liveDocs1, docs1, DocsEnum.FLAG_FREQS);
while (docs1.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int d = docs1.docID();
int f = docs1.freq();
@ -542,7 +542,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
}
//System.out.println("TEST: term1=" + term1);
docs2 = _TestUtil.docs(random(), termsEnum2, liveDocs2, docs2, true);
docs2 = _TestUtil.docs(random(), termsEnum2, liveDocs2, docs2, DocsEnum.FLAG_FREQS);
while (docs2.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int d = r2r1[docs2.docID()];
int f = docs2.freq();
@ -669,8 +669,8 @@ public class TestStressIndexing2 extends LuceneTestCase {
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dpEnum1.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dpEnum2.nextDoc());
} else {
dEnum1 = _TestUtil.docs(random(), termsEnum1, null, dEnum1, true);
dEnum2 = _TestUtil.docs(random(), termsEnum2, null, dEnum2, true);
dEnum1 = _TestUtil.docs(random(), termsEnum1, null, dEnum1, DocsEnum.FLAG_FREQS);
dEnum2 = _TestUtil.docs(random(), termsEnum2, null, dEnum2, DocsEnum.FLAG_FREQS);
assertNotNull(dEnum1);
assertNotNull(dEnum2);
int docID1 = dEnum1.nextDoc();

View File

@ -225,7 +225,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
//System.out.println("Term: " + term);
assertEquals(testTerms[i], term);
docsEnum = _TestUtil.docs(random(), termsEnum, null, docsEnum, false);
docsEnum = _TestUtil.docs(random(), termsEnum, null, docsEnum, 0);
assertNotNull(docsEnum);
int doc = docsEnum.docID();
assertTrue(doc == -1 || doc == DocIdSetIterator.NO_MORE_DOCS);

View File

@ -124,7 +124,7 @@ public class TestTermdocPerf extends LuceneTestCase {
final Random random = new Random(random().nextLong());
for (int i=0; i<iter; i++) {
tenum.seekCeil(new BytesRef("val"));
tdocs = _TestUtil.docs(random, tenum, MultiFields.getLiveDocs(reader), tdocs, false);
tdocs = _TestUtil.docs(random, tenum, MultiFields.getLiveDocs(reader), tdocs, 0);
while (tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
ret += tdocs.docID();
}

View File

@ -332,7 +332,7 @@ public class TestTermsEnum extends LuceneTestCase {
}
assertEquals(expected, actual);
assertEquals(1, te.docFreq());
docsEnum = _TestUtil.docs(random(), te, null, docsEnum, false);
docsEnum = _TestUtil.docs(random(), te, null, docsEnum, 0);
final int docID = docsEnum.nextDoc();
assertTrue(docID != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(docIDToID[docID], termToID.get(expected).intValue());
@ -747,25 +747,25 @@ public class TestTermsEnum extends LuceneTestCase {
CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);
TermsEnum te = terms.intersect(ca, null);
assertEquals("aaa", te.next().utf8ToString());
assertEquals(0, te.docs(null, null, false).nextDoc());
assertEquals(0, te.docs(null, null, 0).nextDoc());
assertEquals("bbb", te.next().utf8ToString());
assertEquals(1, te.docs(null, null, false).nextDoc());
assertEquals(1, te.docs(null, null, 0).nextDoc());
assertEquals("ccc", te.next().utf8ToString());
assertEquals(2, te.docs(null, null, false).nextDoc());
assertEquals(2, te.docs(null, null, 0).nextDoc());
assertNull(te.next());
te = terms.intersect(ca, new BytesRef("abc"));
assertEquals("bbb", te.next().utf8ToString());
assertEquals(1, te.docs(null, null, false).nextDoc());
assertEquals(1, te.docs(null, null, 0).nextDoc());
assertEquals("ccc", te.next().utf8ToString());
assertEquals(2, te.docs(null, null, false).nextDoc());
assertEquals(2, te.docs(null, null, 0).nextDoc());
assertNull(te.next());
te = terms.intersect(ca, new BytesRef("aaa"));
assertEquals("bbb", te.next().utf8ToString());
assertEquals(1, te.docs(null, null, false).nextDoc());
assertEquals(1, te.docs(null, null, 0).nextDoc());
assertEquals("ccc", te.next().utf8ToString());
assertEquals(2, te.docs(null, null, false).nextDoc());
assertEquals(2, te.docs(null, null, 0).nextDoc());
assertNull(te.next());
r.close();

View File

@ -261,7 +261,7 @@ public class TestTermVectors extends LuceneTestCase {
while (termsEnum.next() != null) {
String text = termsEnum.term().utf8ToString();
docs = _TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(knownSearcher.reader), docs, true);
docs = _TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(knownSearcher.reader), docs, DocsEnum.FLAG_FREQS);
while (docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int docId = docs.docID();

View File

@ -108,7 +108,7 @@ class TakmiSampleFixer implements SampleFixer {
Bits liveDocs = MultiFields.getLiveDocs(indexReader);
int updatedCount = countIntersection(MultiFields.getTermDocsEnum(indexReader, liveDocs,
drillDownTerm.field(), drillDownTerm.bytes(),
false),
0),
docIds.iterator());
fresNode.setValue(updatedCount);

View File

@ -196,7 +196,7 @@ public class DirectoryTaxonomyReader implements TaxonomyReader {
indexReaderLock.readLock().lock();
// TODO (Facet): avoid Multi*?
Bits liveDocs = MultiFields.getLiveDocs(indexReader);
DocsEnum docs = MultiFields.getTermDocsEnum(indexReader, liveDocs, Consts.FULL, new BytesRef(path), false);
DocsEnum docs = MultiFields.getTermDocsEnum(indexReader, liveDocs, Consts.FULL, new BytesRef(path), 0);
if (docs != null && docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
ret = docs.docID();
}

View File

@ -411,7 +411,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
final BytesRef catTerm = new BytesRef(categoryPath.toString(delimiter));
int base = 0;
for (AtomicReader r : reader.getSequentialSubReaders()) {
DocsEnum docs = r.termDocsEnum(null, Consts.FULL, catTerm, false);
DocsEnum docs = r.termDocsEnum(null, Consts.FULL, catTerm, 0);
if (docs != null) {
doc = docs.nextDoc() + base;
break;
@ -454,7 +454,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
final BytesRef catTerm = new BytesRef(categoryPath.toString(delimiter, prefixLen));
int base = 0;
for (AtomicReader r : reader.getSequentialSubReaders()) {
DocsEnum docs = r.termDocsEnum(null, Consts.FULL, catTerm, false);
DocsEnum docs = r.termDocsEnum(null, Consts.FULL, catTerm, 0);
if (docs != null) {
doc = docs.nextDoc() + base;
break;
@ -767,7 +767,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
// 'validation' checks.
cp.clear();
cp.add(t.utf8ToString(), delimiter);
docsEnum = termsEnum.docs(null, docsEnum, false);
docsEnum = termsEnum.docs(null, docsEnum, 0);
boolean res = cache.put(cp, docsEnum.nextDoc() + base);
assert !res : "entries should not have been evicted from the cache";
} else {
@ -859,7 +859,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
// the findCategory() call above failed to find it.
ordinal = addCategory(cp);
}
docs = te.docs(null, docs, false);
docs = te.docs(null, docs, 0);
ordinalMap.addMapping(docs.nextDoc() + base, ordinal);
}
base += ar.maxDoc(); // no deletions, so we're ok

View File

@ -284,7 +284,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
TermsEnum te = terms.iterator(null);
DocsEnum de = null;
while (te.next() != null) {
de = _TestUtil.docs(random(), te, liveDocs, de, false);
de = _TestUtil.docs(random(), te, liveDocs, de, 0);
int cnt = 0;
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
cnt++;

View File

@ -88,7 +88,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
// Obtain facets results and hand-test them
assertCorrectResults(facetsCollector);
DocsEnum td = _TestUtil.docs(random(), ir, "$facets", new BytesRef("$fulltree$"), MultiFields.getLiveDocs(ir), null, false);
DocsEnum td = _TestUtil.docs(random(), ir, "$facets", new BytesRef("$fulltree$"), MultiFields.getLiveDocs(ir), null, 0);
assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
tr.close();
@ -182,7 +182,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
}
private void assertPostingListExists(String field, String text, IndexReader ir) throws IOException {
DocsEnum de = _TestUtil.docs(random(), ir, field, new BytesRef(text), null, null, false);
DocsEnum de = _TestUtil.docs(random(), ir, field, new BytesRef(text), null, null, 0);
assertTrue(de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
}

View File

@ -184,7 +184,7 @@ class TermsIncludingScoreQuery extends Query {
scoreUpto = upto;
TermsEnum.SeekStatus status = termsEnum.seekCeil(terms.get(ords[upto++], spare), true);
if (status == TermsEnum.SeekStatus.FOUND) {
docsEnum = reuse = termsEnum.docs(acceptDocs, reuse, false);
docsEnum = reuse = termsEnum.docs(acceptDocs, reuse, 0);
}
} while (docsEnum == null);
@ -253,7 +253,7 @@ class TermsIncludingScoreQuery extends Query {
scoreUpto = upto;
TermsEnum.SeekStatus status = termsEnum.seekCeil(terms.get(ords[upto++], spare), true);
if (status == TermsEnum.SeekStatus.FOUND) {
docsEnum = reuse = termsEnum.docs(acceptDocs, reuse, false);
docsEnum = reuse = termsEnum.docs(acceptDocs, reuse, 0);
}
} while (docsEnum == null);

View File

@ -682,7 +682,7 @@ public class TestJoinUtil extends LuceneTestCase {
}
for (RandomDoc otherSideDoc : otherMatchingDocs) {
DocsEnum docsEnum = MultiFields.getTermDocsEnum(topLevelReader, MultiFields.getLiveDocs(topLevelReader), "id", new BytesRef(otherSideDoc.id), false);
DocsEnum docsEnum = MultiFields.getTermDocsEnum(topLevelReader, MultiFields.getLiveDocs(topLevelReader), "id", new BytesRef(otherSideDoc.id), 0);
assert docsEnum != null;
int doc = docsEnum.nextDoc();
expectedResult.set(doc);

View File

@ -872,7 +872,7 @@ public class MemoryIndex {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
if (reuse == null || !(reuse instanceof MemoryDocsEnum)) {
reuse = new MemoryDocsEnum();
}

View File

@ -205,7 +205,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
MemoryIndex memory = new MemoryIndex();
memory.addField("foo", "bar", analyzer);
AtomicReader reader = (AtomicReader) memory.createSearcher().getIndexReader();
DocsEnum disi = _TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, null, false);
DocsEnum disi = _TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, null, 0);
int docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -213,7 +213,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
// now reuse and check again
TermsEnum te = reader.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar"), true));
disi = te.docs(null, disi, false);
disi = te.docs(null, disi, 0);
docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -200,7 +200,8 @@ public class HighFreqTerms {
continue;
} // otherwise we fall-through
}
DocsEnum de = r.termDocsEnum(liveDocs, field, termText, true);
// note: what should we do if field omits freqs? currently it counts as 1...
DocsEnum de = r.termDocsEnum(liveDocs, field, termText);
if (de != null) {
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS)
totalTF += de.freq();

View File

@ -82,7 +82,7 @@ public class TermsFilter extends Filter {
br.copyBytes(term.bytes());
assert termsEnum != null;
if (termsEnum.seekCeil(br) == TermsEnum.SeekStatus.FOUND) {
docs = termsEnum.docs(acceptDocs, docs, false);
docs = termsEnum.docs(acceptDocs, docs, 0);
while (docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
result.set(docs.docID());
}

View File

@ -57,16 +57,11 @@ public class TFValueSource extends TermFreqValueSource {
public void reset() throws IOException {
// no one should call us for deleted docs?
boolean omitTF = false;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator(null);
if (termsEnum.seekExact(indexedBytes, false)) {
docs = termsEnum.docs(null, null, true);
if (docs == null) { // omitTF
omitTF = true;
docs = termsEnum.docs(null, null, false);
}
docs = termsEnum.docs(null, null);
} else {
docs = null;
}
@ -96,30 +91,6 @@ public class TFValueSource extends TermFreqValueSource {
return DocIdSetIterator.NO_MORE_DOCS;
}
};
} else if (omitTF) {
// the docsenum won't support freq(), so return 1
final DocsEnum delegate = docs;
docs = new DocsEnum() {
@Override
public int freq() {
return 1;
}
@Override
public int docID() {
return delegate.docID();
}
@Override
public int nextDoc() throws IOException {
return delegate.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return delegate.advance(target);
}
};
}
atDoc = -1;
}

View File

@ -50,16 +50,11 @@ public class TermFreqValueSource extends DocFreqValueSource {
public void reset() throws IOException {
// no one should call us for deleted docs?
boolean omitTF = false;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator(null);
if (termsEnum.seekExact(indexedBytes, false)) {
docs = termsEnum.docs(null, null, true);
if (docs == null) { // omit tf
omitTF = true;
docs = termsEnum.docs(null, null, false);
}
docs = termsEnum.docs(null, null);
} else {
docs = null;
}
@ -89,30 +84,6 @@ public class TermFreqValueSource extends DocFreqValueSource {
return DocIdSetIterator.NO_MORE_DOCS;
}
};
} else if (omitTF) {
// the docsenum won't support freq(), so return 1
final DocsEnum delegate = docs;
docs = new DocsEnum() {
@Override
public int freq() {
return 1;
}
@Override
public int docID() {
return delegate.docID();
}
@Override
public int nextDoc() throws IOException {
return delegate.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return delegate.advance(target);
}
};
}
atDoc = -1;
}

View File

@ -102,7 +102,7 @@ public class DuplicateFilter extends Filter {
if (currTerm == null) {
break;
} else {
docs = termsEnum.docs(acceptDocs, docs, false);
docs = termsEnum.docs(acceptDocs, docs, 0);
int doc = docs.nextDoc();
if (doc != DocIdSetIterator.NO_MORE_DOCS) {
if (keepMode == KeepMode.KM_USE_FIRST_OCCURRENCE) {
@ -142,7 +142,7 @@ public class DuplicateFilter extends Filter {
} else {
if (termsEnum.docFreq() > 1) {
// unset potential duplicates
docs = termsEnum.docs(acceptDocs, docs, false);
docs = termsEnum.docs(acceptDocs, docs, 0);
int doc = docs.nextDoc();
if (doc != DocIdSetIterator.NO_MORE_DOCS) {
if (keepMode == KeepMode.KM_USE_FIRST_OCCURRENCE) {

View File

@ -139,7 +139,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
new BytesRef(url),
MultiFields.getLiveDocs(reader),
null,
false);
0);
int lastDoc = 0;
while (td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
@ -163,7 +163,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
new BytesRef(url),
MultiFields.getLiveDocs(reader),
null,
false);
0);
int lastDoc = 0;
td.nextDoc();

View File

@ -112,7 +112,7 @@ RE "scan" threshold:
if (seekStat == TermsEnum.SeekStatus.NOT_FOUND)
continue;
if (cell.getLevel() == detailLevel || cell.isLeaf()) {
docsEnum = termsEnum.docs(acceptDocs, docsEnum, false);
docsEnum = termsEnum.docs(acceptDocs, docsEnum, 0);
addDocs(docsEnum,bits);
} else {//any other intersection
//If the next indexed term is the leaf marker, then add all of them
@ -120,7 +120,7 @@ RE "scan" threshold:
assert StringHelper.startsWith(nextCellTerm, cellTerm);
scanCell = grid.getNode(nextCellTerm.bytes, nextCellTerm.offset, nextCellTerm.length, scanCell);
if (scanCell.isLeaf()) {
docsEnum = termsEnum.docs(acceptDocs, docsEnum, false);
docsEnum = termsEnum.docs(acceptDocs, docsEnum, 0);
addDocs(docsEnum,bits);
termsEnum.next();//move pointer to avoid potential redundant addDocs() below
}
@ -145,7 +145,7 @@ RE "scan" threshold:
if(queryShape.relate(cShape, grid.getSpatialContext()) == SpatialRelation.DISJOINT)
continue;
docsEnum = termsEnum.docs(acceptDocs, docsEnum, false);
docsEnum = termsEnum.docs(acceptDocs, docsEnum, 0);
addDocs(docsEnum,bits);
}
}//term loop

View File

@ -64,7 +64,7 @@ public abstract class ShapeFieldCacheProvider<T extends Shape> {
while (term != null) {
T shape = readShape(term);
if( shape != null ) {
docs = te.docs(null, docs, false);
docs = te.docs(null, docs, 0);
Integer docid = docs.nextDoc();
while (docid != DocIdSetIterator.NO_MORE_DOCS) {
idx.add( docid, shape );

View File

@ -386,7 +386,7 @@ public class RAMOnlyPostingsFormat extends PostingsFormat {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
return new RAMDocsEnum(ramField.termToDocs.get(current), liveDocs);
}

View File

@ -125,7 +125,7 @@ public class AssertingAtomicReader extends FilterAtomicReader {
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
assert state == State.POSITIONED: "docs(...) called on unpositioned TermsEnum";
// TODO: should we give this thing a random to be super-evil,
@ -133,7 +133,7 @@ public class AssertingAtomicReader extends FilterAtomicReader {
if (reuse instanceof AssertingDocsEnum) {
reuse = ((AssertingDocsEnum) reuse).in;
}
DocsEnum docs = super.docs(liveDocs, reuse, needsFreqs);
DocsEnum docs = super.docs(liveDocs, reuse, flags);
return docs == null ? null : new AssertingDocsEnum(docs);
}

View File

@ -857,7 +857,7 @@ public class _TestUtil {
// Returns a DocsEnum, but randomly sometimes uses a
// DocsAndFreqsEnum, DocsAndPositionsEnum. Returns null
// if field/term doesn't exist:
public static DocsEnum docs(Random random, IndexReader r, String field, BytesRef term, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public static DocsEnum docs(Random random, IndexReader r, String field, BytesRef term, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
final Terms terms = MultiFields.getTerms(r, field);
if (terms == null) {
return null;
@ -866,33 +866,30 @@ public class _TestUtil {
if (!termsEnum.seekExact(term, random.nextBoolean())) {
return null;
}
return docs(random, termsEnum, liveDocs, reuse, needsFreqs);
return docs(random, termsEnum, liveDocs, reuse, flags);
}
// Returns a DocsEnum from a positioned TermsEnum, but
// randomly sometimes uses a DocsAndFreqsEnum, DocsAndPositionsEnum.
public static DocsEnum docs(Random random, TermsEnum termsEnum, Bits liveDocs, DocsEnum reuse, boolean needsFreqs) throws IOException {
public static DocsEnum docs(Random random, TermsEnum termsEnum, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
if (random.nextBoolean()) {
if (random.nextBoolean()) {
final int flags;
final int posFlags;
switch (random.nextInt(4)) {
case 0: flags = 0; break;
case 1: flags = DocsAndPositionsEnum.FLAG_OFFSETS; break;
case 2: flags = DocsAndPositionsEnum.FLAG_PAYLOADS; break;
default: flags = DocsAndPositionsEnum.FLAG_OFFSETS | DocsAndPositionsEnum.FLAG_PAYLOADS; break;
case 0: posFlags = 0; break;
case 1: posFlags = DocsAndPositionsEnum.FLAG_OFFSETS; break;
case 2: posFlags = DocsAndPositionsEnum.FLAG_PAYLOADS; break;
default: posFlags = DocsAndPositionsEnum.FLAG_OFFSETS | DocsAndPositionsEnum.FLAG_PAYLOADS; break;
}
// TODO: cast to DocsAndPositionsEnum?
DocsAndPositionsEnum docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, flags);
DocsAndPositionsEnum docsAndPositions = termsEnum.docsAndPositions(liveDocs, null, posFlags);
if (docsAndPositions != null) {
return docsAndPositions;
}
}
final DocsEnum docsAndFreqs = termsEnum.docs(liveDocs, reuse, true);
if (docsAndFreqs != null) {
return docsAndFreqs;
}
flags |= DocsEnum.FLAG_FREQS;
}
return termsEnum.docs(liveDocs, reuse, needsFreqs);
return termsEnum.docs(liveDocs, reuse, flags);
}
public static CharSequence stringToCharSequence(String string, Random random) {

View File

@ -390,7 +390,7 @@ public class LukeRequestHandler extends RequestHandlerBase
docsEnum = reader.termDocsEnum(reader.getLiveDocs(),
term.field(),
new BytesRef(term.text()),
false);
0);
if (docsEnum != null) {
int docId;
if ((docId = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -543,7 +543,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
for (String id : elevations.ids) {
term.copyChars(id);
if (seen.contains(id) == false && termsEnum.seekExact(term, false)) {
docsEnum = termsEnum.docs(liveDocs, docsEnum, false);
docsEnum = termsEnum.docs(liveDocs, docsEnum, 0);
if (docsEnum != null) {
int docId = docsEnum.nextDoc();
if (docId == DocIdSetIterator.NO_MORE_DOCS ) continue; // must have been deleted

View File

@ -756,7 +756,7 @@ public class SimpleFacets {
// TODO: specialize when base docset is a bitset or hash set (skipDocs)? or does it matter for this?
// TODO: do this per-segment for better efficiency (MultiDocsEnum just uses base class impl)
// TODO: would passing deleted docs lead to better efficiency over checking the fastForRandomSet?
docsEnum = termsEnum.docs(null, docsEnum, false);
docsEnum = termsEnum.docs(null, docsEnum, 0);
c=0;
if (docsEnum instanceof MultiDocsEnum) {

View File

@ -341,7 +341,7 @@ class JoinQuery extends Query {
if (freq < minDocFreqFrom) {
fromTermDirectCount++;
// OK to skip liveDocs, since we check for intersection with docs matching query
fromDeState.docsEnum = fromDeState.termsEnum.docs(null, fromDeState.docsEnum, false);
fromDeState.docsEnum = fromDeState.termsEnum.docs(null, fromDeState.docsEnum, 0);
DocsEnum docsEnum = fromDeState.docsEnum;
if (docsEnum instanceof MultiDocsEnum) {
@ -406,7 +406,7 @@ class JoinQuery extends Query {
toTermDirectCount++;
// need to use liveDocs here so we don't map to any deleted ones
toDeState.docsEnum = toDeState.termsEnum.docs(toDeState.liveDocs, toDeState.docsEnum, false);
toDeState.docsEnum = toDeState.termsEnum.docs(toDeState.liveDocs, toDeState.docsEnum, 0);
DocsEnum docsEnum = toDeState.docsEnum;
if (docsEnum instanceof MultiDocsEnum) {

View File

@ -598,7 +598,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
if (!termsEnum.seekExact(termBytes, false)) {
return -1;
}
DocsEnum docs = termsEnum.docs(atomicReader.getLiveDocs(), null, false);
DocsEnum docs = termsEnum.docs(atomicReader.getLiveDocs(), null, 0);
if (docs == null) return -1;
int id = docs.nextDoc();
return id == DocIdSetIterator.NO_MORE_DOCS ? -1 : id;
@ -620,7 +620,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
final Bits liveDocs = reader.getLiveDocs();
final DocsEnum docs = reader.termDocsEnum(liveDocs, field, idBytes, false);
final DocsEnum docs = reader.termDocsEnum(liveDocs, field, idBytes, 0);
if (docs == null) continue;
int id = docs.nextDoc();
@ -925,7 +925,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
int bitsSet = 0;
OpenBitSet obs = null;
DocsEnum docsEnum = deState.termsEnum.docs(deState.liveDocs, deState.docsEnum, false);
DocsEnum docsEnum = deState.termsEnum.docs(deState.liveDocs, deState.docsEnum, 0);
if (deState.docsEnum == null) {
deState.docsEnum = docsEnum;
}
@ -1003,7 +1003,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
if (terms != null) {
final TermsEnum termsEnum = terms.iterator(null);
if (termsEnum.seekExact(termBytes, false)) {
docsEnum = termsEnum.docs(liveDocs, null, false);
docsEnum = termsEnum.docs(liveDocs, null, 0);
}
}

View File

@ -278,7 +278,7 @@ public class FileFloatSource extends ValueSource {
continue;
}
docsEnum = termsEnum.docs(null, docsEnum, false);
docsEnum = termsEnum.docs(null, docsEnum, 0);
int doc;
while ((doc = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
vals[doc] = fval;

View File

@ -134,7 +134,7 @@ public class TestRTGBase extends SolrTestCaseJ4 {
if (!termsEnum.seekExact(termBytes, false)) {
return -1;
}
DocsEnum docs = termsEnum.docs(MultiFields.getLiveDocs(r), null, false);
DocsEnum docs = termsEnum.docs(MultiFields.getLiveDocs(r), null, 0);
int id = docs.nextDoc();
if (id != DocIdSetIterator.NO_MORE_DOCS) {
int next = docs.nextDoc();