mirror of https://github.com/apache/lucene.git
LUCENE-6271: initial patch, postings() funcs switched over to not return null, callers marked with nocommits that check against null
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene6271@1662157 13f79535-47bb-0310-9956-ffa450edef68
parent b76cc37e61
commit a411d8af6c
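Context for the diff below: this patch changes the postings() contract so that, for a term that exists, the returned enum is no longer null when positions were not indexed; call sites that used to branch on null are tagged with nocommit markers for later audit. The sketch that follows is illustrative only -- PostingsCallerSketch and its methods are invented for this note and are not part of the patch -- and it uses only API calls that appear in the diff to contrast the old and the intended new caller pattern.

// Illustrative sketch only -- PostingsCallerSketch is a made-up helper, not part of this patch.
import java.io.IOException;

import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;

final class PostingsCallerSketch {

  // Old contract: a null return doubled as "term exists but positions were not indexed".
  static long sumFreqsOld(TermsEnum termsEnum) throws IOException {
    PostingsEnum postings = termsEnum.postings(null, null, PostingsEnum.POSITIONS);
    if (postings == null) {
      return 0; // positions not indexed for this field
    }
    return sumFreqs(postings);
  }

  // Intended new contract (as assumed from this patch): the enum is non-null for an
  // existing term, so the null branch shrinks to an assertion while the nocommit
  // markers in the diff flag the call sites that still need review.
  static long sumFreqsNew(TermsEnum termsEnum) throws IOException {
    PostingsEnum postings = termsEnum.postings(null, null, PostingsEnum.POSITIONS);
    assert postings != null : "only a missing field/term should yield null now";
    return sumFreqs(postings);
  }

  private static long sumFreqs(PostingsEnum postings) throws IOException {
    long total = 0;
    while (postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
      total += postings.freq();
    }
    return total;
  }
}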
@@ -652,14 +652,6 @@ public class BlockTermsReader extends FieldsProducer {

       @Override
       public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {
-
-        if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-          if (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
-            // Positions were not indexed:
-            return null;
-          }
-        }
-
         //System.out.println("BTR.docs this=" + this);
         decodeMetaData();
         //System.out.println("BTR.docs: state.docFreq=" + state.docFreq);
@@ -203,14 +203,6 @@ final class OrdsIntersectTermsEnum extends TermsEnum {

     @Override
     public PostingsEnum postings(Bits skipDocs, PostingsEnum reuse, int flags) throws IOException {
-
-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (fr.fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
-          // Positions were not indexed:
-          return null;
-        }
-      }
-
       currentFrame.decodeMetaData();
       return fr.parent.postingsReader.postings(fr.fieldInfo, currentFrame.termState, skipDocs, reuse, flags);
     }
@@ -924,14 +924,6 @@ public final class OrdsSegmentTermsEnum extends TermsEnum {

     @Override
     public PostingsEnum postings(Bits skipDocs, PostingsEnum reuse, int flags) throws IOException {
-
-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (fr.fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
-          // Positions were not indexed:
-          return null;
-        }
-      }
-
       assert !eof;
       //if (DEBUG) {
       //System.out.println("BTTR.docs seg=" + segment);
@@ -860,14 +860,25 @@ public final class DirectPostingsFormat extends PostingsFormat {
       // TODO: implement reuse
       // it's hairy!

+      // TODO: the logic of which enum impl to choose should be refactored to be simpler...
       if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (!hasPos) {
-          return null;
-        }
-
         if (terms[termOrd] instanceof LowFreqTerm) {
           final LowFreqTerm term = ((LowFreqTerm) terms[termOrd]);
           final int[] postings = term.postings;
+          if (hasPos == false) {
+            LowFreqDocsEnumNoPos docsEnum;
+            if (reuse instanceof LowFreqDocsEnumNoPos) {
+              docsEnum = (LowFreqDocsEnumNoPos) reuse;
+              if (!docsEnum.canReuse(liveDocs)) {
+                docsEnum = new LowFreqDocsEnumNoPos(liveDocs);
+              }
+            } else {
+              docsEnum = new LowFreqDocsEnumNoPos(liveDocs);
+            }
+
+            return docsEnum.reset(postings);
+          }
           final byte[] payloads = term.payloads;
           return new LowFreqPostingsEnum(liveDocs, hasOffsets, hasPayloads).reset(postings, payloads);
         } else {
@@ -1454,10 +1465,9 @@ public final class DirectPostingsFormat extends PostingsFormat {
     public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) {
       // TODO: implement reuse
       // it's hairy!
-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (!hasPos) {
-          return null;
-        }
-
+      // TODO: the logic of which enum impl to choose should be refactored to be simpler...
+      if (hasPos && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
         if (terms[termOrd] instanceof LowFreqTerm) {
           final LowFreqTerm term = ((LowFreqTerm) terms[termOrd]);
           final int[] postings = term.postings;
@@ -829,10 +829,9 @@ public final class MemoryPostingsFormat extends PostingsFormat {
     @Override
     public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) {

-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
-          return null;
-        }
+      // TODO: the logic of which enum impl to choose should be refactored to be simpler...
+      boolean hasPositions = field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
+      if (hasPositions && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
         boolean hasOffsets = field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
         decodeMetaData();
         FSTPostingsEnum docsAndPositionsEnum;
@@ -209,11 +209,8 @@ class SimpleTextFieldsReader extends FieldsProducer {
     @Override
     public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {

-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
-          // Positions were not indexed
-          return null;
-        }
+      boolean hasPositions = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
+      if (hasPositions && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {

         SimpleTextPostingsEnum docsAndPositionsEnum;
         if (reuse != null && reuse instanceof SimpleTextPostingsEnum && ((SimpleTextPostingsEnum) reuse).canReuse(SimpleTextFieldsReader.this.in)) {
@@ -388,16 +388,15 @@ public class SimpleTextTermVectorsReader extends TermVectorsReader {

     @Override
     public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {

       if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
         SimpleTVPostings postings = current.getValue();
-        if (postings.positions == null && postings.startOffsets == null) {
-          return null;
+        if (postings.positions != null || postings.startOffsets != null) {
+          // TODO: reuse
+          SimpleTVPostingsEnum e = new SimpleTVPostingsEnum();
+          e.reset(liveDocs, postings.positions, postings.startOffsets, postings.endOffsets, postings.payloads);
+          return e;
         }
-        // TODO: reuse
-        SimpleTVPostingsEnum e = new SimpleTVPostingsEnum();
-        e.reset(liveDocs, postings.positions, postings.startOffsets, postings.endOffsets, postings.payloads);
-        return e;
       }

       // TODO: reuse
@@ -936,12 +936,6 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implem

     @Override
     public final PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {
-
-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
-        if (positions == null && startOffsets == null)
-          return null;
-      }
-
       final TVPostingsEnum docsEnum;
       if (reuse != null && reuse instanceof TVPostingsEnum) {
         docsEnum = (TVPostingsEnum) reuse;
@@ -193,8 +193,12 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {

   @Override
   public PostingsEnum postings(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {

+    boolean indexHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
+    boolean indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
+    boolean indexHasPayloads = fieldInfo.hasPayloads();
+
-    if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS) == false) {
+    if (indexHasPositions == false || PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS) == false) {
       BlockDocsEnum docsEnum;
       if (reuse instanceof BlockDocsEnum) {
         docsEnum = (BlockDocsEnum) reuse;
@@ -205,17 +209,8 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
         docsEnum = new BlockDocsEnum(fieldInfo);
       }
       return docsEnum.reset(liveDocs, (IntBlockTermState) termState, flags);
-    }
-
-    boolean indexHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
-    boolean indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
-    boolean indexHasPayloads = fieldInfo.hasPayloads();
-
-    if (!indexHasPositions)
-      return null;
-
-    if ((!indexHasOffsets || PostingsEnum.featureRequested(flags, PostingsEnum.OFFSETS) == false) &&
-        (!indexHasPayloads || PostingsEnum.featureRequested(flags, PostingsEnum.PAYLOADS) == false)) {
+    } else if ((indexHasOffsets == false || PostingsEnum.featureRequested(flags, PostingsEnum.OFFSETS) == false) &&
+               (indexHasPayloads == false || PostingsEnum.featureRequested(flags, PostingsEnum.PAYLOADS) == false)) {
       BlockPostingsEnum docsAndPositionsEnum;
       if (reuse instanceof BlockPostingsEnum) {
         docsAndPositionsEnum = (BlockPostingsEnum) reuse;
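Aside, not part of the diff: the two Lucene50PostingsReader hunks above hoist the index-capability booleans to the top of postings() and fold the positions case into an else-if chain, so the method picks an enum implementation instead of returning null. Below is a rough, hypothetical mirror of that selection order; it is simplified, and the name of the final catch-all enum ("EverythingEnum") is an assumption about code below the hunk, not something shown in this diff.

// Simplified, hypothetical mirror of the branch structure after this hunk; not the patched method.
import org.apache.lucene.index.PostingsEnum;

final class EnumSelectionSketch {
  static String choose(boolean indexHasPositions, boolean indexHasOffsets,
                       boolean indexHasPayloads, int flags) {
    if (indexHasPositions == false
        || PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS) == false) {
      return "BlockDocsEnum";       // docs and freqs only
    } else if ((indexHasOffsets == false
            || PostingsEnum.featureRequested(flags, PostingsEnum.OFFSETS) == false)
        && (indexHasPayloads == false
            || PostingsEnum.featureRequested(flags, PostingsEnum.PAYLOADS) == false)) {
      return "BlockPostingsEnum";   // positions, but neither offsets nor payloads requested
    } else {
      return "EverythingEnum";      // assumed name for the offsets/payloads-capable enum
    }
  }
}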
@@ -1050,6 +1050,7 @@ public class CheckIndex implements Closeable {
         sumDocFreq += docFreq;

         docs = termsEnum.postings(liveDocs, docs);
+        // nocommit: check null
         postings = termsEnum.postings(liveDocs, postings, PostingsEnum.ALL);

         if (hasFreqs == false) {
@@ -1389,6 +1390,7 @@ public class CheckIndex implements Closeable {
         }

         docs = termsEnum.postings(liveDocs, docs, PostingsEnum.NONE);
+        // nocommit: null check still needed? how to replace?
         if (docs == null) {
           throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
         }
@@ -1407,6 +1409,7 @@ public class CheckIndex implements Closeable {

         totDocFreq += termsEnum.docFreq();
         docs = termsEnum.postings(null, docs, PostingsEnum.NONE);
+        // nocommit: null check still needed? how to replace?
         if (docs == null) {
           throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
         }
@@ -159,8 +159,7 @@ public final class MultiFields extends Fields {
    * required. Some codecs may be able to optimize
    * their implementation when offsets and/or payloads are not
    * required. This will return null if the field or term does not
-   * exist or positions were not indexed. See {@link
-   * TermsEnum#postings(Bits, PostingsEnum,int)}. */
+   * exist. See {@link TermsEnum#postings(Bits, PostingsEnum,int)}. */
   public static PostingsEnum getTermPositionsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term, int flags) throws IOException {
     assert field != null;
     assert term != null;
@@ -379,22 +379,14 @@ public final class MultiTermsEnum extends TermsEnum {

         assert entry.index < docsEnum.subPostingsEnums.length: entry.index + " vs " + docsEnum.subPostingsEnums.length + "; " + subs.length;
         final PostingsEnum subPostingsEnum = entry.terms.postings(b, docsEnum.subPostingsEnums[entry.index], flags);
-        if (subPostingsEnum != null) {
-          docsEnum.subPostingsEnums[entry.index] = subPostingsEnum;
-          subDocs[upto].postingsEnum = subPostingsEnum;
-          subDocs[upto].slice = entry.subSlice;
-          upto++;
-        } else {
-          // should this be an error?
-          return null; // We can't support what is being asked for
-        }
-      }
-
-      if (upto == 0) {
-        return null;
-      } else {
-        return docsEnum.reset(subDocs, upto);
+        assert subPostingsEnum != null;
+        docsEnum.subPostingsEnums[entry.index] = subPostingsEnum;
+        subDocs[upto].postingsEnum = subPostingsEnum;
+        subDocs[upto].slice = entry.subSlice;
+        upto++;
       }
+
+      return docsEnum.reset(subDocs, upto);
     }

     final static class TermsEnumWithSlice {
@@ -230,6 +230,7 @@ public class MultiPhraseQuery extends Query {
         termsEnum.seekExact(term.bytes(), termState);
         postingsEnum = termsEnum.postings(liveDocs, null, PostingsEnum.POSITIONS);

+        // nocommit: check
         if (postingsEnum == null) {
           // term does exist, but has no positions
           assert termsEnum.postings(liveDocs, null, PostingsEnum.NONE) != null: "termstate found but no term exists in reader";
@@ -483,6 +484,7 @@ class UnionPostingsEnum extends PostingsEnum {
       }
       termsEnum.seekExact(term.bytes(), termState);
       PostingsEnum postings = termsEnum.postings(liveDocs, null, PostingsEnum.POSITIONS);
+      // nocommit: check
       if (postings == null) {
         // term does exist, but has no positions
         throw new IllegalStateException("field \"" + term.field() + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.text() + ")");
@@ -312,6 +312,7 @@ public class PhraseQuery extends Query {

         // PhraseQuery on a field that did not index
         // positions.
+        // nocommit: check
         if (postingsEnum == null) {
           assert te.seekExact(t.bytes()) : "termstate found but no term exists in reader";
           // term does exist, but has no positions
@@ -119,6 +119,7 @@ public class SpanTermQuery extends SpanQuery {
       if (postings != null) {
         return new TermSpans(postings, term);
       } else {
+        // nocommit: check
         // term does exist, but has no positions
         throw new IllegalStateException("field \"" + term.field() + "\" was indexed without position data; cannot run SpanTermQuery (term=" + term.text() + ")");
       }
@@ -458,6 +458,7 @@ public class TestCodecs extends LuceneTestCase {
         if (postings != null) {
           docs = postings;
         } else {
+          // nocommit: check
           docs = TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.FREQS);
         }
       } else {
@@ -84,18 +84,22 @@ public class TestPostingsEnum extends LuceneTestCase {

     // we did not index positions
     PostingsEnum docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.POSITIONS);
+    // nocommit: check
     assertNull(docsAndPositionsEnum);

     // we did not index positions
     docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.PAYLOADS);
+    // nocommit: check
    assertNull(docsAndPositionsEnum);

     // we did not index positions
     docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.OFFSETS);
+    // nocommit: check
     assertNull(docsAndPositionsEnum);

     // we did not index positions
     docsAndPositionsEnum = getOnlySegmentReader(reader).postings(new Term("foo", "bar"), PostingsEnum.ALL);
+    // nocommit: check
     assertNull(docsAndPositionsEnum);

     iw.close();
@@ -397,6 +397,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
       while((term2 = termsEnum3.next()) != null) {
         System.out.println(" " + term2.utf8ToString() + ": freq=" + termsEnum3.totalTermFreq());
         dpEnum = termsEnum3.postings(null, dpEnum, PostingsEnum.ALL);
+        // nocommit: check
         if (dpEnum != null) {
           assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
           final int freq = dpEnum.freq();
@@ -620,6 +621,7 @@ public class TestStressIndexing2 extends LuceneTestCase {

         dpEnum1 = termsEnum1.postings(null, dpEnum1, PostingsEnum.ALL);
         dpEnum2 = termsEnum2.postings(null, dpEnum2, PostingsEnum.ALL);
+        // nocommit: check
         if (dpEnum1 != null) {
           assertNotNull(dpEnum2);
           int docID1 = dpEnum1.nextDoc();
@@ -292,6 +292,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
       //System.out.println("Term: " + term);
       assertEquals(testTerms[i], term);
       assertNotNull(termsEnum.postings(null, null));
+      // nocommit: check
       assertNull(termsEnum.postings(null, null, PostingsEnum.ALL)); // no pos
     }
     reader.close();
@@ -552,6 +552,7 @@ public class PostingsHighlighter {
         if (!termsEnum.seekExact(terms[i])) {
           continue; // term not found
         }
+        // nocommit: check
         de = postings[i] = termsEnum.postings(null, null, PostingsEnum.OFFSETS);
         if (de == null) {
           // no positions available
@@ -104,6 +104,7 @@ public class FieldTermStack {
       if (!termSet.contains(term)) {
         continue;
       }
+      // nocommit: check
       dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.POSITIONS);
       if (dpEnum == null) {
         // null snippet
@@ -88,13 +88,13 @@ public class SortingLeafReader extends FilterLeafReader {

     @Override
     public TermsEnum iterator(final TermsEnum reuse) throws IOException {
-      return new SortingTermsEnum(in.iterator(reuse), docMap, indexOptions);
+      return new SortingTermsEnum(in.iterator(reuse), docMap, indexOptions, hasPositions());
     }

     @Override
     public TermsEnum intersect(CompiledAutomaton compiled, BytesRef startTerm)
         throws IOException {
-      return new SortingTermsEnum(in.intersect(compiled, startTerm), docMap, indexOptions);
+      return new SortingTermsEnum(in.intersect(compiled, startTerm), docMap, indexOptions, hasPositions());
     }

   }
@@ -103,11 +103,13 @@ public class SortingLeafReader extends FilterLeafReader {

     final Sorter.DocMap docMap; // pkg-protected to avoid synthetic accessor methods
     private final IndexOptions indexOptions;
+    private final boolean hasPositions;

-    public SortingTermsEnum(final TermsEnum in, Sorter.DocMap docMap, IndexOptions indexOptions) {
+    public SortingTermsEnum(final TermsEnum in, Sorter.DocMap docMap, IndexOptions indexOptions, boolean hasPositions) {
       super(in);
       this.docMap = docMap;
       this.indexOptions = indexOptions;
+      this.hasPositions = hasPositions;
     }

     Bits newToOld(final Bits liveDocs) {
@@ -132,7 +134,7 @@ public class SortingLeafReader extends FilterLeafReader {
     @Override
     public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, final int flags) throws IOException {

-      if (PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
+      if (hasPositions && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) {
         final PostingsEnum inReuse;
         final SortingPostingsEnum wrapReuse;
         if (reuse != null && reuse instanceof SortingPostingsEnum) {
@@ -146,10 +148,6 @@ public class SortingLeafReader extends FilterLeafReader {
         }

         final PostingsEnum inDocsAndPositions = in.postings(newToOld(liveDocs), inReuse, flags);
-        if (inDocsAndPositions == null) {
-          return null;
-        }
-
         // we ignore the fact that offsets may be stored but not asked for,
         // since this code is expected to be used during addIndexes which will
         // ask for everything. if that assumption changes in the future, we can
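Aside, not part of the diff: the SortingLeafReader hunks thread a hasPositions flag into SortingTermsEnum at construction time, so postings() can decide up front whether the position-sorting path applies instead of probing the delegate for a null return. A minimal sketch of that pattern follows; the wrapper name is invented and only the featureRequested call is taken from the diff.

// Illustrative pattern only: capture the capability when the wrapper is built,
// rather than inferring it later from a null postings() result.
import org.apache.lucene.index.PostingsEnum;

final class CapabilityAwareWrapperSketch {
  private final boolean hasPositions;

  CapabilityAwareWrapperSketch(boolean hasPositions) {
    this.hasPositions = hasPositions;
  }

  // Mirrors the patched condition:
  // if (hasPositions && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS)) { ... }
  boolean shouldUsePositionsPath(int flags) {
    return hasPositions && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS);
  }
}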
@@ -335,6 +335,7 @@ public class TermAutomatonQuery extends Query {

     public EnumAndScorer(int termID, PostingsEnum posEnum) {
       this.termID = termID;
+      // nocommit: check uses
       this.posEnum = posEnum;
     }
   }
@@ -159,9 +159,8 @@ public class AssertingLeafReader extends FilterLeafReader {
         actualReuse = null;
       }
       PostingsEnum docs = super.postings(liveDocs, actualReuse, flags);
-      if (docs == null) {
-        return null;
-      } else if (docs == actualReuse) {
+      assert docs != null;
+      if (docs == actualReuse) {
         // codec reused, reset asserting state
         ((AssertingPostingsEnum)reuse).reset();
         return reuse;
@@ -1626,6 +1626,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
         } else {
           docs = termsEnum.postings(null, null, PostingsEnum.POSITIONS);
         }
+        // nocommit: can we remove the noPositions and always ask for positions here?
         int docFreq = 0;
         long totalTermFreq = 0;
         while (docs.nextDoc() != PostingsEnum.NO_MORE_DOCS) {
@@ -454,6 +454,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
       bits.clear(0);
       PostingsEnum docsAndPositionsEnum = termsEnum.postings(bits, random().nextBoolean() ? null : this.docsEnum.get(), PostingsEnum.POSITIONS);
       assertEquals(ft.storeTermVectorOffsets() || ft.storeTermVectorPositions(), docsAndPositionsEnum != null);
+      // nocommit: check
       if (docsAndPositionsEnum != null) {
         assertEquals(PostingsEnum.NO_MORE_DOCS, docsAndPositionsEnum.nextDoc());
       }
@@ -1968,6 +1968,7 @@ public abstract class LuceneTestCase extends Assert {
    */
   public void assertDocsAndPositionsEnumEquals(String info, PostingsEnum leftDocs, PostingsEnum rightDocs) throws IOException {
     if (leftDocs == null || rightDocs == null) {
+      // nocommit: this should now only be for term or field not existing, is this assert used in that way?
       assertNull(leftDocs);
       assertNull(rightDocs);
       return;
@@ -1020,6 +1020,7 @@ public final class TestUtil {
       default: posFlags = PostingsEnum.ALL; break;
     }
     PostingsEnum docsAndPositions = termsEnum.postings(liveDocs, null, posFlags);
+    // nocommit: check
     if (docsAndPositions != null) {
       return docsAndPositions;
     }
@@ -693,6 +693,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
         term.copyChars(id);
         if (seen.contains(id) == false && termsEnum.seekExact(term.get())) {
           postingsEnum = termsEnum.postings(liveDocs, postingsEnum, PostingsEnum.NONE);
+          // nocommit: check, maybe just remove null check?
           if (postingsEnum != null) {
             int docId = postingsEnum.nextDoc();
             if (docId == DocIdSetIterator.NO_MORE_DOCS ) continue; // must have been deleted
@@ -798,6 +798,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
       return -1;
     }
     PostingsEnum docs = termsEnum.postings(leafReader.getLiveDocs(), null, PostingsEnum.NONE);
+    // nocommit: check
     if (docs == null) return -1;
     int id = docs.nextDoc();
     return id == DocIdSetIterator.NO_MORE_DOCS ? -1 : id;
@@ -1169,6 +1170,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
     FixedBitSet fbs = null;

     PostingsEnum postingsEnum = deState.termsEnum.postings(deState.liveDocs, deState.postingsEnum, PostingsEnum.NONE);
+    // nocommit: check
     if (deState.postingsEnum == null) {
       deState.postingsEnum = postingsEnum;
     }