LUCENE-6246: Fix DocsEnum -> PostingsEnum transition (phase 1)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1660366 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2015-02-17 12:46:55 +00:00
parent a8b94ea3c2
commit 69300ee467
93 changed files with 475 additions and 405 deletions

View File

@ -111,7 +111,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
TermsEnum termsEnum = vector.iterator(null);
termsEnum.next();
assertEquals(2, termsEnum.totalTermFreq());
PostingsEnum positions = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum positions = termsEnum.postings(null, null, PostingsEnum.ALL);
assertTrue(positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(2, positions.freq());
positions.nextPosition();

View File

@ -958,7 +958,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// should be found exactly
assertEquals(TermsEnum.SeekStatus.FOUND,
terms.seekCeil(aaaTerm));
assertEquals(35, countDocs(TestUtil.docs(random(), terms, null, null, PostingsEnum.FLAG_NONE)));
assertEquals(35, countDocs(TestUtil.docs(random(), terms, null, null, PostingsEnum.NONE)));
assertNull(terms.next());
// should hit end of field
@ -970,12 +970,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
assertEquals(TermsEnum.SeekStatus.NOT_FOUND,
terms.seekCeil(new BytesRef("a")));
assertTrue(terms.term().bytesEquals(aaaTerm));
assertEquals(35, countDocs(TestUtil.docs(random(), terms, null, null, PostingsEnum.FLAG_NONE)));
assertEquals(35, countDocs(TestUtil.docs(random(), terms, null, null, PostingsEnum.NONE)));
assertNull(terms.next());
assertEquals(TermsEnum.SeekStatus.FOUND,
terms.seekCeil(aaaTerm));
assertEquals(35, countDocs(TestUtil.docs(random(), terms, null, null, PostingsEnum.FLAG_NONE)));
assertEquals(35, countDocs(TestUtil.docs(random(), terms, null, null, PostingsEnum.NONE)));
assertNull(terms.next());
r.close();

View File

@ -499,7 +499,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
TermsEnum termsEnum = terms.iterator(null);
PostingsEnum docs = null;
while(termsEnum.next() != null) {
docs = TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(reader), docs, PostingsEnum.FLAG_FREQS);
docs = TestUtil.docs(random(), termsEnum, MultiFields.getLiveDocs(reader), docs, PostingsEnum.FREQS);
while(docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
totalTokenCount2 += docs.freq();
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexOptions;
@ -358,7 +357,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
termOffsets[count+1] = termOffset;
if (hasPos) {
docsAndPositionsEnum = termsEnum.postings(null, docsAndPositionsEnum, PostingsEnum.FLAG_ALL);
docsAndPositionsEnum = termsEnum.postings(null, docsAndPositionsEnum, PostingsEnum.ALL);
} else {
postingsEnum = termsEnum.postings(null, postingsEnum);
}
@ -861,7 +860,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
// TODO: implement reuse
// it's hairy!
if ((flags & PostingsEnum.FLAG_POSITIONS) >= PostingsEnum.FLAG_POSITIONS) {
if ((flags & PostingsEnum.POSITIONS) >= PostingsEnum.POSITIONS) {
if (!hasPos) {
return null;
}
@ -1455,7 +1454,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) {
// TODO: implement reuse
// it's hairy!
if ((flags & PostingsEnum.FLAG_POSITIONS) >= PostingsEnum.FLAG_POSITIONS) {
if ((flags & PostingsEnum.POSITIONS) >= PostingsEnum.POSITIONS) {
if (!hasPos) {
return null;
}
@ -1510,7 +1509,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
}
// Docs only:
private final static class LowFreqDocsEnumNoTF extends DocsEnum {
private final static class LowFreqDocsEnumNoTF extends PostingsEnum {
private int[] postings;
private final Bits liveDocs;
private int upto;
@ -1571,6 +1570,21 @@ public final class DirectPostingsFormat extends PostingsFormat {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int advance(int target) throws IOException {
// Linear scan, but this is low-freq term so it won't
@ -1585,7 +1599,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
}
// Docs + freqs:
private final static class LowFreqDocsEnumNoPos extends DocsEnum {
private final static class LowFreqDocsEnumNoPos extends PostingsEnum {
private int[] postings;
private final Bits liveDocs;
private int upto;
@ -1645,6 +1659,21 @@ public final class DirectPostingsFormat extends PostingsFormat {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int advance(int target) throws IOException {
// Linear scan, but this is low-freq term so it won't
@ -1659,7 +1688,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
}
// Docs + freqs + positions/offsets:
private final static class LowFreqDocsEnum extends DocsEnum {
private final static class LowFreqDocsEnum extends PostingsEnum {
private int[] postings;
private final Bits liveDocs;
private final int posMult;
@ -1735,6 +1764,21 @@ public final class DirectPostingsFormat extends PostingsFormat {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int advance(int target) throws IOException {
// Linear scan, but this is low-freq term so it won't
@ -1907,7 +1951,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
}
// Docs + freqs:
private final static class HighFreqDocsEnum extends DocsEnum {
private final static class HighFreqDocsEnum extends PostingsEnum {
private int[] docIDs;
private int[] freqs;
private final Bits liveDocs;
@ -2082,6 +2126,26 @@ public final class DirectPostingsFormat extends PostingsFormat {
public long cost() {
return docIDs.length;
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}
// TODO: specialize offsets and not

View File

@ -31,7 +31,6 @@ import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.TermStats;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
@ -330,19 +329,19 @@ public final class MemoryPostingsFormat extends PostingsFormat {
if (writeFreqs == false) {
enumFlags = 0;
} else if (writePositions == false) {
enumFlags = PostingsEnum.FLAG_FREQS;
enumFlags = PostingsEnum.FREQS;
} else if (writeOffsets == false) {
if (writePayloads) {
enumFlags = PostingsEnum.FLAG_PAYLOADS;
enumFlags = PostingsEnum.PAYLOADS;
}
else {
enumFlags = PostingsEnum.FLAG_POSITIONS;
enumFlags = PostingsEnum.POSITIONS;
}
} else {
if (writePayloads) {
enumFlags = PostingsEnum.FLAG_PAYLOADS | PostingsEnum.FLAG_OFFSETS;
enumFlags = PostingsEnum.PAYLOADS | PostingsEnum.OFFSETS;
} else {
enumFlags = PostingsEnum.FLAG_OFFSETS;
enumFlags = PostingsEnum.OFFSETS;
}
}
@ -428,7 +427,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
return new MemoryFieldsConsumer(state);
}
private final static class FSTDocsEnum extends DocsEnum {
private final static class FSTDocsEnum extends PostingsEnum {
private final IndexOptions indexOptions;
private final boolean storePayloads;
private byte[] buffer = new byte[16];
@ -551,6 +550,26 @@ public final class MemoryPostingsFormat extends PostingsFormat {
public long cost() {
return numDocs;
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}
private final static class FSTPostingsEnum extends PostingsEnum {
@ -810,7 +829,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
@Override
public PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) {
if ((flags & PostingsEnum.FLAG_POSITIONS) >= PostingsEnum.FLAG_POSITIONS) {
if ((flags & PostingsEnum.POSITIONS) >= PostingsEnum.POSITIONS) {
if (field.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
return null;
}

View File

@ -27,7 +27,6 @@ import java.util.Map;
import java.util.TreeMap;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexOptions;
@ -237,7 +236,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
}
private class SimpleTextDocsEnum extends DocsEnum {
private class SimpleTextDocsEnum extends PostingsEnum {
private final IndexInput inStart;
private final IndexInput in;
private boolean omitTF;
@ -283,6 +282,21 @@ class SimpleTextFieldsReader extends FieldsProducer {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int nextDoc() throws IOException {
if (docID == NO_MORE_DOCS) {

View File

@ -78,16 +78,16 @@ class SimpleTextFieldsWriter extends FieldsConsumer {
int flags = 0;
if (hasPositions) {
flags = PostingsEnum.FLAG_POSITIONS;
flags = PostingsEnum.POSITIONS;
if (hasPayloads) {
flags = flags | PostingsEnum.FLAG_PAYLOADS;
flags = flags | PostingsEnum.PAYLOADS;
}
if (hasOffsets) {
flags = flags | PostingsEnum.FLAG_OFFSETS;
flags = flags | PostingsEnum.OFFSETS;
}
} else {
if (hasFreqs) {
flags = flags | PostingsEnum.FLAG_FREQS;
flags = flags | PostingsEnum.FREQS;
}
}

View File

@ -25,7 +25,6 @@ import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexFileNames;
@ -403,14 +402,14 @@ public class SimpleTextTermVectorsReader extends TermVectorsReader {
// TODO: reuse
SimpleTVDocsEnum e = new SimpleTVDocsEnum();
e.reset(liveDocs, (flags & PostingsEnum.FLAG_FREQS) == 0 ? 1 : current.getValue().freq);
e.reset(liveDocs, (flags & PostingsEnum.FREQS) == 0 ? 1 : current.getValue().freq);
return e;
}
}
// note: these two enum classes are exactly like the Default impl...
private static class SimpleTVDocsEnum extends DocsEnum {
private static class SimpleTVDocsEnum extends PostingsEnum {
private boolean didNext;
private int doc = -1;
private int freq;
@ -428,6 +427,21 @@ public class SimpleTextTermVectorsReader extends TermVectorsReader {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int docID() {
return doc;

View File

@ -98,18 +98,18 @@ public abstract class PushPostingsWriterBase extends PostingsWriterBase {
if (writeFreqs == false) {
enumFlags = 0;
} else if (writePositions == false) {
enumFlags = PostingsEnum.FLAG_FREQS;
enumFlags = PostingsEnum.FREQS;
} else if (writeOffsets == false) {
if (writePayloads) {
enumFlags = PostingsEnum.FLAG_PAYLOADS;
enumFlags = PostingsEnum.PAYLOADS;
} else {
enumFlags = PostingsEnum.FLAG_POSITIONS;
enumFlags = PostingsEnum.POSITIONS;
}
} else {
if (writePayloads) {
enumFlags = PostingsEnum.FLAG_PAYLOADS | PostingsEnum.FLAG_OFFSETS;
enumFlags = PostingsEnum.PAYLOADS | PostingsEnum.OFFSETS;
} else {
enumFlags = PostingsEnum.FLAG_OFFSETS;
enumFlags = PostingsEnum.OFFSETS;
}
}

View File

@ -267,7 +267,7 @@ public abstract class TermVectorsWriter implements Closeable {
startTerm(termsEnum.term(), freq);
if (hasPositions || hasOffsets) {
docsAndPositionsEnum = termsEnum.postings(null, docsAndPositionsEnum, PostingsEnum.FLAG_OFFSETS | PostingsEnum.FLAG_PAYLOADS);
docsAndPositionsEnum = termsEnum.postings(null, docsAndPositionsEnum, PostingsEnum.OFFSETS | PostingsEnum.PAYLOADS);
assert docsAndPositionsEnum != null;
final int docID = docsAndPositionsEnum.nextDoc();

View File

@ -937,7 +937,7 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implem
@Override
public final PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {
if ((flags & PostingsEnum.FLAG_POSITIONS) >= PostingsEnum.FLAG_POSITIONS) {
if ((flags & PostingsEnum.POSITIONS) >= PostingsEnum.POSITIONS) {
if (positions == null && startOffsets == null)
return null;
}

View File

@ -24,7 +24,6 @@ import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.IntBlockTermState;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexOptions;
@ -195,7 +194,7 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
@Override
public PostingsEnum postings(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, PostingsEnum reuse, int flags) throws IOException {
if ((flags & PostingsEnum.FLAG_POSITIONS) < PostingsEnum.FLAG_POSITIONS) {
if ((flags & PostingsEnum.POSITIONS) < PostingsEnum.POSITIONS) {
BlockDocsEnum docsEnum;
if (reuse instanceof BlockDocsEnum) {
docsEnum = (BlockDocsEnum) reuse;
@ -215,8 +214,8 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
if (!indexHasPositions)
return null;
if ((!indexHasOffsets || (flags & PostingsEnum.FLAG_OFFSETS) == 0) &&
(!indexHasPayloads || (flags & PostingsEnum.FLAG_PAYLOADS) == 0)) {
if ((!indexHasOffsets || (flags & PostingsEnum.OFFSETS) == 0) &&
(!indexHasPayloads || (flags & PostingsEnum.PAYLOADS) == 0)) {
BlockPostingsEnum docsAndPositionsEnum;
if (reuse instanceof BlockPostingsEnum) {
docsAndPositionsEnum = (BlockPostingsEnum) reuse;
@ -241,7 +240,7 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
}
}
final class BlockDocsEnum extends DocsEnum {
final class BlockDocsEnum extends PostingsEnum {
private final byte[] encoded;
private final int[] docDeltaBuffer = new int[MAX_DATA_SIZE];
@ -318,7 +317,7 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
}
doc = -1;
this.needsFreq = (flags & PostingsEnum.FLAG_FREQS) != 0;
this.needsFreq = (flags & PostingsEnum.FREQS) != 0;
if (indexHasFreq == false || needsFreq == false) {
Arrays.fill(freqBuffer, 1);
}
@ -341,6 +340,21 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int docID() {
return doc;
@ -968,8 +982,8 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
lastPosBlockFP = posTermStartFP + termState.lastPosBlockOffset;
}
this.needsOffsets = (flags & PostingsEnum.FLAG_OFFSETS) != 0;
this.needsPayloads = (flags & PostingsEnum.FLAG_PAYLOADS) != 0;
this.needsOffsets = (flags & PostingsEnum.OFFSETS) != 0;
this.needsPayloads = (flags & PostingsEnum.PAYLOADS) != 0;
doc = -1;
accum = 0;

View File

@ -562,7 +562,7 @@ class BufferedUpdatesStream implements Accountable {
if (state.delGen < delGen) {
// we don't need term frequencies for this
state.postingsEnum = state.termsEnum.postings(state.rld.getLiveDocs(), state.postingsEnum, PostingsEnum.FLAG_NONE);
state.postingsEnum = state.termsEnum.postings(state.rld.getLiveDocs(), state.postingsEnum, PostingsEnum.NONE);
assert state.postingsEnum != null;
@ -658,7 +658,7 @@ class BufferedUpdatesStream implements Accountable {
if (termsEnum.seekExact(term.bytes())) {
// we don't need term frequencies for this
postingsEnum = termsEnum.postings(segState.rld.getLiveDocs(), postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(segState.rld.getLiveDocs(), postingsEnum, PostingsEnum.NONE);
DocValuesFieldUpdates dvUpdates = dvUpdatesContainer.getUpdates(update.field, update.type);
if (dvUpdates == null) {

View File

@ -1050,7 +1050,7 @@ public class CheckIndex implements Closeable {
sumDocFreq += docFreq;
docs = termsEnum.postings(liveDocs, docs);
postings = termsEnum.postings(liveDocs, postings, PostingsEnum.FLAG_ALL);
postings = termsEnum.postings(liveDocs, postings, PostingsEnum.ALL);
if (hasFreqs == false) {
if (termsEnum.totalTermFreq() != -1) {
@ -1184,7 +1184,7 @@ public class CheckIndex implements Closeable {
totalTermFreq += docsNoDel.freq();
}
} else {
final PostingsEnum docsNoDel = termsEnum.postings(null, docs, PostingsEnum.FLAG_NONE);
final PostingsEnum docsNoDel = termsEnum.postings(null, docs, PostingsEnum.NONE);
docCount = 0;
totalTermFreq = -1;
while(docsNoDel.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
@ -1211,7 +1211,7 @@ public class CheckIndex implements Closeable {
if (hasPositions) {
for(int idx=0;idx<7;idx++) {
final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
postings = termsEnum.postings(liveDocs, postings, PostingsEnum.FLAG_ALL);
postings = termsEnum.postings(liveDocs, postings, PostingsEnum.ALL);
final int docID = postings.advance(skipDocID);
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
@ -1275,7 +1275,7 @@ public class CheckIndex implements Closeable {
} else {
for(int idx=0;idx<7;idx++) {
final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
docs = termsEnum.postings(liveDocs, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(liveDocs, docs, PostingsEnum.NONE);
final int docID = docs.advance(skipDocID);
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
@ -1347,7 +1347,7 @@ public class CheckIndex implements Closeable {
}
int expectedDocFreq = termsEnum.docFreq();
PostingsEnum d = termsEnum.postings(null, null, PostingsEnum.FLAG_NONE);
PostingsEnum d = termsEnum.postings(null, null, PostingsEnum.NONE);
int docFreq = 0;
while (d.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
docFreq++;
@ -1388,7 +1388,7 @@ public class CheckIndex implements Closeable {
throw new RuntimeException("seek to existing term " + seekTerms[i] + " failed");
}
docs = termsEnum.postings(liveDocs, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(liveDocs, docs, PostingsEnum.NONE);
if (docs == null) {
throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
}
@ -1406,7 +1406,7 @@ public class CheckIndex implements Closeable {
}
totDocFreq += termsEnum.docFreq();
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(null, docs, PostingsEnum.NONE);
if (docs == null) {
throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
}
@ -1922,7 +1922,7 @@ public class CheckIndex implements Closeable {
while ((term = termsEnum.next()) != null) {
if (hasProx) {
postings = termsEnum.postings(null, postings, PostingsEnum.FLAG_ALL);
postings = termsEnum.postings(null, postings, PostingsEnum.ALL);
assert postings != null;
docs = null;
} else {
@ -1944,7 +1944,7 @@ public class CheckIndex implements Closeable {
if (!postingsTermsEnum.seekExact(term)) {
throw new RuntimeException("vector term=" + term + " field=" + field + " does not exist in postings; doc=" + j);
}
postingsPostings = postingsTermsEnum.postings(null, postingsPostings, PostingsEnum.FLAG_ALL);
postingsPostings = postingsTermsEnum.postings(null, postingsPostings, PostingsEnum.ALL);
if (postingsPostings == null) {
// Term vectors were indexed w/ pos but postings were not
postingsDocs = postingsTermsEnum.postings(null, postingsDocs);

View File

@ -1,70 +0,0 @@
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.util.BytesRef;
/**
* Convenience class returning empty values for positions, offsets and payloads
*/
public abstract class DocsEnum extends PostingsEnum {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
protected DocsEnum() {
super();
}
/**
* @return -1, indicating no positions are available
* @throws IOException if a low-level IO exception occurred
*/
@Override
public int nextPosition() throws IOException {
return -1;
}
/**
* @return -1, indicating no offsets are available
* @throws IOException if a low-level IO exception occurred
*/
@Override
public int startOffset() throws IOException {
return -1;
}
/**
* @return -1, indicating no offsets are available
* @throws IOException if a low-level IO exception occurred
*/
@Override
public int endOffset() throws IOException {
return -1;
}
/**
* @return null, indicating no payloads are available
* @throws IOException if a low-level IO exception occurred
*/
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}

View File

@ -235,7 +235,7 @@ class FreqProxFields extends Fields {
throw new IllegalArgumentException("liveDocs must be null");
}
if ((flags & PostingsEnum.FLAG_POSITIONS) >= PostingsEnum.FLAG_POSITIONS) {
if ((flags & PostingsEnum.POSITIONS) >= PostingsEnum.POSITIONS) {
FreqProxPostingsEnum posEnum;
if (!terms.hasProx) {
@ -244,7 +244,7 @@ class FreqProxFields extends Fields {
throw new IllegalArgumentException("did not index positions");
}
if (!terms.hasOffsets && (flags & PostingsEnum.FLAG_OFFSETS) == PostingsEnum.FLAG_OFFSETS) {
if (!terms.hasOffsets && (flags & PostingsEnum.OFFSETS) == PostingsEnum.OFFSETS) {
// Caller wants offsets but we didn't index them;
// don't lie:
throw new IllegalArgumentException("did not index offsets");
@ -264,7 +264,7 @@ class FreqProxFields extends Fields {
FreqProxDocsEnum docsEnum;
if (!terms.hasFreq && (flags & PostingsEnum.FLAG_FREQS) != 0) {
if (!terms.hasFreq && (flags & PostingsEnum.FREQS) != 0) {
// Caller wants freqs but we didn't index them;
// don't lie:
throw new IllegalArgumentException("did not index freq");
@ -303,7 +303,7 @@ class FreqProxFields extends Fields {
}
}
private static class FreqProxDocsEnum extends DocsEnum {
private static class FreqProxDocsEnum extends PostingsEnum {
final FreqProxTermsWriterPerField terms;
final FreqProxPostingsArray postingsArray;
@ -349,6 +349,21 @@ class FreqProxFields extends Fields {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public int nextDoc() throws IOException {
if (reader.eof()) {

View File

@ -209,7 +209,7 @@ public abstract class LeafReader extends IndexReader {
* This will return null if either the field or
* term does not exist.
* @see TermsEnum#postings(Bits, PostingsEnum) */
public final PostingsEnum termDocsEnum(Term term, int flags) throws IOException {
public final PostingsEnum postings(Term term, int flags) throws IOException {
assert term.field() != null;
assert term.bytes() != null;
final Terms terms = terms(term.field());
@ -223,9 +223,9 @@ public abstract class LeafReader extends IndexReader {
}
/** Returns {@link PostingsEnum} for the specified term
* with {@link PostingsEnum#FLAG_FREQS}. */
public final PostingsEnum termDocsEnum(Term term) throws IOException {
return termDocsEnum(term, PostingsEnum.FLAG_FREQS);
* with {@link PostingsEnum#FREQS}. */
public final PostingsEnum postings(Term term) throws IOException {
return postings(term, PostingsEnum.FREQS);
}
/** Returns {@link NumericDocValues} for this field, or

View File

@ -124,7 +124,7 @@ public final class MultiFields extends Fields {
* term. This will return null if the field or term does
* not exist. */
public static PostingsEnum getTermDocsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term) throws IOException {
return getTermDocsEnum(r, liveDocs, field, term, PostingsEnum.FLAG_FREQS);
return getTermDocsEnum(r, liveDocs, field, term, PostingsEnum.FREQS);
}
/** Returns {@link PostingsEnum} for the specified field and
@ -151,7 +151,7 @@ public final class MultiFields extends Fields {
* term does not exist or positions were not indexed.
* @see #getTermPositionsEnum(IndexReader, Bits, String, BytesRef, int) */
public static PostingsEnum getTermPositionsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term) throws IOException {
return getTermPositionsEnum(r, liveDocs, field, term, PostingsEnum.FLAG_OFFSETS | PostingsEnum.FLAG_PAYLOADS);
return getTermPositionsEnum(r, liveDocs, field, term, PostingsEnum.OFFSETS | PostingsEnum.PAYLOADS);
}
/** Returns {@link PostingsEnum} for the specified

View File

@ -33,29 +33,29 @@ public abstract class PostingsEnum extends DocIdSetIterator {
* Flag to pass to {@link TermsEnum#postings(Bits, PostingsEnum, int)} if you don't
* require per-document postings in the returned enum.
*/
public static final int FLAG_NONE = 0x0;
public static final int NONE = 0x0;
/** Flag to pass to {@link TermsEnum#postings(Bits, PostingsEnum, int)}
* if you require term frequencies in the returned enum. */
public static final int FLAG_FREQS = 0x1;
public static final int FREQS = 0x1;
/** Flag to pass to {@link TermsEnum#postings(Bits, PostingsEnum, int)}
* if you require term positions in the returned enum. */
public static final int FLAG_POSITIONS = 0x3;
public static final int POSITIONS = 0x3;
/** Flag to pass to {@link TermsEnum#postings(Bits, PostingsEnum, int)}
* if you require offsets in the returned enum. */
public static final int FLAG_OFFSETS = 0x7;
public static final int OFFSETS = 0x7;
/** Flag to pass to {@link TermsEnum#postings(Bits, PostingsEnum, int)}
* if you require payloads in the returned enum. */
public static final int FLAG_PAYLOADS = 0xB;
public static final int PAYLOADS = 0xB;
/**
* Flag to pass to {@link TermsEnum#postings(Bits, PostingsEnum, int)}
* to get positions, payloads and offsets in the returned enum
*/
public static final int FLAG_ALL = FLAG_POSITIONS | FLAG_PAYLOADS;
public static final int ALL = POSITIONS | PAYLOADS;
/**
* Returns true if the passed in flags require positions to be indexed
@ -63,7 +63,7 @@ public abstract class PostingsEnum extends DocIdSetIterator {
* @return true if the passed in flags require positions to be indexed
*/
public static boolean requiresPositions(int flags) {
return ((flags & FLAG_POSITIONS) >= FLAG_POSITIONS);
return ((flags & POSITIONS) >= POSITIONS);
}
private AttributeSource atts = null;
@ -80,7 +80,7 @@ public abstract class PostingsEnum extends DocIdSetIterator {
* {@link DocIdSetIterator#NO_MORE_DOCS}.
*
* <p>
* <b>NOTE:</b> if the {@link PostingsEnum} was obtained with {@link #FLAG_NONE},
* <b>NOTE:</b> if the {@link PostingsEnum} was obtain with {@link #NONE},
* the result of this method is undefined.
*/
public abstract int freq() throws IOException;

View File

@ -146,7 +146,7 @@ public abstract class TermsEnum implements BytesRefIterator {
* be returned
* @param reuse pass a prior PostingsEnum for possible reuse */
public final PostingsEnum postings(Bits liveDocs, PostingsEnum reuse) throws IOException {
return postings(liveDocs, reuse, PostingsEnum.FLAG_FREQS);
return postings(liveDocs, reuse, PostingsEnum.FREQS);
}
/** Get {@link PostingsEnum} for the current term, with
@ -159,7 +159,7 @@ public abstract class TermsEnum implements BytesRefIterator {
* be returned
* @param reuse pass a prior PostingsEnum for possible reuse
* @param flags specifies which optional per-document values
* you require; see {@link PostingsEnum#FLAG_FREQS}
* you require; see {@link PostingsEnum#FREQS}
* @see #postings(Bits, PostingsEnum, int) */
public abstract PostingsEnum postings(Bits liveDocs, PostingsEnum reuse, int flags) throws IOException;

View File

@ -92,9 +92,9 @@
* // get the document frequency
* System.out.println(termsEnum.docFreq());
* // enumerate through documents
* DocsEnum docs = termsEnum.docs(null, null);
* PostingsEnum docs = termsEnum.postings(null, null);
* // enumerate through documents and positions
* DocsAndPositionsEnum docsAndPositions = termsEnum.docsAndPositions(null, null);
* PostingsEnum docsAndPositions = termsEnum.postings(null, null, PostingsEnum.FLAG_POSITIONS);
* }
* </pre>
* <a name="documents"></a>

View File

@ -228,11 +228,11 @@ public class MultiPhraseQuery extends Query {
return null;
}
termsEnum.seekExact(term.bytes(), termState);
postingsEnum = termsEnum.postings(liveDocs, null, PostingsEnum.FLAG_POSITIONS);
postingsEnum = termsEnum.postings(liveDocs, null, PostingsEnum.POSITIONS);
if (postingsEnum == null) {
// term does exist, but has no positions
assert termsEnum.postings(liveDocs, null, PostingsEnum.FLAG_NONE) != null: "termstate found but no term exists in reader";
assert termsEnum.postings(liveDocs, null, PostingsEnum.NONE) != null: "termstate found but no term exists in reader";
throw new IllegalStateException("field \"" + term.field() + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.text() + ")");
}
@ -482,7 +482,7 @@ class UnionPostingsEnum extends PostingsEnum {
continue;
}
termsEnum.seekExact(term.bytes(), termState);
PostingsEnum postings = termsEnum.postings(liveDocs, null, PostingsEnum.FLAG_POSITIONS);
PostingsEnum postings = termsEnum.postings(liveDocs, null, PostingsEnum.POSITIONS);
if (postings == null) {
// term does exist, but has no positions
throw new IllegalStateException("field \"" + term.field() + "\" was indexed without position data; cannot run PhraseQuery (term=" + term.text() + ")");

View File

@ -94,7 +94,7 @@ public class MultiTermQueryWrapperFilter<Q extends MultiTermQuery> extends Filte
BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc());
PostingsEnum docs = null;
while (termsEnum.next() != null) {
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.NONE);
builder.or(docs);
}
return builder.build();

View File

@ -266,7 +266,7 @@ public class PhraseQuery extends Query {
return null;
}
te.seekExact(t.bytes(), state);
PostingsEnum postingsEnum = te.postings(liveDocs, null, PostingsEnum.FLAG_POSITIONS);
PostingsEnum postingsEnum = te.postings(liveDocs, null, PostingsEnum.POSITIONS);
// PhraseQuery on a field that did not index
// positions.

View File

@ -83,7 +83,7 @@ public class TermQuery extends Query {
if (termsEnum == null) {
return null;
}
PostingsEnum docs = termsEnum.postings(acceptDocs, null, needsScores ? PostingsEnum.FLAG_FREQS : PostingsEnum.FLAG_NONE);
PostingsEnum docs = termsEnum.postings(acceptDocs, null, needsScores ? PostingsEnum.FREQS : PostingsEnum.NONE);
assert docs != null;
return new TermScorer(this, docs, similarity.simScorer(stats, context));
}

View File

@ -449,7 +449,7 @@
* {@link org.apache.lucene.search.Scorer#freq freq()} &mdash; Returns the number of matches
* for the current document. This value can be determined in any appropriate way for an application. For instance, the
* {@link org.apache.lucene.search.TermScorer TermScorer} simply defers to the term frequency from the inverted index:
* {@link org.apache.lucene.index.DocsEnum#freq DocsEnum.freq()}.
* {@link org.apache.lucene.index.PostingsEnum#freq PostingsEnum.freq()}.
* </li>
* <li>
* {@link org.apache.lucene.search.Scorer#advance advance()} &mdash; Skip ahead in

View File

@ -114,7 +114,7 @@ public class SpanTermQuery extends SpanQuery {
final TermsEnum termsEnum = context.reader().terms(term.field()).iterator(null);
termsEnum.seekExact(term.bytes(), state);
final PostingsEnum postings = termsEnum.postings(acceptDocs, null, PostingsEnum.FLAG_PAYLOADS);
final PostingsEnum postings = termsEnum.postings(acceptDocs, null, PostingsEnum.PAYLOADS);
if (postings != null) {
return new TermSpans(postings, term);

View File

@ -321,7 +321,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
final Terms terms = fields.terms("f");
final TermsEnum te = terms.iterator(null);
assertEquals(new BytesRef("a"), te.next());
final PostingsEnum dpe = te.postings(null, null, PostingsEnum.FLAG_ALL);
final PostingsEnum dpe = te.postings(null, null, PostingsEnum.ALL);
assertEquals(0, dpe.nextDoc());
assertEquals(2, dpe.freq());
assertEquals(0, dpe.nextPosition());

View File

@ -293,55 +293,55 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
assertTermStats(leftTermsEnum, rightTermsEnum);
if (deep) {
// with payloads + off
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_ALL));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_ALL));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.ALL));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.ALL));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_ALL));
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.ALL));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_ALL));
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.ALL));
// with payloads only
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_PAYLOADS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_PAYLOADS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_PAYLOADS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_PAYLOADS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.PAYLOADS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.PAYLOADS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.PAYLOADS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.PAYLOADS));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_PAYLOADS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_PAYLOADS));
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.PAYLOADS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.PAYLOADS));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_PAYLOADS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_PAYLOADS));
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.PAYLOADS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.PAYLOADS));
// with offsets only
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_OFFSETS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_OFFSETS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_OFFSETS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_OFFSETS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.OFFSETS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.OFFSETS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.OFFSETS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.OFFSETS));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_OFFSETS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_OFFSETS));
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.OFFSETS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.OFFSETS));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_OFFSETS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_OFFSETS));
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.OFFSETS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.OFFSETS));
// with positions only
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_POSITIONS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_POSITIONS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_POSITIONS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_POSITIONS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.POSITIONS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.POSITIONS));
assertDocsAndPositionsEnum(leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.POSITIONS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.POSITIONS));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_POSITIONS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_POSITIONS));
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.POSITIONS),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.POSITIONS));
assertPositionsSkipping(leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_POSITIONS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_POSITIONS));
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.POSITIONS),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.POSITIONS));
// with freqs:
assertDocsEnum(leftDocs = leftTermsEnum.postings(null, leftDocs),
@ -350,10 +350,10 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
rightDocs = rightTermsEnum.postings(randomBits, rightDocs));
// w/o freqs:
assertDocsEnum(leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.FLAG_NONE));
assertDocsEnum(leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.FLAG_NONE));
assertDocsEnum(leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.NONE));
assertDocsEnum(leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.NONE));
// with freqs:
assertDocsSkipping(leftTermsEnum.docFreq(),
@ -365,11 +365,11 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
// w/o freqs:
assertDocsSkipping(leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.FLAG_NONE));
leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.NONE));
assertDocsSkipping(leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.FLAG_NONE));
leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.NONE));
}
}
assertNull(rightTermsEnum.next());

View File

@ -538,7 +538,7 @@ public class TestAddIndexes extends LuceneTestCase {
private void verifyTermDocs(Directory dir, Term term, int numDocs)
throws IOException {
IndexReader reader = DirectoryReader.open(dir);
PostingsEnum postingsEnum = TestUtil.docs(random(), reader, term.field, term.bytes, null, null, PostingsEnum.FLAG_NONE);
PostingsEnum postingsEnum = TestUtil.docs(random(), reader, term.field, term.bytes, null, null, PostingsEnum.NONE);
int count = 0;
while (postingsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS)
count++;

View File

@ -244,7 +244,7 @@ public class TestCodecs extends LuceneTestCase {
// make sure it properly fully resets (rewinds) its
// internal state:
for(int iter=0;iter<2;iter++) {
postingsEnum = TestUtil.docs(random(), termsEnum, null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = TestUtil.docs(random(), termsEnum, null, postingsEnum, PostingsEnum.NONE);
assertEquals(terms[i].docs[0], postingsEnum.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, postingsEnum.nextDoc());
}
@ -392,9 +392,9 @@ public class TestCodecs extends LuceneTestCase {
assertEquals(status, TermsEnum.SeekStatus.FOUND);
assertEquals(term.docs.length, termsEnum.docFreq());
if (field.omitTF) {
this.verifyDocs(term.docs, term.positions, TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.FLAG_NONE), false);
this.verifyDocs(term.docs, term.positions, TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.NONE), false);
} else {
this.verifyDocs(term.docs, term.positions, termsEnum.postings(null, null, PostingsEnum.FLAG_ALL), true);
this.verifyDocs(term.docs, term.positions, termsEnum.postings(null, null, PostingsEnum.ALL), true);
}
// Test random seek by ord:
@ -412,9 +412,9 @@ public class TestCodecs extends LuceneTestCase {
assertTrue(termsEnum.term().bytesEquals(new BytesRef(term.text2)));
assertEquals(term.docs.length, termsEnum.docFreq());
if (field.omitTF) {
this.verifyDocs(term.docs, term.positions, TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.FLAG_NONE), false);
this.verifyDocs(term.docs, term.positions, TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.NONE), false);
} else {
this.verifyDocs(term.docs, term.positions, termsEnum.postings(null, null, PostingsEnum.FLAG_ALL), true);
this.verifyDocs(term.docs, term.positions, termsEnum.postings(null, null, PostingsEnum.ALL), true);
}
}
@ -465,15 +465,15 @@ public class TestCodecs extends LuceneTestCase {
final PostingsEnum docs;
final PostingsEnum postings;
if (!field.omitTF) {
postings = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
postings = termsEnum.postings(null, null, PostingsEnum.ALL);
if (postings != null) {
docs = postings;
} else {
docs = TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.FLAG_FREQS);
docs = TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.FREQS);
}
} else {
postings = null;
docs = TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.FLAG_NONE);
docs = TestUtil.docs(random(), termsEnum, null, null, PostingsEnum.NONE);
}
assertNotNull(docs);
int upto2 = -1;
@ -804,7 +804,7 @@ public class TestCodecs extends LuceneTestCase {
Term term = new Term("f", new BytesRef("doc"));
DirectoryReader reader = DirectoryReader.open(dir);
for (LeafReaderContext ctx : reader.leaves()) {
PostingsEnum de = ctx.reader().termDocsEnum(term);
PostingsEnum de = ctx.reader().postings(term);
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
assertEquals("wrong freq for doc " + de.docID(), 1, de.freq());
}

View File

@ -633,8 +633,8 @@ public void testFilesOpenClose() throws IOException {
while(enum1.next() != null) {
assertEquals("Different terms", enum1.term(), enum2.next());
PostingsEnum tp1 = enum1.postings(liveDocs, null, PostingsEnum.FLAG_ALL);
PostingsEnum tp2 = enum2.postings(liveDocs, null, PostingsEnum.FLAG_ALL);
PostingsEnum tp1 = enum1.postings(liveDocs, null, PostingsEnum.ALL);
PostingsEnum tp2 = enum2.postings(liveDocs, null, PostingsEnum.ALL);
while(tp1.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
assertTrue(tp2.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -261,7 +261,7 @@ public class TestDoc extends LuceneTestCase {
out.print(" term=" + field + ":" + tis.term());
out.println(" DF=" + tis.docFreq());
PostingsEnum positions = tis.postings(reader.getLiveDocs(), null, PostingsEnum.FLAG_POSITIONS);
PostingsEnum positions = tis.postings(reader.getLiveDocs(), null, PostingsEnum.POSITIONS);
while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
out.print(" doc=" + positions.docID());

View File

@ -67,7 +67,7 @@ public class TestDocCount extends LuceneTestCase {
FixedBitSet visited = new FixedBitSet(ir.maxDoc());
TermsEnum te = terms.iterator(null);
while (te.next() != null) {
PostingsEnum de = TestUtil.docs(random(), te, null, null, PostingsEnum.FLAG_NONE);
PostingsEnum de = TestUtil.docs(random(), te, null, null, PostingsEnum.NONE);
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
visited.set(de.docID());
}

View File

@ -96,7 +96,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
if (terms != null) {
TermsEnum te = terms.iterator(null);
if (te.seekExact(bytes)) {
return te.postings(liveDocs, null, PostingsEnum.FLAG_ALL);
return te.postings(liveDocs, null, PostingsEnum.ALL);
}
}
return null;
@ -226,7 +226,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
IndexReaderContext topReaderContext = reader.getContext();
for (LeafReaderContext context : topReaderContext.leaves()) {
int maxDoc = context.reader().maxDoc();
PostingsEnum postingsEnum = TestUtil.docs(random(), context.reader(), fieldName, bytes, null, null, PostingsEnum.FLAG_FREQS);
PostingsEnum postingsEnum = TestUtil.docs(random(), context.reader(), fieldName, bytes, null, null, PostingsEnum.FREQS);
if (findNext(freqInDoc, context.docBase, context.docBase + maxDoc) == Integer.MAX_VALUE) {
assertNull(postingsEnum);
continue;
@ -336,7 +336,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
LeafReader r = getOnlySegmentReader(reader);
PostingsEnum disi = TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, null, PostingsEnum.FLAG_NONE);
PostingsEnum disi = TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, null, PostingsEnum.NONE);
int docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -344,7 +344,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
// now reuse and check again
TermsEnum te = r.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar")));
disi = TestUtil.docs(random(), te, null, disi, PostingsEnum.FLAG_NONE);
disi = TestUtil.docs(random(), te, null, disi, PostingsEnum.NONE);
docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -361,7 +361,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
LeafReader r = getOnlySegmentReader(reader);
PostingsEnum disi = r.termDocsEnum(new Term("foo", "bar"), PostingsEnum.FLAG_ALL);
PostingsEnum disi = r.postings(new Term("foo", "bar"), PostingsEnum.ALL);
int docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -369,7 +369,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
// now reuse and check again
TermsEnum te = r.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar")));
disi = te.postings(null, disi, PostingsEnum.FLAG_ALL);
disi = te.postings(null, disi, PostingsEnum.ALL);
docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -239,18 +239,18 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
PostingsEnum termPositions = reader.termDocsEnum(new Term("preanalyzed", "term1"), PostingsEnum.FLAG_ALL);
PostingsEnum termPositions = reader.postings(new Term("preanalyzed", "term1"), PostingsEnum.ALL);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(1, termPositions.freq());
assertEquals(0, termPositions.nextPosition());
termPositions = reader.termDocsEnum(new Term("preanalyzed", "term2"), PostingsEnum.FLAG_ALL);
termPositions = reader.postings(new Term("preanalyzed", "term2"), PostingsEnum.ALL);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(2, termPositions.freq());
assertEquals(1, termPositions.nextPosition());
assertEquals(3, termPositions.nextPosition());
termPositions = reader.termDocsEnum(new Term("preanalyzed", "term3"), PostingsEnum.FLAG_ALL);
termPositions = reader.postings(new Term("preanalyzed", "term3"), PostingsEnum.ALL);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(1, termPositions.freq());
assertEquals(2, termPositions.nextPosition());

View File

@ -151,7 +151,7 @@ public class TestFilterLeafReader extends LuceneTestCase {
assertEquals(TermsEnum.SeekStatus.FOUND, terms.seekCeil(new BytesRef("one")));
PostingsEnum positions = terms.postings(MultiFields.getLiveDocs(reader), null, PostingsEnum.FLAG_ALL);
PostingsEnum positions = terms.postings(MultiFields.getLiveDocs(reader), null, PostingsEnum.ALL);
while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
assertTrue((positions.docID() % 2) == 1);
}

View File

@ -507,7 +507,7 @@ public class TestIndexWriter extends LuceneTestCase {
new BytesRef("a"),
MultiFields.getLiveDocs(reader),
null,
PostingsEnum.FLAG_FREQS);
PostingsEnum.FREQS);
td.nextDoc();
assertEquals(128*1024, td.freq());
reader.close();
@ -833,14 +833,14 @@ public class TestIndexWriter extends LuceneTestCase {
Terms tpv = r.getTermVectors(0).terms("field");
TermsEnum termsEnum = tpv.iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertNotNull(dpEnum);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(1, dpEnum.freq());
assertEquals(100, dpEnum.nextPosition());
assertNotNull(termsEnum.next());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertNotNull(dpEnum);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(1, dpEnum.freq());
@ -1239,12 +1239,12 @@ public class TestIndexWriter extends LuceneTestCase {
// test that the terms were indexed.
assertTrue(TestUtil.docs(random(), ir, "binary", new BytesRef("doc1field1"), null, null, PostingsEnum.FLAG_NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "binary", new BytesRef("doc2field1"), null, null, PostingsEnum.FLAG_NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "binary", new BytesRef("doc3field1"), null, null, PostingsEnum.FLAG_NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "string", new BytesRef("doc1field2"), null, null, PostingsEnum.FLAG_NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "string", new BytesRef("doc2field2"), null, null, PostingsEnum.FLAG_NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "string", new BytesRef("doc3field2"), null, null, PostingsEnum.FLAG_NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "binary", new BytesRef("doc1field1"), null, null, PostingsEnum.NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "binary", new BytesRef("doc2field1"), null, null, PostingsEnum.NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "binary", new BytesRef("doc3field1"), null, null, PostingsEnum.NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "string", new BytesRef("doc1field2"), null, null, PostingsEnum.NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "string", new BytesRef("doc2field2"), null, null, PostingsEnum.NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(TestUtil.docs(random(), ir, "string", new BytesRef("doc3field2"), null, null, PostingsEnum.NONE).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
ir.close();
dir.close();

View File

@ -332,14 +332,14 @@ public class TestIndexableField extends LuceneTestCase {
TermsEnum termsEnum = tfv.iterator(null);
assertEquals(new BytesRef(""+counter), termsEnum.next());
assertEquals(1, termsEnum.totalTermFreq());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(1, dpEnum.freq());
assertEquals(1, dpEnum.nextPosition());
assertEquals(new BytesRef("text"), termsEnum.next());
assertEquals(1, termsEnum.totalTermFreq());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(1, dpEnum.freq());
assertEquals(0, dpEnum.nextPosition());

View File

@ -374,10 +374,10 @@ public class TestLongPostings extends LuceneTestCase {
final PostingsEnum postings;
if (options == IndexOptions.DOCS) {
docs = TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, PostingsEnum.FLAG_NONE);
docs = TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, PostingsEnum.NONE);
postings = null;
} else {
docs = postings = TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, PostingsEnum.FLAG_FREQS);
docs = postings = TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, PostingsEnum.FREQS);
assert postings != null;
}
assert docs != null;

View File

@ -135,7 +135,7 @@ public class TestMultiFields extends LuceneTestCase {
System.out.println("TEST: seek term="+ UnicodeUtil.toHexString(term.utf8ToString()) + " " + term);
}
PostingsEnum postingsEnum = TestUtil.docs(random(), reader, "field", term, liveDocs, null, PostingsEnum.FLAG_NONE);
PostingsEnum postingsEnum = TestUtil.docs(random(), reader, "field", term, liveDocs, null, PostingsEnum.NONE);
assertNotNull(postingsEnum);
for(int docID : docs.get(term)) {
@ -176,8 +176,8 @@ public class TestMultiFields extends LuceneTestCase {
w.addDocument(d);
IndexReader r = w.getReader();
w.close();
PostingsEnum d1 = TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, PostingsEnum.FLAG_NONE);
PostingsEnum d2 = TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, PostingsEnum.FLAG_NONE);
PostingsEnum d1 = TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, PostingsEnum.NONE);
PostingsEnum d2 = TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, PostingsEnum.NONE);
assertEquals(0, d1.nextDoc());
assertEquals(0, d2.nextDoc());
r.close();

View File

@ -84,7 +84,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
for (int i = 0; i < 2; i++) {
counter = 0;
PostingsEnum tp = reader.termDocsEnum(term, PostingsEnum.FLAG_ALL);
PostingsEnum tp = reader.postings(term, PostingsEnum.ALL);
checkSkipTo(tp, 14, 185); // no skips
checkSkipTo(tp, 17, 190); // one skip on level 0
checkSkipTo(tp, 287, 200); // one skip on level 1, two on level 0

View File

@ -53,7 +53,7 @@ public class TestOmitPositions extends LuceneTestCase {
assertNull(MultiFields.getTermPositionsEnum(reader, null, "foo", new BytesRef("test")));
PostingsEnum de = TestUtil.docs(random(), reader, "foo", new BytesRef("test"), null, null, PostingsEnum.FLAG_FREQS);
PostingsEnum de = TestUtil.docs(random(), reader, "foo", new BytesRef("test"), null, null, PostingsEnum.FREQS);
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
assertEquals(2, de.freq());
}

View File

@ -80,7 +80,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
BytesRef b = te.next();
assertNotNull(b);
assertEquals(t, b.utf8ToString());
PostingsEnum td = TestUtil.docs(random(), te, liveDocs, null, PostingsEnum.FLAG_NONE);
PostingsEnum td = TestUtil.docs(random(), te, liveDocs, null, PostingsEnum.NONE);
assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(0, td.docID());
assertEquals(td.nextDoc(), DocIdSetIterator.NO_MORE_DOCS);

View File

@ -489,7 +489,7 @@ public class TestPayloads extends LuceneTestCase {
PostingsEnum tp = null;
while (terms.next() != null) {
String termText = terms.term().utf8ToString();
tp = terms.postings(liveDocs, tp, PostingsEnum.FLAG_PAYLOADS);
tp = terms.postings(liveDocs, tp, PostingsEnum.PAYLOADS);
while(tp.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int freq = tp.freq();
for (int i = 0; i < freq; i++) {
@ -609,7 +609,7 @@ public class TestPayloads extends LuceneTestCase {
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
LeafReader sr = SlowCompositeReaderWrapper.wrap(reader);
PostingsEnum de = sr.termDocsEnum(new Term("field", "withPayload"), PostingsEnum.FLAG_POSITIONS);
PostingsEnum de = sr.postings(new Term("field", "withPayload"), PostingsEnum.POSITIONS);
de.nextDoc();
de.nextPosition();
assertEquals(new BytesRef("test"), de.getPayload());
@ -643,7 +643,7 @@ public class TestPayloads extends LuceneTestCase {
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
SegmentReader sr = getOnlySegmentReader(reader);
PostingsEnum de = sr.termDocsEnum(new Term("field", "withPayload"), PostingsEnum.FLAG_POSITIONS);
PostingsEnum de = sr.postings(new Term("field", "withPayload"), PostingsEnum.POSITIONS);
de.nextDoc();
de.nextPosition();
assertEquals(new BytesRef("test"), de.getPayload());

View File

@ -72,7 +72,7 @@ public class TestPayloadsOnVectors extends LuceneTestCase {
assert terms != null;
TermsEnum termsEnum = terms.iterator(null);
assertTrue(termsEnum.seekExact(new BytesRef("withPayload")));
PostingsEnum de = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum de = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(0, de.nextDoc());
assertEquals(0, de.nextPosition());
assertEquals(new BytesRef("test"), de.getPayload());
@ -114,7 +114,7 @@ public class TestPayloadsOnVectors extends LuceneTestCase {
assert terms != null;
TermsEnum termsEnum = terms.iterator(null);
assertTrue(termsEnum.seekExact(new BytesRef("withPayload")));
PostingsEnum de = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum de = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(0, de.nextDoc());
assertEquals(3, de.nextPosition());
assertEquals(new BytesRef("test"), de.getPayload());

View File

@ -226,7 +226,7 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
Terms cterms = fields.terms(term.field);
TermsEnum ctermsEnum = cterms.iterator(null);
if (ctermsEnum.seekExact(new BytesRef(term.text()))) {
PostingsEnum postingsEnum = TestUtil.docs(random(), ctermsEnum, bits, null, PostingsEnum.FLAG_NONE);
PostingsEnum postingsEnum = TestUtil.docs(random(), ctermsEnum, bits, null, PostingsEnum.NONE);
return toArray(postingsEnum);
}
return null;

View File

@ -313,7 +313,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
}
// explicitly exclude offsets here
docsAndPositions = termsEnum.postings(null, docsAndPositions, PostingsEnum.FLAG_ALL);
docsAndPositions = termsEnum.postings(null, docsAndPositions, PostingsEnum.ALL);
assertNotNull(docsAndPositions);
//System.out.println(" doc/freq/pos");
while((doc = docsAndPositions.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
@ -328,7 +328,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
}
}
docsAndPositionsAndOffsets = termsEnum.postings(null, docsAndPositions, PostingsEnum.FLAG_ALL);
docsAndPositionsAndOffsets = termsEnum.postings(null, docsAndPositions, PostingsEnum.ALL);
assertNotNull(docsAndPositionsAndOffsets);
//System.out.println(" doc/freq/pos/offs");
while((doc = docsAndPositionsAndOffsets.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -58,7 +58,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
TermsEnum terms = reader.fields().terms(DocHelper.TEXT_FIELD_2_KEY).iterator(null);
terms.seekCeil(new BytesRef("field"));
PostingsEnum termDocs = TestUtil.docs(random(), terms, reader.getLiveDocs(), null, PostingsEnum.FLAG_FREQS);
PostingsEnum termDocs = TestUtil.docs(random(), terms, reader.getLiveDocs(), null, PostingsEnum.FREQS);
if (termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int docId = termDocs.docID();
assertTrue(docId == 0);
@ -126,7 +126,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(ta.text()),
MultiFields.getLiveDocs(reader),
null,
PostingsEnum.FLAG_FREQS);
PostingsEnum.FREQS);
// without optimization (assumption skipInterval == 16)
@ -169,7 +169,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tb.text()),
MultiFields.getLiveDocs(reader),
null,
PostingsEnum.FLAG_FREQS);
PostingsEnum.FREQS);
assertTrue(tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(10, tdocs.docID());
@ -193,7 +193,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tb.text()),
MultiFields.getLiveDocs(reader),
null,
PostingsEnum.FLAG_FREQS);
PostingsEnum.FREQS);
assertTrue(tdocs.advance(5) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(10, tdocs.docID());
@ -213,7 +213,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
new BytesRef(tc.text()),
MultiFields.getLiveDocs(reader),
null,
PostingsEnum.FLAG_FREQS);
PostingsEnum.FREQS);
assertTrue(tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(26, tdocs.docID());

View File

@ -82,11 +82,11 @@ public class TestStressAdvance extends LuceneTestCase {
System.out.println("\nTEST: iter=" + iter + " iter2=" + iter2);
}
assertEquals(TermsEnum.SeekStatus.FOUND, te.seekCeil(new BytesRef("a")));
de = TestUtil.docs(random(), te, null, de, PostingsEnum.FLAG_NONE);
de = TestUtil.docs(random(), te, null, de, PostingsEnum.NONE);
testOne(de, aDocIDs);
assertEquals(TermsEnum.SeekStatus.FOUND, te.seekCeil(new BytesRef("b")));
de = TestUtil.docs(random(), te, null, de, PostingsEnum.FLAG_NONE);
de = TestUtil.docs(random(), te, null, de, PostingsEnum.NONE);
testOne(de, bDocIDs);
}

View File

@ -327,7 +327,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
Bits liveDocs = MultiFields.getLiveDocs(r1);
PostingsEnum docs = null;
while(termsEnum.next() != null) {
docs = TestUtil.docs(random(), termsEnum, liveDocs, docs, PostingsEnum.FLAG_NONE);
docs = TestUtil.docs(random(), termsEnum, liveDocs, docs, PostingsEnum.NONE);
while(docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
fail("r1 is not empty but r2 is");
}
@ -346,9 +346,9 @@ public class TestStressIndexing2 extends LuceneTestCase {
break;
}
termDocs1 = TestUtil.docs(random(), termsEnum, liveDocs1, termDocs1, PostingsEnum.FLAG_NONE);
termDocs1 = TestUtil.docs(random(), termsEnum, liveDocs1, termDocs1, PostingsEnum.NONE);
if (termsEnum2.seekExact(term)) {
termDocs2 = TestUtil.docs(random(), termsEnum2, liveDocs2, termDocs2, PostingsEnum.FLAG_NONE);
termDocs2 = TestUtil.docs(random(), termsEnum2, liveDocs2, termDocs2, PostingsEnum.NONE);
} else {
termDocs2 = null;
}
@ -396,7 +396,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
BytesRef term2;
while((term2 = termsEnum3.next()) != null) {
System.out.println(" " + term2.utf8ToString() + ": freq=" + termsEnum3.totalTermFreq());
dpEnum = termsEnum3.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum3.postings(null, dpEnum, PostingsEnum.ALL);
if (dpEnum != null) {
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
final int freq = dpEnum.freq();
@ -405,7 +405,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
System.out.println(" pos=" + dpEnum.nextPosition());
}
} else {
dEnum = TestUtil.docs(random(), termsEnum3, null, dEnum, PostingsEnum.FLAG_FREQS);
dEnum = TestUtil.docs(random(), termsEnum3, null, dEnum, PostingsEnum.FREQS);
assertNotNull(dEnum);
assertTrue(dEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
final int freq = dEnum.freq();
@ -428,7 +428,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
BytesRef term2;
while((term2 = termsEnum3.next()) != null) {
System.out.println(" " + term2.utf8ToString() + ": freq=" + termsEnum3.totalTermFreq());
dpEnum = termsEnum3.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum3.postings(null, dpEnum, PostingsEnum.ALL);
if (dpEnum != null) {
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
final int freq = dpEnum.freq();
@ -437,7 +437,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
System.out.println(" pos=" + dpEnum.nextPosition());
}
} else {
dEnum = TestUtil.docs(random(), termsEnum3, null, dEnum, PostingsEnum.FLAG_FREQS);
dEnum = TestUtil.docs(random(), termsEnum3, null, dEnum, PostingsEnum.FREQS);
assertNotNull(dEnum);
assertTrue(dEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
final int freq = dEnum.freq();
@ -496,7 +496,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
}
//System.out.println("TEST: term1=" + term1);
docs1 = TestUtil.docs(random(), termsEnum1, liveDocs1, docs1, PostingsEnum.FLAG_FREQS);
docs1 = TestUtil.docs(random(), termsEnum1, liveDocs1, docs1, PostingsEnum.FREQS);
while (docs1.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int d = docs1.docID();
int f = docs1.freq();
@ -529,7 +529,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
}
//System.out.println("TEST: term1=" + term1);
docs2 = TestUtil.docs(random(), termsEnum2, liveDocs2, docs2, PostingsEnum.FLAG_FREQS);
docs2 = TestUtil.docs(random(), termsEnum2, liveDocs2, docs2, PostingsEnum.FREQS);
while (docs2.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
int d = r2r1[docs2.docID()];
int f = docs2.freq();
@ -618,8 +618,8 @@ public class TestStressIndexing2 extends LuceneTestCase {
assertEquals(termsEnum1.totalTermFreq(),
termsEnum2.totalTermFreq());
dpEnum1 = termsEnum1.postings(null, dpEnum1, PostingsEnum.FLAG_ALL);
dpEnum2 = termsEnum2.postings(null, dpEnum2, PostingsEnum.FLAG_ALL);
dpEnum1 = termsEnum1.postings(null, dpEnum1, PostingsEnum.ALL);
dpEnum2 = termsEnum2.postings(null, dpEnum2, PostingsEnum.ALL);
if (dpEnum1 != null) {
assertNotNull(dpEnum2);
int docID1 = dpEnum1.nextDoc();
@ -655,8 +655,8 @@ public class TestStressIndexing2 extends LuceneTestCase {
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dpEnum1.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dpEnum2.nextDoc());
} else {
dEnum1 = TestUtil.docs(random(), termsEnum1, null, dEnum1, PostingsEnum.FLAG_FREQS);
dEnum2 = TestUtil.docs(random(), termsEnum2, null, dEnum2, PostingsEnum.FLAG_FREQS);
dEnum1 = TestUtil.docs(random(), termsEnum1, null, dEnum1, PostingsEnum.FREQS);
dEnum2 = TestUtil.docs(random(), termsEnum2, null, dEnum2, PostingsEnum.FREQS);
assertNotNull(dEnum1);
assertNotNull(dEnum2);
int docID1 = dEnum1.nextDoc();

View File

@ -228,7 +228,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
//System.out.println("Term: " + term);
assertEquals(testTerms[i], term);
postingsEnum = TestUtil.docs(random(), termsEnum, null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = TestUtil.docs(random(), termsEnum, null, postingsEnum, PostingsEnum.NONE);
assertNotNull(postingsEnum);
int doc = postingsEnum.docID();
assertEquals(-1, doc);
@ -255,7 +255,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
//System.out.println("Term: " + term);
assertEquals(testTerms[i], term);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertNotNull(dpEnum);
int doc = dpEnum.docID();
assertEquals(-1, doc);
@ -266,7 +266,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dpEnum.nextDoc());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
doc = dpEnum.docID();
assertEquals(-1, doc);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -292,7 +292,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
//System.out.println("Term: " + term);
assertEquals(testTerms[i], term);
assertNotNull(termsEnum.postings(null, null));
assertNull(termsEnum.postings(null, null, PostingsEnum.FLAG_ALL)); // no pos
assertNull(termsEnum.postings(null, null, PostingsEnum.ALL)); // no pos
}
reader.close();
}
@ -311,7 +311,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
String term = text.utf8ToString();
assertEquals(testTerms[i], term);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertNotNull(dpEnum);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(dpEnum.freq(), positions[i].length);
@ -320,7 +320,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, dpEnum.nextDoc());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertNotNull(dpEnum);
assertEquals(dpEnum.freq(), positions[i].length);

View File

@ -68,7 +68,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// Token "" occurred once
assertEquals(1, termsEnum.totalTermFreq());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
dpEnum.nextPosition();
assertEquals(8, dpEnum.startOffset());
@ -77,7 +77,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// Token "abcd" occurred three times
assertEquals(new BytesRef("abcd"), termsEnum.next());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertEquals(3, termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -117,7 +117,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(2, termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -152,7 +152,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(2, termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -190,7 +190,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(2, termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -225,7 +225,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(2, termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -261,7 +261,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
dpEnum.nextPosition();
@ -269,14 +269,14 @@ public class TestTermVectorsWriter extends LuceneTestCase {
assertEquals(4, dpEnum.endOffset());
assertNotNull(termsEnum.next());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
dpEnum.nextPosition();
assertEquals(11, dpEnum.startOffset());
assertEquals(17, dpEnum.endOffset());
assertNotNull(termsEnum.next());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
dpEnum.nextPosition();
assertEquals(18, dpEnum.startOffset());
@ -305,7 +305,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(1, (int) termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -314,7 +314,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
assertEquals(7, dpEnum.endOffset());
assertNotNull(termsEnum.next());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
dpEnum.nextPosition();
assertEquals(8, dpEnum.startOffset());
@ -347,7 +347,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum dpEnum = termsEnum.postings(null, null, PostingsEnum.ALL);
assertEquals(1, (int) termsEnum.totalTermFreq());
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -356,7 +356,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
assertEquals(4, dpEnum.endOffset());
assertNotNull(termsEnum.next());
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
dpEnum.nextPosition();
assertEquals(6, dpEnum.startOffset());

View File

@ -123,7 +123,7 @@ public class TestTermdocPerf extends LuceneTestCase {
final Random random = new Random(random().nextLong());
for (int i=0; i<iter; i++) {
tenum.seekCeil(new BytesRef("val"));
tdocs = TestUtil.docs(random, tenum, MultiFields.getLiveDocs(reader), tdocs, PostingsEnum.FLAG_NONE);
tdocs = TestUtil.docs(random, tenum, MultiFields.getLiveDocs(reader), tdocs, PostingsEnum.NONE);
while (tdocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
ret += tdocs.docID();
}

View File

@ -326,7 +326,7 @@ public class TestTermsEnum extends LuceneTestCase {
}
assertEquals(expected, actual);
assertEquals(1, te.docFreq());
postingsEnum = TestUtil.docs(random(), te, null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = TestUtil.docs(random(), te, null, postingsEnum, PostingsEnum.NONE);
final int docID = postingsEnum.nextDoc();
assertTrue(docID != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(docIDToID.get(docID), termToID.get(expected).intValue());
@ -740,25 +740,25 @@ public class TestTermsEnum extends LuceneTestCase {
CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false);
TermsEnum te = terms.intersect(ca, null);
assertEquals("aaa", te.next().utf8ToString());
assertEquals(0, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(0, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertEquals("bbb", te.next().utf8ToString());
assertEquals(1, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(1, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertEquals("ccc", te.next().utf8ToString());
assertEquals(2, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(2, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertNull(te.next());
te = terms.intersect(ca, new BytesRef("abc"));
assertEquals("bbb", te.next().utf8ToString());
assertEquals(1, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(1, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertEquals("ccc", te.next().utf8ToString());
assertEquals(2, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(2, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertNull(te.next());
te = terms.intersect(ca, new BytesRef("aaa"));
assertEquals("bbb", te.next().utf8ToString());
assertEquals(1, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(1, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertEquals("ccc", te.next().utf8ToString());
assertEquals(2, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(2, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertNull(te.next());
r.close();
@ -798,17 +798,17 @@ public class TestTermsEnum extends LuceneTestCase {
// should seek to startTerm
te = terms.intersect(ca, new BytesRef("aad"));
assertEquals("abd", te.next().utf8ToString());
assertEquals(1, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(1, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertEquals("acd", te.next().utf8ToString());
assertEquals(2, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(2, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertEquals("bcd", te.next().utf8ToString());
assertEquals(3, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(3, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertNull(te.next());
// should fail to find ceil label on second arc, rewind
te = terms.intersect(ca, new BytesRef("add"));
assertEquals("bcd", te.next().utf8ToString());
assertEquals(3, te.postings(null, null, PostingsEnum.FLAG_NONE).nextDoc());
assertEquals(3, te.postings(null, null, PostingsEnum.NONE).nextDoc());
assertNull(te.next());
// should reach end
@ -852,12 +852,12 @@ public class TestTermsEnum extends LuceneTestCase {
PostingsEnum de;
assertEquals("", te.next().utf8ToString());
de = te.postings(null, null, PostingsEnum.FLAG_NONE);
de = te.postings(null, null, PostingsEnum.NONE);
assertEquals(0, de.nextDoc());
assertEquals(1, de.nextDoc());
assertEquals("abc", te.next().utf8ToString());
de = te.postings(null, null, PostingsEnum.FLAG_NONE);
de = te.postings(null, null, PostingsEnum.NONE);
assertEquals(0, de.nextDoc());
assertEquals(1, de.nextDoc());
@ -867,7 +867,7 @@ public class TestTermsEnum extends LuceneTestCase {
te = terms.intersect(ca, new BytesRef(""));
assertEquals("abc", te.next().utf8ToString());
de = te.postings(null, null, PostingsEnum.FLAG_NONE);
de = te.postings(null, null, PostingsEnum.NONE);
assertEquals(0, de.nextDoc());
assertEquals(1, de.nextDoc());

View File

@ -440,7 +440,7 @@ public class TestFilteredQuery extends LuceneTestCase {
Bits acceptDocs) throws IOException {
final boolean nullBitset = random().nextInt(10) == 5;
final LeafReader reader = context.reader();
PostingsEnum termPostingsEnum = reader.termDocsEnum(new Term("field", "0"));
PostingsEnum termPostingsEnum = reader.postings(new Term("field", "0"));
if (termPostingsEnum == null) {
return null; // no docs -- return null
}
@ -483,7 +483,7 @@ public class TestFilteredQuery extends LuceneTestCase {
assertTrue(
"iterator should not be called if bitset is present",
nullBitset);
return reader.termDocsEnum(new Term("field", "0"));
return reader.postings(new Term("field", "0"));
}
};
@ -538,7 +538,7 @@ public class TestFilteredQuery extends LuceneTestCase {
}
@Override
public DocIdSetIterator iterator() throws IOException {
final PostingsEnum termPostingsEnum = context.reader().termDocsEnum(new Term("field", "0"));
final PostingsEnum termPostingsEnum = context.reader().postings(new Term("field", "0"));
if (termPostingsEnum == null) {
return null;
}

View File

@ -212,7 +212,7 @@ public class TestPositionIncrement extends LuceneTestCase {
final IndexReader readerFromWriter = writer.getReader();
LeafReader r = SlowCompositeReaderWrapper.wrap(readerFromWriter);
PostingsEnum tp = r.termDocsEnum(new Term("content", "a"), PostingsEnum.FLAG_ALL);
PostingsEnum tp = r.postings(new Term("content", "a"), PostingsEnum.ALL);
int count = 0;
assertTrue(tp.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

View File

@ -689,7 +689,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
// is sufficient to call next(), and then doc(), exactly once with no
// 'validation' checks.
FacetLabel cp = new FacetLabel(FacetsConfig.stringToPath(t.utf8ToString()));
postingsEnum = termsEnum.postings(null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(null, postingsEnum, PostingsEnum.NONE);
boolean res = cache.put(cp, postingsEnum.nextDoc() + ctx.docBase);
assert !res : "entries should not have been evicted from the cache";
} else {
@ -779,7 +779,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
while (te.next() != null) {
FacetLabel cp = new FacetLabel(FacetsConfig.stringToPath(te.term().utf8ToString()));
final int ordinal = addCategory(cp);
docs = te.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = te.postings(null, docs, PostingsEnum.NONE);
ordinalMap.addMapping(docs.nextDoc() + base, ordinal);
}
base += ar.maxDoc(); // no deletions, so we're ok

View File

@ -131,7 +131,7 @@ class TaxonomyIndexArrays extends ParallelTaxonomyArrays {
// apparent gain.
PostingsEnum positions = MultiFields.getTermPositionsEnum(reader, null,
Consts.FIELD_PAYLOADS, Consts.PAYLOAD_PARENT_BYTES_REF,
PostingsEnum.FLAG_PAYLOADS);
PostingsEnum.PAYLOADS);
// shouldn't really happen, if it does, something's wrong
if (positions == null || positions.advance(first) == DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -131,7 +131,7 @@ public final class TokenStreamFromTermVector extends TokenStream {
final char[] termChars = new char[termBytesRef.length];
final int termCharsLen = UnicodeUtil.UTF8toUTF16(termBytesRef, termChars);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_POSITIONS);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.POSITIONS);
assert dpEnum != null; // presumably checked by TokenSources.hasPositions earlier
dpEnum.nextDoc();
final int freq = dpEnum.freq();

View File

@ -552,7 +552,7 @@ public class PostingsHighlighter {
if (!termsEnum.seekExact(terms[i])) {
continue; // term not found
}
de = postings[i] = termsEnum.postings(null, null, PostingsEnum.FLAG_OFFSETS);
de = postings[i] = termsEnum.postings(null, null, PostingsEnum.OFFSETS);
if (de == null) {
// no positions available
throw new IllegalArgumentException("field '" + field + "' was indexed without offsets, cannot highlight");

View File

@ -104,7 +104,7 @@ public class FieldTermStack {
if (!termSet.contains(term)) {
continue;
}
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_POSITIONS);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.POSITIONS);
if (dpEnum == null) {
// null snippet
return;

View File

@ -138,7 +138,7 @@ class TermsIncludingScoreQuery extends Query {
PostingsEnum postingsEnum = null;
for (int i = 0; i < TermsIncludingScoreQuery.this.terms.size(); i++) {
if (segmentTermsEnum.seekExact(TermsIncludingScoreQuery.this.terms.get(ords[i], spare))) {
postingsEnum = segmentTermsEnum.postings(null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = segmentTermsEnum.postings(null, postingsEnum, PostingsEnum.NONE);
if (postingsEnum.advance(doc) == doc) {
final float score = TermsIncludingScoreQuery.this.scores[ords[i]];
return new ComplexExplanation(true, score, "Score based on join value " + segmentTermsEnum.term().utf8ToString());
@ -202,7 +202,7 @@ class TermsIncludingScoreQuery extends Query {
PostingsEnum postingsEnum = null;
for (int i = 0; i < terms.size(); i++) {
if (termsEnum.seekExact(terms.get(ords[i], spare))) {
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.NONE);
float score = TermsIncludingScoreQuery.this.scores[ords[i]];
for (int doc = postingsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = postingsEnum.nextDoc()) {
matchingDocs.set(doc);
@ -278,7 +278,7 @@ class TermsIncludingScoreQuery extends Query {
PostingsEnum postingsEnum = null;
for (int i = 0; i < terms.size(); i++) {
if (termsEnum.seekExact(terms.get(ords[i], spare))) {
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.NONE);
float score = TermsIncludingScoreQuery.this.scores[ords[i]];
for (int doc = postingsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = postingsEnum.nextDoc()) {
// I prefer this:

View File

@ -740,7 +740,7 @@ public class TestJoinUtil extends LuceneTestCase {
for (BytesRef joinValue : joinValues) {
termsEnum = terms.iterator(termsEnum);
if (termsEnum.seekExact(joinValue)) {
postingsEnum = termsEnum.postings(slowCompositeReader.getLiveDocs(), postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(slowCompositeReader.getLiveDocs(), postingsEnum, PostingsEnum.NONE);
JoinScore joinScore = joinValueToJoinScores.get(joinValue);
for (int doc = postingsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = postingsEnum.nextDoc()) {

View File

@ -198,8 +198,8 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
while(iwTermsIter.next() != null) {
assertNotNull(memTermsIter.next());
assertEquals(iwTermsIter.term(), memTermsIter.term());
PostingsEnum iwDocsAndPos = iwTermsIter.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum memDocsAndPos = memTermsIter.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum iwDocsAndPos = iwTermsIter.postings(null, null, PostingsEnum.ALL);
PostingsEnum memDocsAndPos = memTermsIter.postings(null, null, PostingsEnum.ALL);
while(iwDocsAndPos.nextDoc() != PostingsEnum.NO_MORE_DOCS) {
assertEquals(iwDocsAndPos.docID(), memDocsAndPos.nextDoc());
assertEquals(iwDocsAndPos.freq(), memDocsAndPos.freq());
@ -318,7 +318,7 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
MemoryIndex memory = new MemoryIndex(random().nextBoolean(), false, random().nextInt(50) * 1024 * 1024);
memory.addField("foo", "bar", analyzer);
LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
PostingsEnum disi = TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, null, PostingsEnum.FLAG_NONE);
PostingsEnum disi = TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, null, PostingsEnum.NONE);
int docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -326,7 +326,7 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
// now reuse and check again
TermsEnum te = reader.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar")));
disi = te.postings(null, disi, PostingsEnum.FLAG_NONE);
disi = te.postings(null, disi, PostingsEnum.NONE);
docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -353,7 +353,7 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
memory.addField("foo", "bar", analyzer);
LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
assertEquals(1, reader.terms("foo").getSumTotalTermFreq());
PostingsEnum disi = reader.termDocsEnum(new Term("foo", "bar"), PostingsEnum.FLAG_ALL);
PostingsEnum disi = reader.postings(new Term("foo", "bar"), PostingsEnum.ALL);
int docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -424,8 +424,8 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
LeafReader reader = (LeafReader) mindex.createSearcher().getIndexReader();
assertNull(reader.getNumericDocValues("not-in-index"));
assertNull(reader.getNormValues("not-in-index"));
assertNull(reader.termDocsEnum(new Term("not-in-index", "foo")));
assertNull(reader.termDocsEnum(new Term("not-in-index", "foo"), PostingsEnum.FLAG_ALL));
assertNull(reader.postings(new Term("not-in-index", "foo")));
assertNull(reader.postings(new Term("not-in-index", "foo"), PostingsEnum.ALL));
assertNull(reader.terms("not-in-index"));
}
@ -525,8 +525,8 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
assertNotNull(memTermEnum.next());
assertThat(termEnum.totalTermFreq(), equalTo(memTermEnum.totalTermFreq()));
PostingsEnum docsPosEnum = termEnum.postings(null, null, PostingsEnum.FLAG_POSITIONS);
PostingsEnum memDocsPosEnum = memTermEnum.postings(null, null, PostingsEnum.FLAG_POSITIONS);
PostingsEnum docsPosEnum = termEnum.postings(null, null, PostingsEnum.POSITIONS);
PostingsEnum memDocsPosEnum = memTermEnum.postings(null, null, PostingsEnum.POSITIONS);
String currentTerm = termEnum.term().utf8ToString();
assertThat("Token mismatch for field: " + field_name, currentTerm, equalTo(memTermEnum.term().utf8ToString()));

View File

@ -171,7 +171,7 @@ public class SortingLeafReader extends FilterLeafReader {
}
final PostingsEnum inDocs = in.postings(newToOld(liveDocs), inReuse, flags);
final boolean withFreqs = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >=0 && (flags & PostingsEnum.FLAG_FREQS) != 0;
final boolean withFreqs = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >=0 && (flags & PostingsEnum.FREQS) != 0;
return new SortingDocsEnum(docMap.size(), wrapReuse, inDocs, withFreqs, docMap);
}

View File

@ -365,7 +365,7 @@ public class DocTermOrds implements Accountable {
final int df = te.docFreq();
if (df <= maxTermDocFreq) {
postingsEnum = te.postings(liveDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = te.postings(liveDocs, postingsEnum, PostingsEnum.NONE);
// dF, but takes deletions into account
int actualDF = 0;

View File

@ -288,7 +288,7 @@ class FieldCacheImpl implements FieldCache {
break;
}
visitTerm(term);
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(null, docs, PostingsEnum.NONE);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -419,7 +419,7 @@ class FieldCacheImpl implements FieldCache {
res = new FixedBitSet(maxDoc);
}
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(null, docs, PostingsEnum.NONE);
// TODO: use bulk API
while (true) {
final int docID = docs.nextDoc();
@ -698,7 +698,7 @@ class FieldCacheImpl implements FieldCache {
}
termOrdToBytesOffset.add(bytes.copyUsingLengthPrefix(term));
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(null, docs, PostingsEnum.NONE);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@ -850,7 +850,7 @@ class FieldCacheImpl implements FieldCache {
break;
}
final long pointer = bytes.copyUsingLengthPrefix(term);
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(null, docs, PostingsEnum.NONE);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {

View File

@ -235,7 +235,7 @@ public abstract class SorterTestBase extends LuceneTestCase {
public void testDocsAndPositionsEnum() throws Exception {
TermsEnum termsEnum = sortedReader.terms(DOC_POSITIONS_FIELD).iterator(null);
assertEquals(SeekStatus.FOUND, termsEnum.seekCeil(new BytesRef(DOC_POSITIONS_TERM)));
PostingsEnum sortedPositions = termsEnum.postings(null, null, PostingsEnum.FLAG_ALL);
PostingsEnum sortedPositions = termsEnum.postings(null, null, PostingsEnum.ALL);
int doc;
// test nextDoc()
@ -252,7 +252,7 @@ public abstract class SorterTestBase extends LuceneTestCase {
// test advance()
final PostingsEnum reuse = sortedPositions;
sortedPositions = termsEnum.postings(null, reuse, PostingsEnum.FLAG_ALL);
sortedPositions = termsEnum.postings(null, reuse, PostingsEnum.ALL);
if (sortedPositions instanceof SortingDocsEnum) {
assertTrue(((SortingDocsEnum) sortedPositions).reused(reuse)); // make sure reuse worked
}

View File

@ -207,7 +207,7 @@ public final class TermsFilter extends Filter implements Accountable {
spare.offset = offsets[i];
spare.length = offsets[i+1] - offsets[i];
if (termsEnum.seekExact(spare)) {
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.FLAG_NONE); // no freq since we don't need them
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.NONE); // no freq since we don't need them
builder.or(docs);
}
}

View File

@ -106,7 +106,7 @@ public class DuplicateFilter extends Filter {
if (currTerm == null) {
break;
} else {
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.NONE);
int doc = docs.nextDoc();
if (doc != DocIdSetIterator.NO_MORE_DOCS) {
if (keepMode == KeepMode.KM_USE_FIRST_OCCURRENCE) {
@ -144,7 +144,7 @@ public class DuplicateFilter extends Filter {
} else {
if (termsEnum.docFreq() > 1) {
// unset potential duplicates
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.FLAG_NONE);
docs = termsEnum.postings(acceptDocs, docs, PostingsEnum.NONE);
int doc = docs.nextDoc();
if (doc != DocIdSetIterator.NO_MORE_DOCS) {
if (keepMode == KeepMode.KM_USE_FIRST_OCCURRENCE) {

View File

@ -395,7 +395,7 @@ public class TermAutomatonQuery extends Query {
TermsEnum termsEnum = context.reader().terms(field).iterator(null);
termsEnum.seekExact(term, state);
enums[ent.getKey()] = new EnumAndScorer(ent.getKey(), termsEnum.postings(acceptDocs, null, PostingsEnum.FLAG_POSITIONS));
enums[ent.getKey()] = new EnumAndScorer(ent.getKey(), termsEnum.postings(acceptDocs, null, PostingsEnum.POSITIONS));
}
}

View File

@ -94,7 +94,7 @@ public abstract class AbstractPrefixTreeFilter extends Filter {
protected void collectDocs(BitSet bitSet) throws IOException {
assert termsEnum != null;
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.NONE);
bitSet.or(postingsEnum);
}

View File

@ -186,7 +186,7 @@ public class ContainsPrefixTreeFilter extends AbstractPrefixTreeFilter {
private SmallDocSet collectDocs(Bits acceptContains) throws IOException {
SmallDocSet set = null;
postingsEnum = termsEnum.postings(acceptContains, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(acceptContains, postingsEnum, PostingsEnum.NONE);
int docid;
while ((docid = postingsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
if (set == null) {

View File

@ -173,7 +173,7 @@ public class PrefixTreeFacetCounter {
return termsEnum.docFreq();
}
int count = 0;
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.NONE);
while (postingsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
count++;
}
@ -184,7 +184,7 @@ public class PrefixTreeFacetCounter {
if (acceptDocs == null) {
return true;
}
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(acceptDocs, postingsEnum, PostingsEnum.NONE);
return (postingsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
}

View File

@ -70,7 +70,7 @@ public abstract class ShapeFieldCacheProvider<T extends Shape> {
while (term != null) {
T shape = readShape(term);
if( shape != null ) {
docs = te.postings(null, docs, PostingsEnum.FLAG_NONE);
docs = te.postings(null, docs, PostingsEnum.NONE);
Integer docid = docs.nextDoc();
while (docid != DocIdSetIterator.NO_MORE_DOCS) {
idx.add( docid, shape );

View File

@ -263,7 +263,7 @@ public class BlendedInfixSuggester extends AnalyzingInfixSuggester {
if (matchedTokens.contains(docTerm) || (prefixToken != null && docTerm.startsWith(prefixToken))) {
PostingsEnum docPosEnum = it.postings(null, null, PostingsEnum.FLAG_OFFSETS);
PostingsEnum docPosEnum = it.postings(null, null, PostingsEnum.OFFSETS);
docPosEnum.nextDoc();
// use the first occurrence of the term

View File

@ -185,16 +185,16 @@ public final class AssertingPostingsFormat extends PostingsFormat {
int flags = 0;
if (hasPositions == false) {
if (hasFreqs) {
flags = flags | PostingsEnum.FLAG_FREQS;
flags = flags | PostingsEnum.FREQS;
}
postingsEnum = termsEnum.postings(null, postingsEnum, flags);
} else {
flags = PostingsEnum.FLAG_POSITIONS;
flags = PostingsEnum.POSITIONS;
if (hasPayloads) {
flags |= PostingsEnum.FLAG_PAYLOADS;
flags |= PostingsEnum.PAYLOADS;
}
if (hasOffsets) {
flags = flags | PostingsEnum.FLAG_OFFSETS;
flags = flags | PostingsEnum.OFFSETS;
}
postingsEnum = termsEnum.postings(null, postingsEnum, flags);
}

View File

@ -263,18 +263,18 @@ public final class RAMOnlyPostingsFormat extends PostingsFormat {
if (writeFreqs == false) {
enumFlags = 0;
} else if (writePositions == false) {
enumFlags = PostingsEnum.FLAG_FREQS;
enumFlags = PostingsEnum.FREQS;
} else if (writeOffsets == false) {
if (writePayloads) {
enumFlags = PostingsEnum.FLAG_PAYLOADS;
enumFlags = PostingsEnum.PAYLOADS;
} else {
enumFlags = 0;
}
} else {
if (writePayloads) {
enumFlags = PostingsEnum.FLAG_PAYLOADS | PostingsEnum.FLAG_OFFSETS;
enumFlags = PostingsEnum.PAYLOADS | PostingsEnum.OFFSETS;
} else {
enumFlags = PostingsEnum.FLAG_OFFSETS;
enumFlags = PostingsEnum.OFFSETS;
}
}

View File

@ -1156,7 +1156,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
for (Entry<String, String> entry : entrySet) {
// pk lookup
PostingsEnum termPostingsEnum = slowR.termDocsEnum(new Term("id", entry.getKey()));
PostingsEnum termPostingsEnum = slowR.postings(new Term("id", entry.getKey()));
int docId = termPostingsEnum.nextDoc();
expected = new BytesRef(entry.getValue());
final BytesRef actual = docValues.get(docId);

View File

@ -647,14 +647,14 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
if (maxAllowed.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
return null;
}
if ((flags & PostingsEnum.FLAG_OFFSETS) == PostingsEnum.FLAG_OFFSETS && maxAllowed.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0) {
if ((flags & PostingsEnum.OFFSETS) == PostingsEnum.OFFSETS && maxAllowed.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) < 0) {
return null;
}
if ((flags & PostingsEnum.FLAG_PAYLOADS) == PostingsEnum.FLAG_PAYLOADS && allowPayloads == false) {
if ((flags & PostingsEnum.PAYLOADS) == PostingsEnum.PAYLOADS && allowPayloads == false) {
return null;
}
}
if ((flags & PostingsEnum.FLAG_FREQS) != 0 && maxAllowed.compareTo(IndexOptions.DOCS_AND_FREQS) < 0) {
if ((flags & PostingsEnum.FREQS) != 0 && maxAllowed.compareTo(IndexOptions.DOCS_AND_FREQS) < 0) {
return null;
}
return getSeedPostings(current.getKey().utf8ToString(), current.getValue().seed, false, maxAllowed, allowPayloads);
@ -815,12 +815,12 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
prevPostingsEnum = threadState.reusePostingsEnum;
}
int flags = PostingsEnum.FLAG_POSITIONS;
int flags = PostingsEnum.POSITIONS;
if (alwaysTestMax || random().nextBoolean()) {
flags |= PostingsEnum.FLAG_OFFSETS;
flags |= PostingsEnum.OFFSETS;
}
if (alwaysTestMax || random().nextBoolean()) {
flags |= PostingsEnum.FLAG_PAYLOADS;
flags |= PostingsEnum.PAYLOADS;
}
if (VERBOSE) {
@ -836,7 +836,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
if (options.contains(Option.REUSE_ENUMS) && random().nextInt(10) < 9) {
prevPostingsEnum = threadState.reusePostingsEnum;
}
threadState.reusePostingsEnum = termsEnum.postings(liveDocs, prevPostingsEnum, doCheckFreqs ? PostingsEnum.FLAG_FREQS : PostingsEnum.FLAG_NONE);
threadState.reusePostingsEnum = termsEnum.postings(liveDocs, prevPostingsEnum, doCheckFreqs ? PostingsEnum.FREQS : PostingsEnum.NONE);
postingsEnum = threadState.reusePostingsEnum;
}
} else {
@ -844,12 +844,12 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
prevPostingsEnum = threadState.reusePostingsEnum;
}
int flags = PostingsEnum.FLAG_POSITIONS;
int flags = PostingsEnum.POSITIONS;
if (alwaysTestMax || doCheckOffsets || random().nextInt(3) == 1) {
flags |= PostingsEnum.FLAG_OFFSETS;
flags |= PostingsEnum.OFFSETS;
}
if (alwaysTestMax || doCheckPayloads|| random().nextInt(3) == 1) {
flags |= PostingsEnum.FLAG_PAYLOADS;
flags |= PostingsEnum.PAYLOADS;
}
if (VERBOSE) {
@ -1414,9 +1414,9 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
TermsEnum te = terms.iterator(null);
te.seekExact(fieldAndTerm.term);
checkReuse(te, PostingsEnum.FLAG_FREQS, PostingsEnum.FLAG_ALL, false);
checkReuse(te, PostingsEnum.FREQS, PostingsEnum.ALL, false);
if (isPostingsEnumReuseImplemented())
checkReuse(te, PostingsEnum.FLAG_ALL, PostingsEnum.FLAG_ALL, true);
checkReuse(te, PostingsEnum.ALL, PostingsEnum.ALL, true);
fieldsProducer.close();
dir.close();
@ -1495,7 +1495,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
LeafReader ar = getOnlySegmentReader(ir);
TermsEnum termsEnum = ar.terms("field").iterator(null);
assertTrue(termsEnum.seekExact(new BytesRef("value")));
PostingsEnum docsEnum = termsEnum.postings(null, null, DocsEnum.FLAG_NONE);
PostingsEnum docsEnum = termsEnum.postings(null, null, PostingsEnum.NONE);
assertEquals(0, docsEnum.nextDoc());
assertEquals(1, docsEnum.freq());
assertEquals(1, docsEnum.nextDoc());
@ -1622,9 +1622,9 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
BytesRef term = termsEnum.term();
boolean noPositions = random().nextBoolean();
if (noPositions) {
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_FREQS);
docs = termsEnum.postings(null, docs, PostingsEnum.FREQS);
} else {
docs = termsEnum.postings(null, null, PostingsEnum.FLAG_POSITIONS);
docs = termsEnum.postings(null, null, PostingsEnum.POSITIONS);
}
int docFreq = 0;
long totalTermFreq = 0;
@ -1670,9 +1670,9 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
if (termsEnum.seekExact(new BytesRef(term))) {
boolean noPositions = random().nextBoolean();
if (noPositions) {
docs = termsEnum.postings(null, docs, PostingsEnum.FLAG_FREQS);
docs = termsEnum.postings(null, docs, PostingsEnum.FREQS);
} else {
docs = termsEnum.postings(null, null, PostingsEnum.FLAG_POSITIONS);
docs = termsEnum.postings(null, null, PostingsEnum.POSITIONS);
}
int docFreq = 0;

View File

@ -452,14 +452,14 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
this.docsEnum.set(postingsEnum);
bits.clear(0);
PostingsEnum docsAndPositionsEnum = termsEnum.postings(bits, random().nextBoolean() ? null : this.docsEnum.get(), PostingsEnum.FLAG_POSITIONS);
PostingsEnum docsAndPositionsEnum = termsEnum.postings(bits, random().nextBoolean() ? null : this.docsEnum.get(), PostingsEnum.POSITIONS);
assertEquals(ft.storeTermVectorOffsets() || ft.storeTermVectorPositions(), docsAndPositionsEnum != null);
if (docsAndPositionsEnum != null) {
assertEquals(PostingsEnum.NO_MORE_DOCS, docsAndPositionsEnum.nextDoc());
}
bits.set(0);
docsAndPositionsEnum = termsEnum.postings(random().nextBoolean() ? bits : null, random().nextBoolean() ? null : docsAndPositionsEnum, PostingsEnum.FLAG_POSITIONS);
docsAndPositionsEnum = termsEnum.postings(random().nextBoolean() ? bits : null, random().nextBoolean() ? null : docsAndPositionsEnum, PostingsEnum.POSITIONS);
assertEquals(ft.storeTermVectorOffsets() || ft.storeTermVectorPositions(), docsAndPositionsEnum != null);
if (terms.hasPositions() || terms.hasOffsets()) {
assertEquals(0, docsAndPositionsEnum.nextDoc());

View File

@ -1893,17 +1893,17 @@ public abstract class LuceneTestCase extends Assert {
assertEquals(info, term, rightTermsEnum.next());
assertTermStatsEquals(info, leftTermsEnum, rightTermsEnum);
if (deep) {
assertDocsAndPositionsEnumEquals(info, leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_ALL));
assertDocsAndPositionsEnumEquals(info, leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_ALL));
assertDocsAndPositionsEnumEquals(info, leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.ALL));
assertDocsAndPositionsEnumEquals(info, leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.ALL));
assertPositionsSkippingEquals(info, leftReader, leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.FLAG_ALL));
leftPositions = leftTermsEnum.postings(null, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(null, rightPositions, PostingsEnum.ALL));
assertPositionsSkippingEquals(info, leftReader, leftTermsEnum.docFreq(),
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.FLAG_ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.FLAG_ALL));
leftPositions = leftTermsEnum.postings(randomBits, leftPositions, PostingsEnum.ALL),
rightPositions = rightTermsEnum.postings(randomBits, rightPositions, PostingsEnum.ALL));
// with freqs:
assertDocsEnumEquals(info, leftDocs = leftTermsEnum.postings(null, leftDocs),
@ -1914,11 +1914,11 @@ public abstract class LuceneTestCase extends Assert {
true);
// w/o freqs:
assertDocsEnumEquals(info, leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.FLAG_NONE),
assertDocsEnumEquals(info, leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.NONE),
false);
assertDocsEnumEquals(info, leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.FLAG_NONE),
assertDocsEnumEquals(info, leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.NONE),
false);
// with freqs:
@ -1933,12 +1933,12 @@ public abstract class LuceneTestCase extends Assert {
// w/o freqs:
assertDocsSkippingEquals(info, leftReader, leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.FLAG_NONE),
leftDocs = leftTermsEnum.postings(null, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(null, rightDocs, PostingsEnum.NONE),
false);
assertDocsSkippingEquals(info, leftReader, leftTermsEnum.docFreq(),
leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.FLAG_NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.FLAG_NONE),
leftDocs = leftTermsEnum.postings(randomBits, leftDocs, PostingsEnum.NONE),
rightDocs = rightTermsEnum.postings(randomBits, rightDocs, PostingsEnum.NONE),
false);
}
}

View File

@ -1018,17 +1018,17 @@ public final class TestUtil {
if (random.nextBoolean()) {
final int posFlags;
switch (random.nextInt(4)) {
case 0: posFlags = PostingsEnum.FLAG_POSITIONS; break;
case 1: posFlags = PostingsEnum.FLAG_OFFSETS; break;
case 2: posFlags = PostingsEnum.FLAG_PAYLOADS; break;
default: posFlags = PostingsEnum.FLAG_OFFSETS | PostingsEnum.FLAG_PAYLOADS; break;
case 0: posFlags = PostingsEnum.POSITIONS; break;
case 1: posFlags = PostingsEnum.OFFSETS; break;
case 2: posFlags = PostingsEnum.PAYLOADS; break;
default: posFlags = PostingsEnum.OFFSETS | PostingsEnum.PAYLOADS; break;
}
PostingsEnum docsAndPositions = termsEnum.postings(liveDocs, null, posFlags);
if (docsAndPositions != null) {
return docsAndPositions;
}
}
flags |= PostingsEnum.FLAG_FREQS;
flags |= PostingsEnum.FREQS;
}
return termsEnum.postings(liveDocs, reuse, flags);
}

View File

@ -394,7 +394,7 @@ public class LukeRequestHandler extends RequestHandlerBase
if (text == null) { // Ran off the end of the terms enum without finding any live docs with that field in them.
return null;
}
postingsEnum = termsEnum.postings(reader.getLiveDocs(), postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(reader.getLiveDocs(), postingsEnum, PostingsEnum.NONE);
if (postingsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
return reader.document(postingsEnum.docID());
}

View File

@ -692,7 +692,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
for (String id : elevations.ids) {
term.copyChars(id);
if (seen.contains(id) == false && termsEnum.seekExact(term.get())) {
postingsEnum = termsEnum.postings(liveDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(liveDocs, postingsEnum, PostingsEnum.NONE);
if (postingsEnum != null) {
int docId = postingsEnum.nextDoc();
if (docId == DocIdSetIterator.NO_MORE_DOCS ) continue; // must have been deleted

View File

@ -341,7 +341,7 @@ public class TermVectorComponent extends SearchComponent implements SolrCoreAwar
termInfo.add("tf", freq);
}
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.FLAG_ALL);
dpEnum = termsEnum.postings(null, dpEnum, PostingsEnum.ALL);
boolean useOffsets = false;
boolean usePositions = false;
if (dpEnum != null) {

View File

@ -780,7 +780,7 @@ public class SimpleFacets {
// TODO: specialize when base docset is a bitset or hash set (skipDocs)? or does it matter for this?
// TODO: do this per-segment for better efficiency (MultiDocsEnum just uses base class impl)
// TODO: would passing deleted docs lead to better efficiency over checking the fastForRandomSet?
postingsEnum = termsEnum.postings(null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(null, postingsEnum, PostingsEnum.NONE);
c=0;
if (postingsEnum instanceof MultiPostingsEnum) {

View File

@ -424,7 +424,7 @@ class JoinQuery extends Query {
if (freq < minDocFreqFrom) {
fromTermDirectCount++;
// OK to skip liveDocs, since we check for intersection with docs matching query
fromDeState.postingsEnum = fromDeState.termsEnum.postings(null, fromDeState.postingsEnum, PostingsEnum.FLAG_NONE);
fromDeState.postingsEnum = fromDeState.termsEnum.postings(null, fromDeState.postingsEnum, PostingsEnum.NONE);
PostingsEnum postingsEnum = fromDeState.postingsEnum;
if (postingsEnum instanceof MultiPostingsEnum) {
@ -489,7 +489,7 @@ class JoinQuery extends Query {
toTermDirectCount++;
// need to use liveDocs here so we don't map to any deleted ones
toDeState.postingsEnum = toDeState.termsEnum.postings(toDeState.liveDocs, toDeState.postingsEnum, PostingsEnum.FLAG_NONE);
toDeState.postingsEnum = toDeState.termsEnum.postings(toDeState.liveDocs, toDeState.postingsEnum, PostingsEnum.NONE);
PostingsEnum postingsEnum = toDeState.postingsEnum;
if (postingsEnum instanceof MultiPostingsEnum) {

View File

@ -797,7 +797,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
if (!termsEnum.seekExact(termBytes)) {
return -1;
}
PostingsEnum docs = termsEnum.postings(leafReader.getLiveDocs(), null, PostingsEnum.FLAG_NONE);
PostingsEnum docs = termsEnum.postings(leafReader.getLiveDocs(), null, PostingsEnum.NONE);
if (docs == null) return -1;
int id = docs.nextDoc();
return id == DocIdSetIterator.NO_MORE_DOCS ? -1 : id;
@ -819,7 +819,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
TermsEnum te = terms.iterator(null);
if (te.seekExact(idBytes)) {
PostingsEnum docs = te.postings(reader.getLiveDocs(), null, PostingsEnum.FLAG_NONE);
PostingsEnum docs = te.postings(reader.getLiveDocs(), null, PostingsEnum.NONE);
int id = docs.nextDoc();
if (id == DocIdSetIterator.NO_MORE_DOCS) continue;
assert docs.nextDoc() == DocIdSetIterator.NO_MORE_DOCS;
@ -1164,7 +1164,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
int bitsSet = 0;
FixedBitSet fbs = null;
PostingsEnum postingsEnum = deState.termsEnum.postings(deState.liveDocs, deState.postingsEnum, PostingsEnum.FLAG_NONE);
PostingsEnum postingsEnum = deState.termsEnum.postings(deState.liveDocs, deState.postingsEnum, PostingsEnum.NONE);
if (deState.postingsEnum == null) {
deState.postingsEnum = postingsEnum;
}

View File

@ -305,7 +305,7 @@ public class FileFloatSource extends ValueSource {
continue;
}
postingsEnum = termsEnum.postings(null, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(null, postingsEnum, PostingsEnum.NONE);
int doc;
while ((doc = postingsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
vals[doc] = fval;

View File

@ -195,7 +195,7 @@ public class SolrIndexSplitter {
hash = hashRouter.sliceHash(idString, null, null, null);
}
postingsEnum = termsEnum.postings(liveDocs, postingsEnum, PostingsEnum.FLAG_NONE);
postingsEnum = termsEnum.postings(liveDocs, postingsEnum, PostingsEnum.NONE);
for (;;) {
int doc = postingsEnum.nextDoc();
if (doc == DocIdSetIterator.NO_MORE_DOCS) break;

View File

@ -132,7 +132,7 @@ public class TestRTGBase extends SolrTestCaseJ4 {
if (!termsEnum.seekExact(termBytes)) {
return -1;
}
PostingsEnum docs = termsEnum.postings(MultiFields.getLiveDocs(r), null, PostingsEnum.FLAG_NONE);
PostingsEnum docs = termsEnum.postings(MultiFields.getLiveDocs(r), null, PostingsEnum.NONE);
int id = docs.nextDoc();
if (id != DocIdSetIterator.NO_MORE_DOCS) {
int next = docs.nextDoc();