LUCENE-6039: cutover to IndexOptions.NO/DocValuesType.NO instead of null

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1635790 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2014-10-31 15:10:08 +00:00
parent 05ad610074
commit bc41d58cd3
149 changed files with 713 additions and 670 deletions

View File

@@ -194,6 +194,13 @@ Bug Fixes
   behave properly when wrapping other ValueSources which do not exist for the specified document
   (hossman)
 
+* LUCENE-6039: Add IndexOptions.NO and DocValuesType.NO instead of
+  using null to mean not indexed and no doc values, renamed
+  IndexOptions.DOCS_ONLY to DOCS, and pulled IndexOptions and
+  DocValuesType out of FieldInfo into their own classes in
+  org.apache.lucene.index (Simon Willnauer, Robert Muir, Mike
+  McCandless)
+
 Documentation
 
 * LUCENE-5392: Add/improve analysis package documentation to reflect
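
A minimal sketch of what this entry means for calling code, based only on the hunks in this commit (the class and method names here are illustrative, not part of the change):

    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.DocValuesType;  // was org.apache.lucene.index.FieldInfo.DocValuesType
    import org.apache.lucene.index.FieldInfo;
    import org.apache.lucene.index.IndexOptions;   // was org.apache.lucene.index.FieldInfo.IndexOptions

    class Lucene6039Example {
      static FieldType docsOnlyField() {
        FieldType t = new FieldType(TextField.TYPE_STORED);
        t.setIndexOptions(IndexOptions.DOCS);      // formerly IndexOptions.DOCS_ONLY
        return t;
      }

      // null checks become comparisons against the new explicit NO constants:
      static boolean isIndexedWithDocValues(FieldInfo fi) {
        return fi.getIndexOptions() != IndexOptions.NO
            && fi.getDocValuesType() != DocValuesType.NO;
      }
    }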

View File

@@ -51,7 +51,6 @@ import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
@@ -920,12 +919,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 private void addNoProxDoc(IndexWriter writer) throws IOException {
 Document doc = new Document();
 FieldType customType = new FieldType(TextField.TYPE_STORED);
-customType.setIndexOptions(IndexOptions.DOCS_ONLY);
+customType.setIndexOptions(IndexOptions.DOCS);
 Field f = new Field("content3", "aaa", customType);
 doc.add(f);
 FieldType customType2 = new FieldType();
 customType2.setStored(true);
-customType2.setIndexOptions(IndexOptions.DOCS_ONLY);
+customType2.setIndexOptions(IndexOptions.DOCS);
 f = new Field("content4", "aaa", customType2);
 doc.add(f);
 writer.addDocument(doc);

View File

@@ -31,9 +31,9 @@ import org.apache.lucene.codecs.PostingsReaderBase;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.index.Terms;
@@ -140,7 +140,7 @@ public class BlockTermsReader extends FieldsProducer {
 assert numTerms >= 0;
 final long termsStartPointer = in.readVLong();
 final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
-final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
+final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : in.readVLong();
 final long sumDocFreq = in.readVLong();
 final int docCount = in.readVInt();
 final int longsSize = in.readVInt();
@@ -827,7 +827,7 @@ public class BlockTermsReader extends FieldsProducer {
 // docFreq, totalTermFreq
 state.docFreq = freqReader.readVInt();
 //System.out.println(" dF=" + state.docFreq);
-if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 state.totalTermFreq = state.docFreq + freqReader.readVLong();
 //System.out.println(" totTF=" + state.totalTermFreq);
 }

View File

@@ -28,7 +28,7 @@ import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.PostingsWriterBase;
 import org.apache.lucene.codecs.TermStats;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
@@ -172,7 +172,7 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
 out.writeVInt(field.fieldInfo.number);
 out.writeVLong(field.numTerms);
 out.writeVLong(field.termsStartPointer);
-if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 out.writeVLong(field.sumTotalTermFreq);
 }
 out.writeVLong(field.sumDocFreq);
@@ -347,7 +347,7 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
 final BlockTermState state = pendingTerms[termCount].state;
 assert state != null;
 bytesWriter.writeVInt(state.docFreq);
-if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 bytesWriter.writeVLong(state.totalTermFreq-state.docFreq);
 }
 }

View File

@@ -29,7 +29,7 @@ import org.apache.lucene.codecs.FieldsProducer;
 import org.apache.lucene.codecs.PostingsReaderBase;
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentReadState;
@@ -125,7 +125,7 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
 final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
 assert fieldInfo != null: "field=" + field;
 assert numTerms <= Integer.MAX_VALUE;
-final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
+final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : in.readVLong();
 final long sumDocFreq = in.readVLong();
 final int docCount = in.readVInt();
 final int longsSize = in.readVInt();

View File

@@ -27,11 +27,11 @@ import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.PostingsWriterBase;
 import org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter; // javadocs
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
@@ -646,7 +646,7 @@ public final class OrdsBlockTreeTermsWriter extends FieldsConsumer {
 // Write term stats, to separate byte[] blob:
 statsWriter.writeVInt(state.docFreq);
-if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 assert state.totalTermFreq >= state.docFreq: state.totalTermFreq + " vs " + state.docFreq;
 statsWriter.writeVLong(state.totalTermFreq - state.docFreq);
 }
@@ -691,7 +691,7 @@ public final class OrdsBlockTreeTermsWriter extends FieldsConsumer {
 // Write term stats, to separate byte[] blob:
 statsWriter.writeVInt(state.docFreq);
-if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 assert state.totalTermFreq >= state.docFreq;
 statsWriter.writeVLong(state.totalTermFreq - state.docFreq);
 }
@@ -802,7 +802,7 @@ public final class OrdsBlockTreeTermsWriter extends FieldsConsumer {
 BlockTermState state = postingsWriter.writeTerm(text, termsEnum, docsSeen);
 if (state != null) {
 assert state.docFreq != 0;
-assert fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY || state.totalTermFreq >= state.docFreq: "postingsWriter=" + postingsWriter;
+assert fieldInfo.getIndexOptions() == IndexOptions.DOCS || state.totalTermFreq >= state.docFreq: "postingsWriter=" + postingsWriter;
 sumDocFreq += state.docFreq;
 sumTotalTermFreq += state.totalTermFreq;
 pushTerm(text);
@@ -927,7 +927,7 @@ public final class OrdsBlockTreeTermsWriter extends FieldsConsumer {
 out.writeVLong(field.numTerms);
 out.writeVInt(field.rootCode.bytes.length);
 out.writeBytes(field.rootCode.bytes.bytes, field.rootCode.bytes.offset, field.rootCode.bytes.length);
-if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 out.writeVLong(field.sumTotalTermFreq);
 }
 out.writeVLong(field.sumDocFreq);

View File

@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.util.Collections;
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.ByteArrayDataInput;

View File

@@ -22,7 +22,7 @@ import java.io.IOException;
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.IndexInput;

View File

@@ -21,7 +21,7 @@ import java.io.IOException;
 import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
@@ -292,7 +292,7 @@ final class OrdsIntersectTermsEnumFrame {
 // stats
 termState.docFreq = statsReader.readVInt();
 //if (DEBUG) System.out.println(" dF=" + state.docFreq);
-if (ite.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (ite.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 termState.totalTermFreq = termState.docFreq + statsReader.readVLong();
 //if (DEBUG) System.out.println(" totTF=" + state.totalTermFreq);
 }

View File

@@ -27,7 +27,7 @@ import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.ByteArrayDataInput;

View File

@@ -21,7 +21,7 @@ import java.io.IOException;
 import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.blocktreeords.FSTOrdsOutputs.Output;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.TermsEnum.SeekStatus;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.util.ArrayUtil;
@@ -499,7 +499,7 @@ final class OrdsSegmentTermsEnumFrame {
 // stats
 state.docFreq = statsReader.readVInt();
 //if (DEBUG) System.out.println(" dF=" + state.docFreq);
-if (ste.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (ste.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 state.totalTermFreq = state.docFreq + statsReader.readVLong();
 //if (DEBUG) System.out.println(" totTF=" + state.totalTermFreq);
 }

View File

@@ -29,9 +29,9 @@ import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat; // javadocs
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.OrdTermState;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
@@ -327,7 +327,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
 this.minSkipCount = minSkipCount;
-hasFreq = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_ONLY) > 0;
+hasFreq = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS) > 0;
 hasPos = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) > 0;
 hasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) > 0;
 hasPayloads = fieldInfo.hasPayloads();
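
The DirectPostingsFormat hunk above leans on IndexOptions being an ordered enum, so compareTo can answer "does this field index at least freqs / positions / offsets?" even now that NO is one of the constants. A small sketch of that idiom (the helper class and method names are illustrative, not from this commit):

    import org.apache.lucene.index.FieldInfo;
    import org.apache.lucene.index.IndexOptions;

    final class IndexOptionsChecks {
      // true if the field indexes at least the given level, e.g. DOCS_AND_FREQS
      static boolean indexesAtLeast(FieldInfo fi, IndexOptions level) {
        // getIndexOptions() now returns IndexOptions.NO rather than null for unindexed fields,
        // so the comparison is safe without a null check
        return fi.getIndexOptions().compareTo(level) >= 0;
      }
    }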

View File

@@ -29,7 +29,7 @@ import java.util.TreeMap;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
@@ -110,7 +110,7 @@ public class FSTOrdTermsReader extends FieldsProducer {
 final int numFields = blockIn.readVInt();
 for (int i = 0; i < numFields; i++) {
 FieldInfo fieldInfo = fieldInfos.fieldInfo(blockIn.readVInt());
-boolean hasFreq = fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY;
+boolean hasFreq = fieldInfo.getIndexOptions() != IndexOptions.DOCS;
 long numTerms = blockIn.readVLong();
 long sumTotalTermFreq = hasFreq ? blockIn.readVLong() : -1;
 long sumDocFreq = blockIn.readVLong();

View File

@@ -25,11 +25,11 @@ import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.PostingsWriterBase;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
@@ -231,7 +231,7 @@ public class FSTOrdTermsWriter extends FieldsConsumer {
 for (FieldMetaData field : fields) {
 blockOut.writeVInt(field.fieldInfo.number);
 blockOut.writeVLong(field.numTerms);
-if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 blockOut.writeVLong(field.sumTotalTermFreq);
 }
 blockOut.writeVLong(field.sumDocFreq);
@@ -335,7 +335,7 @@ public class FSTOrdTermsWriter extends FieldsConsumer {
 if (delta == 0) {
 statsOut.writeVInt(state.docFreq<<1|1);
 } else {
-statsOut.writeVInt(state.docFreq<<1|0);
+statsOut.writeVInt(state.docFreq<<1);
 statsOut.writeVLong(state.totalTermFreq-state.docFreq);
 }
 } else {

View File

@@ -22,7 +22,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.store.DataInput;
 import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.util.Accountable;
@@ -129,7 +129,7 @@ class FSTTermOutputs extends Outputs<FSTTermOutputs.TermData> {
 }
 protected FSTTermOutputs(FieldInfo fieldInfo, int longsSize) {
-this.hasPos = (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY);
+this.hasPos = fieldInfo.getIndexOptions() != IndexOptions.DOCS;
 this.longsSize = longsSize;
 }

View File

@@ -28,7 +28,7 @@ import java.util.TreeMap;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
@@ -95,7 +95,7 @@ public class FSTTermsReader extends FieldsProducer {
 int fieldNumber = in.readVInt();
 FieldInfo fieldInfo = fieldInfos.fieldInfo(fieldNumber);
 long numTerms = in.readVLong();
-long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
+long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : in.readVLong();
 long sumDocFreq = in.readVLong();
 int docCount = in.readVInt();
 int longsSize = in.readVInt();

View File

@@ -25,7 +25,7 @@ import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.PostingsWriterBase;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
@@ -204,7 +204,7 @@ public class FSTTermsWriter extends FieldsConsumer {
 for (FieldMetaData field : fields) {
 out.writeVInt(field.fieldInfo.number);
 out.writeVLong(field.numTerms);
-if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 out.writeVLong(field.sumTotalTermFreq);
 }
 out.writeVLong(field.sumDocFreq);

View File

@@ -33,7 +33,7 @@ import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.IndexFileNames;
@@ -145,7 +145,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
 lastDocID = docID;
 docCount++;
-if (field.getIndexOptions() == IndexOptions.DOCS_ONLY) {
+if (field.getIndexOptions() == IndexOptions.DOCS) {
 buffer.writeVInt(delta);
 } else if (termDocFreq == 1) {
 buffer.writeVInt((delta<<1) | 1);
@@ -232,7 +232,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
 assert buffer2.getFilePointer() == 0;
 buffer2.writeVInt(stats.docFreq);
-if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.getIndexOptions() != IndexOptions.DOCS) {
 buffer2.writeVLong(stats.totalTermFreq-stats.docFreq);
 }
 int pos = (int) buffer2.getFilePointer();
@@ -262,7 +262,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
 if (termCount > 0) {
 out.writeVInt(termCount);
 out.writeVInt(field.number);
-if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.getIndexOptions() != IndexOptions.DOCS) {
 out.writeVLong(sumTotalTermFreq);
 }
 out.writeVLong(sumDocFreq);
@@ -470,7 +470,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
 return docID = NO_MORE_DOCS;
 }
 docUpto++;
-if (indexOptions == IndexOptions.DOCS_ONLY) {
+if (indexOptions == IndexOptions.DOCS) {
 accum += in.readVInt();
 } else {
 final int code = in.readVInt();
@@ -754,7 +754,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
 if (!didDecode) {
 buffer.reset(current.output.bytes, current.output.offset, current.output.length);
 docFreq = buffer.readVInt();
-if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (field.getIndexOptions() != IndexOptions.DOCS) {
 totalTermFreq = docFreq + buffer.readVLong();
 } else {
 totalTermFreq = -1;
@@ -896,7 +896,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
 field = fieldInfos.fieldInfo(fieldNumber);
 if (field == null) {
 throw new CorruptIndexException("invalid field number: " + fieldNumber, in);
-} else if (field.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+} else if (field.getIndexOptions() != IndexOptions.DOCS) {
 sumTotalTermFreq = in.readVLong();
 } else {
 sumTotalTermFreq = -1;

View File

@@ -17,16 +17,6 @@ package org.apache.lucene.codecs.simpletext;
 * limitations under the License.
 */
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.END;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.FIELD;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.LENGTH;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.MAXLENGTH;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.MINVALUE;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.NUMVALUES;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.ORDPATTERN;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.PATTERN;
-import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.TYPE;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
@@ -43,8 +33,8 @@ import org.apache.lucene.codecs.DocValuesProducer;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SegmentReadState;
@@ -61,6 +51,16 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.StringHelper;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.END;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.FIELD;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.LENGTH;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.MAXLENGTH;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.MINVALUE;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.NUMVALUES;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.ORDPATTERN;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.PATTERN;
+import static org.apache.lucene.codecs.simpletext.SimpleTextDocValuesWriter.TYPE;
 class SimpleTextDocValuesReader extends DocValuesProducer {
 private static final long BASE_RAM_BYTES_USED =
@@ -104,7 +104,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
 assert startsWith(TYPE) : scratch.get().utf8ToString();
 DocValuesType dvType = DocValuesType.valueOf(stripPrefix(TYPE));
-assert dvType != null;
+assert dvType != DocValuesType.NO;
 if (dvType == DocValuesType.NUMERIC) {
 readLine();
 assert startsWith(MINVALUE): "got " + scratch.get().utf8ToString() + " field=" + fieldName + " ext=" + ext;

View File

@@ -30,7 +30,7 @@ import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentWriteState;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
@@ -71,8 +71,8 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
 @Override
 public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
 assert fieldSeen(field.name);
-assert field.getDocValuesType() == FieldInfo.DocValuesType.NUMERIC || field.hasNorms();
-writeFieldEntry(field, FieldInfo.DocValuesType.NUMERIC);
+assert field.getDocValuesType() == DocValuesType.NUMERIC || field.hasNorms();
+writeFieldEntry(field, DocValuesType.NUMERIC);
 // first pass to find min/max
 long minValue = Long.MAX_VALUE;
@@ -145,7 +145,7 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
 final int length = value == null ? 0 : value.length;
 maxLength = Math.max(maxLength, length);
 }
-writeFieldEntry(field, FieldInfo.DocValuesType.BINARY);
+writeFieldEntry(field, DocValuesType.BINARY);
 // write maxLength
 SimpleTextUtil.write(data, MAXLENGTH);
@@ -198,7 +198,7 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
 public void addSortedField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrd) throws IOException {
 assert fieldSeen(field.name);
 assert field.getDocValuesType() == DocValuesType.SORTED;
-writeFieldEntry(field, FieldInfo.DocValuesType.SORTED);
+writeFieldEntry(field, DocValuesType.SORTED);
 int valueCount = 0;
 int maxLength = -1;
@@ -317,7 +317,7 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
 public void addSortedSetField(FieldInfo field, Iterable<BytesRef> values, Iterable<Number> docToOrdCount, Iterable<Number> ords) throws IOException {
 assert fieldSeen(field.name);
 assert field.getDocValuesType() == DocValuesType.SORTED_SET;
-writeFieldEntry(field, FieldInfo.DocValuesType.SORTED_SET);
+writeFieldEntry(field, DocValuesType.SORTED_SET);
 long valueCount = 0;
 int maxLength = 0;
@@ -423,7 +423,7 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
 }
 /** write the header for this field */
-private void writeFieldEntry(FieldInfo field, FieldInfo.DocValuesType type) throws IOException {
+private void writeFieldEntry(FieldInfo field, DocValuesType type) throws IOException {
 SimpleTextUtil.write(data, FIELD);
 SimpleTextUtil.write(data, field.name, scratch);
 SimpleTextUtil.writeNewline(data);

View File

@@ -24,12 +24,12 @@ import java.util.HashMap;
 import java.util.Map;
 import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentInfo;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -88,15 +88,10 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
 assert StringHelper.startsWith(scratch.get(), NUMBER);
 int fieldNumber = Integer.parseInt(readString(NUMBER.length, scratch));
-final IndexOptions indexOptions;
 SimpleTextUtil.readLine(input, scratch);
 assert StringHelper.startsWith(scratch.get(), INDEXOPTIONS);
 String s = readString(INDEXOPTIONS.length, scratch);
-if ("null".equals(s)) {
-indexOptions = null;
-} else {
-indexOptions = IndexOptions.valueOf(s);
-}
+final IndexOptions indexOptions = IndexOptions.valueOf(s);
 SimpleTextUtil.readLine(input, scratch);
 assert StringHelper.startsWith(scratch.get(), STORETV);
@@ -154,11 +149,7 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
 }
 public DocValuesType docValuesType(String dvType) {
-if ("false".equals(dvType)) {
-return null;
-} else {
-return DocValuesType.valueOf(dvType);
-}
+return DocValuesType.valueOf(dvType);
 }
 private String readString(int offset, BytesRefBuilder scratch) {
@@ -187,12 +178,8 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
 SimpleTextUtil.write(out, INDEXOPTIONS);
 IndexOptions indexOptions = fi.getIndexOptions();
-if (indexOptions != null) {
-assert fi.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.hasPayloads();
-SimpleTextUtil.write(out, fi.getIndexOptions().toString(), scratch);
-} else {
-SimpleTextUtil.write(out, "null", scratch);
-}
+assert indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.hasPayloads();
+SimpleTextUtil.write(out, indexOptions.toString(), scratch);
 SimpleTextUtil.writeNewline(out);
 SimpleTextUtil.write(out, STORETV);
@@ -245,6 +232,6 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
 }
 private static String getDocValuesType(DocValuesType type) {
-return type == null ? "false" : type.toString();
+return type.toString();
 }
 }
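
The SimpleTextFieldInfosFormat hunks above show the pattern this commit repeats across formats: because "not indexed" and "no doc values" are now real enum constants, values round-trip with plain toString/valueOf and the "null"/"false" sentinels disappear. A hedged sketch of that round trip (the class and method names are illustrative, not from the commit):

    import org.apache.lucene.index.DocValuesType;
    import org.apache.lucene.index.IndexOptions;

    final class EnumRoundTrip {
      static String writeIndexOptions(IndexOptions opts) {
        return opts.toString();            // IndexOptions.NO serializes like any other constant
      }
      static IndexOptions readIndexOptions(String s) {
        return IndexOptions.valueOf(s);    // no special case for "null" needed any more
      }
      static String writeDocValuesType(DocValuesType type) {
        return type.toString();            // previously: type == null ? "false" : type.toString()
      }
      static DocValuesType readDocValuesType(String s) {
        return DocValuesType.valueOf(s);   // previously: "false".equals(s) ? null : DocValuesType.valueOf(s)
      }
    }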

View File

@@ -17,16 +17,6 @@ package org.apache.lucene.codecs.simpletext;
 * limitations under the License.
 */
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.DOC;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.END;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.END_OFFSET;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.FIELD;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.FREQ;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.PAYLOAD;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.POS;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.START_OFFSET;
-import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.TERM;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
@@ -39,8 +29,8 @@ import org.apache.lucene.codecs.FieldsProducer;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
@@ -57,11 +47,9 @@ import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.IntsRef;
 import org.apache.lucene.util.IntsRefBuilder;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.StringHelper;
-import org.apache.lucene.util.UnicodeUtil;
 import org.apache.lucene.util.fst.Builder;
 import org.apache.lucene.util.fst.BytesRefFSTEnum;
 import org.apache.lucene.util.fst.FST;
@@ -69,6 +57,16 @@ import org.apache.lucene.util.fst.PairOutputs;
 import org.apache.lucene.util.fst.PositiveIntOutputs;
 import org.apache.lucene.util.fst.Util;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.DOC;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.END;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.END_OFFSET;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.FIELD;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.FREQ;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.PAYLOAD;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.POS;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.START_OFFSET;
+import static org.apache.lucene.codecs.simpletext.SimpleTextFieldsWriter.TERM;
 class SimpleTextFieldsReader extends FieldsProducer {
 private static final long BASE_RAM_BYTES_USED =
@@ -205,7 +203,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
 @Override
 public long totalTermFreq() {
-return indexOptions == IndexOptions.DOCS_ONLY ? -1 : totalTermFreq;
+return indexOptions == IndexOptions.DOCS ? -1 : totalTermFreq;
 }
 @Override
@@ -216,7 +214,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
 } else {
 docsEnum = new SimpleTextDocsEnum();
 }
-return docsEnum.reset(docsStart, liveDocs, indexOptions == IndexOptions.DOCS_ONLY, docFreq);
+return docsEnum.reset(docsStart, liveDocs, indexOptions == IndexOptions.DOCS, docFreq);
 }
 @Override
@@ -626,7 +624,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
 @Override
 public long getSumTotalTermFreq() {
-return fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : sumTotalTermFreq;
+return fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : sumTotalTermFreq;
 }
 @Override

View File

@@ -28,11 +28,11 @@ import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FilteredTermsEnum;
 import org.apache.lucene.index.MergeState;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.MultiDocValues.OrdinalMap;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.SegmentWriteState;
+import org.apache.lucene.index.SegmentWriteState; // javadocs
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
@@ -138,7 +138,7 @@ public abstract class DocValuesConsumer implements Closeable {
 for (FieldInfo mergeFieldInfo : mergeState.mergeFieldInfos) {
 DocValuesType type = mergeFieldInfo.getDocValuesType();
-if (type != null) {
+if (type != DocValuesType.NO) {
 if (type == DocValuesType.NUMERIC) {
 List<NumericDocValues> toMerge = new ArrayList<>();
 List<Bits> docsWithField = new ArrayList<>();

View File

@@ -21,8 +21,8 @@ import java.io.IOException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;

View File

@@ -28,9 +28,9 @@ import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldsProducer;
 import org.apache.lucene.codecs.PostingsReaderBase;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.store.IndexInput;
@@ -170,7 +170,7 @@ public final class BlockTreeTermsReader extends FieldsProducer {
 if (fieldInfo == null) {
 throw new CorruptIndexException("invalid field number: " + field, termsIn);
 }
-final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : termsIn.readVLong();
+final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : termsIn.readVLong();
 final long sumDocFreq = termsIn.readVLong();
 final int docCount = termsIn.readVInt();
 final int longsSize = termsIn.readVInt();

View File

@@ -25,11 +25,11 @@ import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldsConsumer;
 import org.apache.lucene.codecs.PostingsWriterBase;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
@@ -695,7 +695,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
 // Write term stats, to separate byte[] blob:
 statsWriter.writeVInt(state.docFreq);
-if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 assert state.totalTermFreq >= state.docFreq: state.totalTermFreq + " vs " + state.docFreq;
 statsWriter.writeVLong(state.totalTermFreq - state.docFreq);
 }
@@ -736,7 +736,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
 // Write term stats, to separate byte[] blob:
 statsWriter.writeVInt(state.docFreq);
-if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+if (fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 assert state.totalTermFreq >= state.docFreq;
 statsWriter.writeVLong(state.totalTermFreq - state.docFreq);
 }
@@ -824,6 +824,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
 TermsWriter(FieldInfo fieldInfo) {
 this.fieldInfo = fieldInfo;
+assert fieldInfo.getIndexOptions() != IndexOptions.NO;
 docsSeen = new FixedBitSet(maxDoc);
 this.longsSize = postingsWriter.setField(fieldInfo);
@@ -843,7 +844,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
 BlockTermState state = postingsWriter.writeTerm(text, termsEnum, docsSeen);
 if (state != null) {
 assert state.docFreq != 0;
-assert fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY || state.totalTermFreq >= state.docFreq: "postingsWriter=" + postingsWriter;
+assert fieldInfo.getIndexOptions() == IndexOptions.DOCS || state.totalTermFreq >= state.docFreq: "postingsWriter=" + postingsWriter;
 sumDocFreq += state.docFreq;
 sumTotalTermFreq += state.totalTermFreq;
 pushTerm(text);
@@ -944,7 +945,7 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
 longsSize,
 minTerm, maxTerm));
 } else {
-assert sumTotalTermFreq == 0 || fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY && sumTotalTermFreq == -1;
+assert sumTotalTermFreq == 0 || fieldInfo.getIndexOptions() == IndexOptions.DOCS && sumTotalTermFreq == -1;
 assert sumDocFreq == 0;
 assert docsSeen.cardinality() == 0;
 }
@@ -974,7 +975,8 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
 termsOut.writeVLong(field.numTerms);
 termsOut.writeVInt(field.rootCode.length);
 termsOut.writeBytes(field.rootCode.bytes, field.rootCode.offset, field.rootCode.length);
-if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
+assert field.fieldInfo.getIndexOptions() != IndexOptions.NO;
+if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
 termsOut.writeVLong(field.sumTotalTermFreq);
 }
 termsOut.writeVLong(field.sumDocFreq);

View File

@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.util.Collections;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.ByteArrayDataInput;

View File

@@ -21,7 +21,7 @@ import java.io.IOException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.IndexInput;


@ -20,7 +20,7 @@ package org.apache.lucene.codecs.blocktree;
import java.io.IOException; import java.io.IOException;
import org.apache.lucene.codecs.BlockTermState; import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -278,7 +278,7 @@ final class IntersectTermsEnumFrame {
// stats // stats
termState.docFreq = statsReader.readVInt(); termState.docFreq = statsReader.readVInt();
//if (DEBUG) System.out.println(" dF=" + state.docFreq); //if (DEBUG) System.out.println(" dF=" + state.docFreq);
if (ite.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { if (ite.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
termState.totalTermFreq = termState.docFreq + statsReader.readVLong(); termState.totalTermFreq = termState.docFreq + statsReader.readVLong();
//if (DEBUG) System.out.println(" totTF=" + state.totalTermFreq); //if (DEBUG) System.out.println(" totTF=" + state.totalTermFreq);
} }


@ -23,7 +23,7 @@ import java.io.PrintStream;
import org.apache.lucene.codecs.BlockTermState; import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataInput;


@ -20,7 +20,7 @@ package org.apache.lucene.codecs.blocktree;
import java.io.IOException; import java.io.IOException;
import org.apache.lucene.codecs.BlockTermState; import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.TermsEnum.SeekStatus; import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
@ -403,7 +403,7 @@ final class SegmentTermsEnumFrame {
// stats // stats
state.docFreq = statsReader.readVInt(); state.docFreq = statsReader.readVInt();
//if (DEBUG) System.out.println(" dF=" + state.docFreq); //if (DEBUG) System.out.println(" dF=" + state.docFreq);
if (ste.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { if (ste.fr.fieldInfo.getIndexOptions() != IndexOptions.DOCS) {
state.totalTermFreq = state.docFreq + statsReader.readVLong(); state.totalTermFreq = state.docFreq + statsReader.readVLong();
//if (DEBUG) System.out.println(" totTF=" + state.totalTermFreq); //if (DEBUG) System.out.println(" totTF=" + state.totalTermFreq);
} }

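The same pattern shows up in both term-frame decoders above: totalTermFreq is stored as a delta on top of docFreq (it can never be smaller, as the writer asserts), and is simply absent for IndexOptions.DOCS fields. A condensed, hedged sketch, with statsIn and indexOptions as assumed stand-ins for the frame state:

    import java.io.IOException;
    import org.apache.lucene.index.IndexOptions;
    import org.apache.lucene.store.DataInput;

    class TermStatsReadSketch {
      static long[] readStats(DataInput statsIn, IndexOptions indexOptions) throws IOException {
        int docFreq = statsIn.readVInt();
        long totalTermFreq = -1;                           // left unset when frequencies are omitted
        if (indexOptions != IndexOptions.DOCS) {
          // delta-encoded: totalTermFreq >= docFreq whenever freqs are indexed
          totalTermFreq = docFreq + statsIn.readVLong();
        }
        return new long[] { docFreq, totalTermFreq };
      }
    }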

@ -21,11 +21,11 @@ import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.SmallFloat; import org.apache.lucene.util.SmallFloat;
import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FST;


@ -25,12 +25,12 @@ import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldInfosFormat;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
@ -159,35 +159,33 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
static { static {
// We "mirror" DocValues enum values with the constants below; let's try to ensure if we add a new DocValuesType while this format is // We "mirror" DocValues enum values with the constants below; let's try to ensure if we add a new DocValuesType while this format is
// still used for writing, we remember to fix this encoding: // still used for writing, we remember to fix this encoding:
assert DocValuesType.values().length == 5; assert DocValuesType.values().length == 6;
} }
private static byte docValuesByte(DocValuesType type) { private static byte docValuesByte(DocValuesType type) {
if (type == null) { switch(type) {
case NO:
return 0; return 0;
} else { case NUMERIC:
switch(type) { return 1;
case NUMERIC: case BINARY:
return 1; return 2;
case BINARY: case SORTED:
return 2; return 3;
case SORTED: case SORTED_SET:
return 3; return 4;
case SORTED_SET: case SORTED_NUMERIC:
return 4; return 5;
case SORTED_NUMERIC: default:
return 5; // BUG
default: throw new AssertionError("unhandled DocValuesType: " + type);
// BUG
throw new AssertionError("unhandled DocValuesType: " + type);
}
} }
} }
private static DocValuesType getDocValuesType(IndexInput input, byte b) throws IOException { private static DocValuesType getDocValuesType(IndexInput input, byte b) throws IOException {
switch(b) { switch(b) {
case 0: case 0:
return null; return DocValuesType.NO;
case 1: case 1:
return DocValuesType.NUMERIC; return DocValuesType.NUMERIC;
case 2: case 2:
@ -206,35 +204,33 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
static { static {
// We "mirror" IndexOptions enum values with the constants below; let's try to ensure if we add a new IndexOption while this format is // We "mirror" IndexOptions enum values with the constants below; let's try to ensure if we add a new IndexOption while this format is
// still used for writing, we remember to fix this encoding: // still used for writing, we remember to fix this encoding:
assert IndexOptions.values().length == 4; assert IndexOptions.values().length == 5;
} }
private static byte indexOptionsByte(IndexOptions indexOptions) { private static byte indexOptionsByte(IndexOptions indexOptions) {
if (indexOptions == null) { switch (indexOptions) {
case NO:
return 0; return 0;
} else { case DOCS:
switch (indexOptions) { return 1;
case DOCS_ONLY: case DOCS_AND_FREQS:
return 1; return 2;
case DOCS_AND_FREQS: case DOCS_AND_FREQS_AND_POSITIONS:
return 2; return 3;
case DOCS_AND_FREQS_AND_POSITIONS: case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
return 3; return 4;
case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS: default:
return 4; // BUG:
default: throw new AssertionError("unhandled IndexOptions: " + indexOptions);
// BUG:
throw new AssertionError("unhandled IndexOptions: " + indexOptions);
}
} }
} }
private static IndexOptions getIndexOptions(IndexInput input, byte b) throws IOException { private static IndexOptions getIndexOptions(IndexInput input, byte b) throws IOException {
switch (b) { switch (b) {
case 0: case 0:
return null; return IndexOptions.NO;
case 1: case 1:
return IndexOptions.DOCS_ONLY; return IndexOptions.DOCS;
case 2: case 2:
return IndexOptions.DOCS_AND_FREQS; return IndexOptions.DOCS_AND_FREQS;
case 3: case 3:

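As a side note on the encoding above (a hedged sketch, not code from this commit): the field-infos format relies on a fixed enum-to-byte table, with byte 0 now taken by the NO constants where null used to be encoded. Something like the following illustrates the invariant that the values().length asserts are guarding:

    import java.util.EnumMap;
    import org.apache.lucene.index.DocValuesType;

    class DocValuesByteTableSketch {
      static EnumMap<DocValuesType, Byte> table() {
        EnumMap<DocValuesType, Byte> codes = new EnumMap<>(DocValuesType.class);
        codes.put(DocValuesType.NO, (byte) 0);             // byte 0 used to encode null
        codes.put(DocValuesType.NUMERIC, (byte) 1);
        codes.put(DocValuesType.BINARY, (byte) 2);
        codes.put(DocValuesType.SORTED, (byte) 3);
        codes.put(DocValuesType.SORTED_SET, (byte) 4);
        codes.put(DocValuesType.SORTED_NUMERIC, (byte) 5);
        // adding a new constant must update this table and the on-disk format:
        assert codes.size() == DocValuesType.values().length;
        return codes;
      }
    }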

@ -31,7 +31,7 @@ import org.apache.lucene.codecs.PostingsWriterBase;
import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader; import org.apache.lucene.codecs.blocktree.BlockTreeTermsReader;
import org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter; import org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter;
import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermState;
@ -186,7 +186,7 @@ import org.apache.lucene.util.packed.PackedInts;
* *
* <p>The .doc file contains the lists of documents which contain each term, along * <p>The .doc file contains the lists of documents which contain each term, along
* with the frequency of the term in that document (except when frequencies are * with the frequency of the term in that document (except when frequencies are
* omitted: {@link IndexOptions#DOCS_ONLY}). It also saves skip data to the beginning of * omitted: {@link IndexOptions#DOCS}). It also saves skip data to the beginning of
* each packed or VInt block, when the length of document list is larger than packed block size.</p> * each packed or VInt block, when the length of document list is larger than packed block size.</p>
* *
* <ul> * <ul>
@ -232,7 +232,7 @@ import org.apache.lucene.util.packed.PackedInts;
* and three times in document eleven, with frequencies indexed, would be the * and three times in document eleven, with frequencies indexed, would be the
* following sequence of VInts:</p> * following sequence of VInts:</p>
* <p>15, 8, 3</p> * <p>15, 8, 3</p>
* <p>If frequencies were omitted ({@link IndexOptions#DOCS_ONLY}) it would be this * <p>If frequencies were omitted ({@link IndexOptions#DOCS}) it would be this
* sequence of VInts instead:</p> * sequence of VInts instead:</p>
* <p>7,4</p> * <p>7,4</p>
* </li> * </li>

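The 15, 8, 3 versus 7, 4 example above follows directly from the legacy VInt encoding: doc IDs are delta-coded, and when frequencies are indexed the delta is shifted left one bit, with the low bit flagging an implicit freq of 1. A minimal, self-contained sketch (not the actual writer) that reproduces those sequences:

    import java.util.ArrayList;
    import java.util.List;

    class DocDeltaFreqSketch {
      static List<Integer> encode(int[] docs, int[] freqs, boolean writeFreqs) {
        List<Integer> out = new ArrayList<>();
        int lastDoc = 0;
        for (int i = 0; i < docs.length; i++) {
          int delta = docs[i] - lastDoc;
          lastDoc = docs[i];
          if (!writeFreqs) {
            out.add(delta);                  // IndexOptions.DOCS: doc deltas only
          } else if (freqs[i] == 1) {
            out.add((delta << 1) | 1);       // low bit set: freq == 1 is implied
          } else {
            out.add(delta << 1);             // low bit clear: freq follows
            out.add(freqs[i]);
          }
        }
        return out;
      }

      public static void main(String[] args) {
        int[] docs = {7, 11};
        int[] freqs = {1, 3};
        System.out.println(encode(docs, freqs, true));   // [15, 8, 3]
        System.out.println(encode(docs, freqs, false));  // [7, 4]
      }
    }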

@ -17,17 +17,6 @@ package org.apache.lucene.codecs.lucene50;
* limitations under the License. * limitations under the License.
*/ */
import static org.apache.lucene.codecs.lucene50.ForUtil.MAX_DATA_SIZE;
import static org.apache.lucene.codecs.lucene50.ForUtil.MAX_ENCODED_SIZE;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.BLOCK_SIZE;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.DOC_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.MAX_SKIP_LEVELS;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.PAY_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.POS_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.TERMS_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.VERSION_CURRENT;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.VERSION_START;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -39,8 +28,8 @@ import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.IntBlockTermStat
import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
@ -51,6 +40,17 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import static org.apache.lucene.codecs.lucene50.ForUtil.MAX_DATA_SIZE;
import static org.apache.lucene.codecs.lucene50.ForUtil.MAX_ENCODED_SIZE;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.BLOCK_SIZE;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.DOC_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.MAX_SKIP_LEVELS;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.PAY_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.POS_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.TERMS_CODEC;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.VERSION_CURRENT;
import static org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat.VERSION_START;
/** /**
* Concrete class that reads docId(maybe frq,pos,offset,payloads) list * Concrete class that reads docId(maybe frq,pos,offset,payloads) list
* with postings format. * with postings format.


@ -18,7 +18,7 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
/** /**
@ -46,7 +46,7 @@ public class BinaryDocValuesField extends Field {
*/ */
public static final FieldType TYPE = new FieldType(); public static final FieldType TYPE = new FieldType();
static { static {
TYPE.setDocValueType(FieldInfo.DocValuesType.BINARY); TYPE.setDocValueType(DocValuesType.BINARY);
TYPE.freeze(); TYPE.freeze();
} }


@ -19,7 +19,9 @@ package org.apache.lucene.document;
import java.util.*; import java.util.*;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexDocument; import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader; // for javadoc import org.apache.lucene.index.IndexReader; // for javadoc
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StorableField;
@ -310,7 +312,7 @@ public final class Document implements IndexDocument {
return new FilterIterator<StorableField, Field>(fields.iterator()) { return new FilterIterator<StorableField, Field>(fields.iterator()) {
@Override @Override
protected boolean predicateFunction(Field field) { protected boolean predicateFunction(Field field) {
return field.type.stored() || field.type.docValueType() != null; return field.type.stored() || field.type.docValueType() != DocValuesType.NO;
} }
}; };
} }
@ -319,7 +321,7 @@ public final class Document implements IndexDocument {
return new FilterIterator<IndexableField, Field>(fields.iterator()) { return new FilterIterator<IndexableField, Field>(fields.iterator()) {
@Override @Override
protected boolean predicateFunction(Field field) { protected boolean predicateFunction(Field field) {
return field.type.indexOptions() != null; return field.type.indexOptions() != IndexOptions.NO;
} }
}; };
} }


@ -18,7 +18,7 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.analysis.NumericTokenStream; // javadocs import org.apache.lucene.analysis.NumericTokenStream; // javadocs
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.NumericRangeFilter; // javadocs import org.apache.lucene.search.NumericRangeFilter; // javadocs
import org.apache.lucene.search.NumericRangeQuery; // javadocs import org.apache.lucene.search.NumericRangeQuery; // javadocs
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -121,7 +121,7 @@ public final class DoubleField extends Field {
static { static {
TYPE_NOT_STORED.setTokenized(true); TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true); TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_NOT_STORED.setNumericType(FieldType.NumericType.DOUBLE); TYPE_NOT_STORED.setNumericType(FieldType.NumericType.DOUBLE);
TYPE_NOT_STORED.freeze(); TYPE_NOT_STORED.freeze();
} }
@ -134,7 +134,7 @@ public final class DoubleField extends Field {
static { static {
TYPE_STORED.setTokenized(true); TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true); TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_STORED.setNumericType(FieldType.NumericType.DOUBLE); TYPE_STORED.setNumericType(FieldType.NumericType.DOUBLE);
TYPE_STORED.setStored(true); TYPE_STORED.setStored(true);
TYPE_STORED.freeze(); TYPE_STORED.freeze();

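DoubleField (like the other numeric fields later in this diff) is indexed with IndexOptions.DOCS because numeric trie terms are only matched, never scored by term frequency. A hedged usage sketch, with indexing and searcher setup omitted:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.DoubleField;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.search.NumericRangeQuery;

    class DoubleFieldSketch {
      static Document priceDoc() {
        Document doc = new Document();
        doc.add(new DoubleField("price", 19.99, Field.Store.YES));   // uses TYPE_STORED above
        return doc;
      }

      static NumericRangeQuery<Double> priceRange() {
        // matches indexed prices in [10.0, 25.0]
        return NumericRangeQuery.newDoubleRange("price", 10.0, 25.0, true, true);
      }
    }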

@ -26,12 +26,13 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.document.FieldType.NumericType; import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.FieldInvertState; // javadocs
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexWriter; // javadocs import org.apache.lucene.index.IndexWriter; // javadocs
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StorableField;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.index.FieldInvertState; // javadocs
/** /**
* Expert: directly create a field for a document. Most * Expert: directly create a field for a document. Most
@ -122,7 +123,7 @@ public class Field implements IndexableField, StorableField {
if (type.stored()) { if (type.stored()) {
throw new IllegalArgumentException("fields with a Reader value cannot be stored"); throw new IllegalArgumentException("fields with a Reader value cannot be stored");
} }
if (type.indexOptions() != null && !type.tokenized()) { if (type.indexOptions() != IndexOptions.NO && !type.tokenized()) {
throw new IllegalArgumentException("non-tokenized fields must use String values"); throw new IllegalArgumentException("non-tokenized fields must use String values");
} }
@ -148,7 +149,7 @@ public class Field implements IndexableField, StorableField {
if (tokenStream == null) { if (tokenStream == null) {
throw new NullPointerException("tokenStream cannot be null"); throw new NullPointerException("tokenStream cannot be null");
} }
if (type.indexOptions() == null || !type.tokenized()) { if (type.indexOptions() == IndexOptions.NO || !type.tokenized()) {
throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized"); throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized");
} }
if (type.stored()) { if (type.stored()) {
@ -214,7 +215,7 @@ public class Field implements IndexableField, StorableField {
if (bytes == null) { if (bytes == null) {
throw new IllegalArgumentException("bytes cannot be null"); throw new IllegalArgumentException("bytes cannot be null");
} }
if (type.indexOptions() != null) { if (type.indexOptions() != IndexOptions.NO) {
throw new IllegalArgumentException("Fields with BytesRef values cannot be indexed"); throw new IllegalArgumentException("Fields with BytesRef values cannot be indexed");
} }
this.fieldsData = bytes; this.fieldsData = bytes;
@ -241,7 +242,7 @@ public class Field implements IndexableField, StorableField {
if (value == null) { if (value == null) {
throw new IllegalArgumentException("value cannot be null"); throw new IllegalArgumentException("value cannot be null");
} }
if (!type.stored() && type.indexOptions() == null) { if (!type.stored() && type.indexOptions() == IndexOptions.NO) {
throw new IllegalArgumentException("it doesn't make sense to have a field that " throw new IllegalArgumentException("it doesn't make sense to have a field that "
+ "is neither indexed nor stored"); + "is neither indexed nor stored");
} }
@ -338,7 +339,7 @@ public class Field implements IndexableField, StorableField {
if (!(fieldsData instanceof BytesRef)) { if (!(fieldsData instanceof BytesRef)) {
throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to BytesRef"); throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to BytesRef");
} }
if (type.indexOptions() != null) { if (type.indexOptions() != IndexOptions.NO) {
throw new IllegalArgumentException("cannot set a BytesRef value on an indexed field"); throw new IllegalArgumentException("cannot set a BytesRef value on an indexed field");
} }
if (value == null) { if (value == null) {
@ -419,7 +420,7 @@ public class Field implements IndexableField, StorableField {
* values from stringValue() or getBinaryValue() * values from stringValue() or getBinaryValue()
*/ */
public void setTokenStream(TokenStream tokenStream) { public void setTokenStream(TokenStream tokenStream) {
if (type.indexOptions() == null || !type.tokenized()) { if (type.indexOptions() == IndexOptions.NO || !type.tokenized()) {
throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized"); throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized");
} }
if (type.numericType() != null) { if (type.numericType() != null) {
@ -452,7 +453,7 @@ public class Field implements IndexableField, StorableField {
*/ */
public void setBoost(float boost) { public void setBoost(float boost) {
if (boost != 1.0f) { if (boost != 1.0f) {
if (type.indexOptions() == null || type.omitNorms()) { if (type.indexOptions() == IndexOptions.NO || type.omitNorms()) {
throw new IllegalArgumentException("You cannot set an index-time boost on an unindexed field, or one that omits norms"); throw new IllegalArgumentException("You cannot set an index-time boost on an unindexed field, or one that omits norms");
} }
} }
@ -502,7 +503,7 @@ public class Field implements IndexableField, StorableField {
@Override @Override
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) throws IOException { public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) throws IOException {
if (fieldType().indexOptions() == null) { if (fieldType().indexOptions() == IndexOptions.NO) {
// Not indexed // Not indexed
return null; return null;
} }


@ -18,8 +18,8 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.analysis.Analyzer; // javadocs import org.apache.lucene.analysis.Analyzer; // javadocs
import org.apache.lucene.index.FieldInfo.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.search.NumericRangeQuery; // javadocs import org.apache.lucene.search.NumericRangeQuery; // javadocs
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -50,11 +50,11 @@ public class FieldType implements IndexableFieldType {
private boolean storeTermVectorPositions; private boolean storeTermVectorPositions;
private boolean storeTermVectorPayloads; private boolean storeTermVectorPayloads;
private boolean omitNorms; private boolean omitNorms;
private IndexOptions indexOptions; private IndexOptions indexOptions = IndexOptions.NO;
private NumericType numericType; private NumericType numericType;
private boolean frozen; private boolean frozen;
private int numericPrecisionStep = NumericUtils.PRECISION_STEP_DEFAULT; private int numericPrecisionStep = NumericUtils.PRECISION_STEP_DEFAULT;
private DocValuesType docValueType; private DocValuesType docValueType = DocValuesType.NO;
/** /**
* Create a new mutable FieldType with all of the properties from <code>ref</code> * Create a new mutable FieldType with all of the properties from <code>ref</code>
@ -263,7 +263,7 @@ public class FieldType implements IndexableFieldType {
* {@inheritDoc} * {@inheritDoc}
* <p> * <p>
* The default is {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS}. * The default is {@link IndexOptions#DOCS_AND_FREQS_AND_POSITIONS}.
* @see #setIndexOptions(org.apache.lucene.index.FieldInfo.IndexOptions) * @see #setIndexOptions(IndexOptions)
*/ */
@Override @Override
public IndexOptions indexOptions() { public IndexOptions indexOptions() {
@ -279,6 +279,9 @@ public class FieldType implements IndexableFieldType {
*/ */
public void setIndexOptions(IndexOptions value) { public void setIndexOptions(IndexOptions value) {
checkIfFrozen(); checkIfFrozen();
if (value == null) {
throw new NullPointerException("IndexOptions cannot be null");
}
this.indexOptions = value; this.indexOptions = value;
} }
@ -341,7 +344,7 @@ public class FieldType implements IndexableFieldType {
if (stored()) { if (stored()) {
result.append("stored"); result.append("stored");
} }
if (indexOptions != null) { if (indexOptions != IndexOptions.NO) {
if (result.length() > 0) if (result.length() > 0)
result.append(","); result.append(",");
result.append("indexed"); result.append("indexed");
@ -374,9 +377,10 @@ public class FieldType implements IndexableFieldType {
result.append(numericPrecisionStep); result.append(numericPrecisionStep);
} }
} }
if (docValueType != null) { if (docValueType != DocValuesType.NO) {
if (result.length() > 0) if (result.length() > 0) {
result.append(","); result.append(",");
}
result.append("docValueType="); result.append("docValueType=");
result.append(docValueType); result.append(docValueType);
} }
@ -390,7 +394,7 @@ public class FieldType implements IndexableFieldType {
* {@inheritDoc} * {@inheritDoc}
* <p> * <p>
* The default is <code>null</code> (no docValues) * The default is <code>null</code> (no docValues)
* @see #setDocValueType(org.apache.lucene.index.FieldInfo.DocValuesType) * @see #setDocValueType(DocValuesType)
*/ */
@Override @Override
public DocValuesType docValueType() { public DocValuesType docValueType() {
@ -406,6 +410,9 @@ public class FieldType implements IndexableFieldType {
*/ */
public void setDocValueType(DocValuesType type) { public void setDocValueType(DocValuesType type) {
checkIfFrozen(); checkIfFrozen();
if (type == null) {
throw new NullPointerException("DocValuesType cannot be null");
}
docValueType = type; docValueType = type;
} }
@ -414,7 +421,7 @@ public class FieldType implements IndexableFieldType {
final int prime = 31; final int prime = 31;
int result = 1; int result = 1;
result = prime * result + ((docValueType == null) ? 0 : docValueType.hashCode()); result = prime * result + ((docValueType == null) ? 0 : docValueType.hashCode());
result = prime * result + ((indexOptions == null) ? 0 : indexOptions.hashCode()); result = prime * result + indexOptions.hashCode();
result = prime * result + numericPrecisionStep; result = prime * result + numericPrecisionStep;
result = prime * result + ((numericType == null) ? 0 : numericType.hashCode()); result = prime * result + ((numericType == null) ? 0 : numericType.hashCode());
result = prime * result + (omitNorms ? 1231 : 1237); result = prime * result + (omitNorms ? 1231 : 1237);

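To make the behavioral change in FieldType concrete (a hedged sketch, not code from this commit): IndexOptions.NO and DocValuesType.NO are now the defaults on a fresh FieldType, the setters reject null, and callers express "not indexed" or "no doc values" explicitly:

    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.index.DocValuesType;
    import org.apache.lucene.index.IndexOptions;

    class FieldTypeSketch {
      static FieldType keywordType() {
        FieldType ft = new FieldType();
        assert ft.indexOptions() == IndexOptions.NO;     // new default: not indexed
        assert ft.docValueType() == DocValuesType.NO;    // new default: no doc values
        ft.setStored(true);
        ft.setTokenized(false);
        ft.setIndexOptions(IndexOptions.DOCS);           // was IndexOptions.DOCS_ONLY
        // ft.setIndexOptions(null);                     // would now throw NullPointerException
        ft.freeze();
        return ft;
      }
    }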

@ -18,7 +18,7 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.analysis.NumericTokenStream; // javadocs import org.apache.lucene.analysis.NumericTokenStream; // javadocs
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.NumericRangeFilter; // javadocs import org.apache.lucene.search.NumericRangeFilter; // javadocs
import org.apache.lucene.search.NumericRangeQuery; // javadocs import org.apache.lucene.search.NumericRangeQuery; // javadocs
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -121,7 +121,7 @@ public final class FloatField extends Field {
static { static {
TYPE_NOT_STORED.setTokenized(true); TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true); TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_NOT_STORED.setNumericType(FieldType.NumericType.FLOAT); TYPE_NOT_STORED.setNumericType(FieldType.NumericType.FLOAT);
TYPE_NOT_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32); TYPE_NOT_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_NOT_STORED.freeze(); TYPE_NOT_STORED.freeze();
@ -135,7 +135,7 @@ public final class FloatField extends Field {
static { static {
TYPE_STORED.setTokenized(true); TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true); TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_STORED.setNumericType(FieldType.NumericType.FLOAT); TYPE_STORED.setNumericType(FieldType.NumericType.FLOAT);
TYPE_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32); TYPE_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_STORED.setStored(true); TYPE_STORED.setStored(true);


@ -18,7 +18,7 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.analysis.NumericTokenStream; // javadocs import org.apache.lucene.analysis.NumericTokenStream; // javadocs
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.NumericRangeFilter; // javadocs import org.apache.lucene.search.NumericRangeFilter; // javadocs
import org.apache.lucene.search.NumericRangeQuery; // javadocs import org.apache.lucene.search.NumericRangeQuery; // javadocs
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -121,7 +121,7 @@ public final class IntField extends Field {
static { static {
TYPE_NOT_STORED.setTokenized(true); TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true); TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_NOT_STORED.setNumericType(FieldType.NumericType.INT); TYPE_NOT_STORED.setNumericType(FieldType.NumericType.INT);
TYPE_NOT_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32); TYPE_NOT_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_NOT_STORED.freeze(); TYPE_NOT_STORED.freeze();
@ -135,7 +135,7 @@ public final class IntField extends Field {
static { static {
TYPE_STORED.setTokenized(true); TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true); TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_STORED.setNumericType(FieldType.NumericType.INT); TYPE_STORED.setNumericType(FieldType.NumericType.INT);
TYPE_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32); TYPE_STORED.setNumericPrecisionStep(NumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_STORED.setStored(true); TYPE_STORED.setStored(true);


@ -18,7 +18,7 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.analysis.NumericTokenStream; // javadocs import org.apache.lucene.analysis.NumericTokenStream; // javadocs
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.NumericRangeFilter; // javadocs import org.apache.lucene.search.NumericRangeFilter; // javadocs
import org.apache.lucene.search.NumericRangeQuery; // javadocs import org.apache.lucene.search.NumericRangeQuery; // javadocs
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
@ -131,7 +131,7 @@ public final class LongField extends Field {
static { static {
TYPE_NOT_STORED.setTokenized(true); TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true); TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_NOT_STORED.setNumericType(FieldType.NumericType.LONG); TYPE_NOT_STORED.setNumericType(FieldType.NumericType.LONG);
TYPE_NOT_STORED.freeze(); TYPE_NOT_STORED.freeze();
} }
@ -144,7 +144,7 @@ public final class LongField extends Field {
static { static {
TYPE_STORED.setTokenized(true); TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true); TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_STORED.setNumericType(FieldType.NumericType.LONG); TYPE_STORED.setNumericType(FieldType.NumericType.LONG);
TYPE_STORED.setStored(true); TYPE_STORED.setStored(true);
TYPE_STORED.freeze(); TYPE_STORED.freeze();


@ -17,7 +17,7 @@ package org.apache.lucene.document;
* limitations under the License. * limitations under the License.
*/ */
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.DocValuesType;
/** /**
* <p> * <p>
@ -40,7 +40,7 @@ public class NumericDocValuesField extends Field {
*/ */
public static final FieldType TYPE = new FieldType(); public static final FieldType TYPE = new FieldType();
static { static {
TYPE.setDocValueType(FieldInfo.DocValuesType.NUMERIC); TYPE.setDocValueType(DocValuesType.NUMERIC);
TYPE.freeze(); TYPE.freeze();
} }


@ -17,7 +17,7 @@ package org.apache.lucene.document;
* limitations under the License. * limitations under the License.
*/ */
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
/** /**
@ -43,7 +43,7 @@ public class SortedDocValuesField extends Field {
*/ */
public static final FieldType TYPE = new FieldType(); public static final FieldType TYPE = new FieldType();
static { static {
TYPE.setDocValueType(FieldInfo.DocValuesType.SORTED); TYPE.setDocValueType(DocValuesType.SORTED);
TYPE.freeze(); TYPE.freeze();
} }


@ -17,7 +17,7 @@ package org.apache.lucene.document;
* limitations under the License. * limitations under the License.
*/ */
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
/** /**
@ -50,7 +50,7 @@ public class SortedNumericDocValuesField extends Field {
*/ */
public static final FieldType TYPE = new FieldType(); public static final FieldType TYPE = new FieldType();
static { static {
TYPE.setDocValueType(FieldInfo.DocValuesType.SORTED_NUMERIC); TYPE.setDocValueType(DocValuesType.SORTED_NUMERIC);
TYPE.freeze(); TYPE.freeze();
} }


@ -17,7 +17,7 @@ package org.apache.lucene.document;
* limitations under the License. * limitations under the License.
*/ */
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
/** /**
@ -44,7 +44,7 @@ public class SortedSetDocValuesField extends Field {
*/ */
public static final FieldType TYPE = new FieldType(); public static final FieldType TYPE = new FieldType();
static { static {
TYPE.setDocValueType(FieldInfo.DocValuesType.SORTED_SET); TYPE.setDocValueType(DocValuesType.SORTED_SET);
TYPE.freeze(); TYPE.freeze();
} }


@ -17,7 +17,7 @@ package org.apache.lucene.document;
* limitations under the License. * limitations under the License.
*/ */
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
/** A field that is indexed but not tokenized: the entire /** A field that is indexed but not tokenized: the entire
* String value is indexed as a single token. For example * String value is indexed as a single token. For example
@ -37,12 +37,12 @@ public final class StringField extends Field {
static { static {
TYPE_NOT_STORED.setOmitNorms(true); TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_NOT_STORED.setTokenized(false); TYPE_NOT_STORED.setTokenized(false);
TYPE_NOT_STORED.freeze(); TYPE_NOT_STORED.freeze();
TYPE_STORED.setOmitNorms(true); TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS_ONLY); TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
TYPE_STORED.setStored(true); TYPE_STORED.setStored(true);
TYPE_STORED.setTokenized(false); TYPE_STORED.setTokenized(false);
TYPE_STORED.freeze(); TYPE_STORED.freeze();

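A small usage sketch for orientation (the query side is assumed, not part of this file): StringField indexes the exact value as a single token with IndexOptions.DOCS, which is what makes it a good fit for exact-match identifiers:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.TermQuery;

    class StringFieldSketch {
      static Document idDoc() {
        Document doc = new Document();
        doc.add(new StringField("id", "doc-42", Field.Store.YES));
        return doc;
      }

      static TermQuery byId() {
        // the whole untokenized value is the term
        return new TermQuery(new Term("id", "doc-42"));
      }
    }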

@ -20,7 +20,7 @@ package org.apache.lucene.document;
import java.io.Reader; import java.io.Reader;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
/** A field that is indexed and tokenized, without term /** A field that is indexed and tokenized, without term
* vectors. For example this would be used on a 'body' * vectors. For example this would be used on a 'body'


@ -1,15 +1,5 @@
package org.apache.lucene.index; package org.apache.lucene.index;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PagedGrowableWriter;
import org.apache.lucene.util.packed.PagedMutable;
/* /*
* Licensed to the Apache Software Foundation (ASF) under one or more * Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with * contributor license agreements. See the NOTICE file distributed with
@ -27,6 +17,15 @@ import org.apache.lucene.util.packed.PagedMutable;
* limitations under the License. * limitations under the License.
*/ */
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PagedGrowableWriter;
import org.apache.lucene.util.packed.PagedMutable;
/** /**
* A {@link DocValuesFieldUpdates} which holds updates of documents, of a single * A {@link DocValuesFieldUpdates} which holds updates of documents, of a single
* {@link BinaryDocValuesField}. * {@link BinaryDocValuesField}.
@ -102,7 +101,7 @@ class BinaryDocValuesFieldUpdates extends DocValuesFieldUpdates {
private final int bitsPerValue; private final int bitsPerValue;
public BinaryDocValuesFieldUpdates(String field, int maxDoc) { public BinaryDocValuesFieldUpdates(String field, int maxDoc) {
super(field, FieldInfo.DocValuesType.BINARY); super(field, DocValuesType.BINARY);
bitsPerValue = PackedInts.bitsRequired(maxDoc - 1); bitsPerValue = PackedInts.bitsRequired(maxDoc - 1);
docs = new PagedMutable(1, PAGE_SIZE, bitsPerValue, PackedInts.COMPACT); docs = new PagedMutable(1, PAGE_SIZE, bitsPerValue, PackedInts.COMPACT);
offsets = new PagedGrowableWriter(1, PAGE_SIZE, 1, PackedInts.FAST); offsets = new PagedGrowableWriter(1, PAGE_SIZE, 1, PackedInts.FAST);


@ -33,7 +33,6 @@ import java.util.Map;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.CheckIndex.Status.DocValuesStatus; import org.apache.lucene.index.CheckIndex.Status.DocValuesStatus;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;


@ -29,8 +29,6 @@ import org.apache.lucene.codecs.NormsConsumer;
import org.apache.lucene.codecs.NormsFormat; import org.apache.lucene.codecs.NormsFormat;
import org.apache.lucene.codecs.StoredFieldsWriter; import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
@ -364,7 +362,10 @@ final class DefaultIndexingChain extends DocConsumer {
} }
DocValuesType dvType = fieldType.docValueType(); DocValuesType dvType = fieldType.docValueType();
if (dvType != null) { if (dvType == null) {
throw new NullPointerException("docValueType cannot be null (field: \"" + fieldName + "\")");
}
if (dvType != DocValuesType.NO) {
indexDocValue(fp, dvType, field); indexDocValue(fp, dvType, field);
} }
} }
@ -378,7 +379,7 @@ final class DefaultIndexingChain extends DocConsumer {
} }
private static void verifyFieldType(String name, IndexableFieldType ft) { private static void verifyFieldType(String name, IndexableFieldType ft) {
if (ft.indexOptions() == null) { if (ft.indexOptions() == IndexOptions.NO) {
if (ft.storeTermVectors()) { if (ft.storeTermVectors()) {
throw new IllegalArgumentException("cannot store term vectors " throw new IllegalArgumentException("cannot store term vectors "
+ "for a field that is not indexed (field=\"" + name + "\")"); + "for a field that is not indexed (field=\"" + name + "\")");

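From the caller's side, the verifyFieldType check above now keys off IndexOptions.NO rather than a null indexOptions. A hedged sketch of the misuse it rejects; the open IndexWriter is assumed:

    import java.io.IOException;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.index.IndexWriter;

    class VectorsWithoutIndexingSketch {
      static void rejected(IndexWriter writer) throws IOException {
        FieldType ft = new FieldType();
        ft.setStored(true);                 // indexOptions stays at the IndexOptions.NO default
        ft.setStoreTermVectors(true);
        Document doc = new Document();
        doc.add(new Field("body", "some text", ft));
        writer.addDocument(doc);            // IllegalArgumentException: cannot store term vectors
                                            // for a field that is not indexed (field="body")
      }
    }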

@ -1,12 +1,5 @@
package org.apache.lucene.index; package org.apache.lucene.index;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.index.NumericDocValuesFieldUpdates;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.packed.PagedGrowableWriter;
/* /*
* Licensed to the Apache Software Foundation (ASF) under one or more * Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with * contributor license agreements. See the NOTICE file distributed with
@ -24,6 +17,12 @@ import org.apache.lucene.util.packed.PagedGrowableWriter;
* limitations under the License. * limitations under the License.
*/ */
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.packed.PagedGrowableWriter;
/** /**
* Holds updates of a single DocValues field, for a set of documents. * Holds updates of a single DocValues field, for a set of documents.
* *
@ -98,7 +97,7 @@ abstract class DocValuesFieldUpdates {
return ramBytesPerDoc; return ramBytesPerDoc;
} }
DocValuesFieldUpdates getUpdates(String field, FieldInfo.DocValuesType type) { DocValuesFieldUpdates getUpdates(String field, DocValuesType type) {
switch (type) { switch (type) {
case NUMERIC: case NUMERIC:
return numericDVUpdates.get(field); return numericDVUpdates.get(field);
@ -109,7 +108,7 @@ abstract class DocValuesFieldUpdates {
} }
} }
DocValuesFieldUpdates newUpdates(String field, FieldInfo.DocValuesType type, int maxDoc) { DocValuesFieldUpdates newUpdates(String field, DocValuesType type, int maxDoc) {
switch (type) { switch (type) {
case NUMERIC: case NUMERIC:
assert numericDVUpdates.get(field) == null; assert numericDVUpdates.get(field) == null;
@ -133,10 +132,13 @@ abstract class DocValuesFieldUpdates {
} }
final String field; final String field;
final FieldInfo.DocValuesType type; final DocValuesType type;
protected DocValuesFieldUpdates(String field, FieldInfo.DocValuesType type) { protected DocValuesFieldUpdates(String field, DocValuesType type) {
this.field = field; this.field = field;
if (type == null) {
throw new NullPointerException("DocValuesType cannot be null");
}
this.type = type; this.type = type;
} }


@ -0,0 +1,59 @@
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* DocValues types. Note that DocValues is strongly typed, so a
* field cannot have different types across different documents.
*/
public enum DocValuesType {
/**
* No doc values for this field.
*/
NO,
/**
* A per-document Number
*/
NUMERIC,
/**
* A per-document byte[]. Values may be larger than
* 32766 bytes, but different codecs may enforce their own limits.
*/
BINARY,
/**
* A pre-sorted byte[]. Fields with this type only store distinct byte values
* and store an additional offset pointer per document to dereference the shared
* byte[]. The stored byte[] is presorted and allows access via document id,
* ordinal and by-value. Values must be <= 32766 bytes.
*/
SORTED,
/**
* A pre-sorted Number[]. Fields with this type store numeric values in sorted
* order according to {@link Long#compare(long, long)}.
*/
SORTED_NUMERIC,
/**
* A pre-sorted Set&lt;byte[]&gt;. Fields with this type only store distinct byte values
* and store additional offset pointers per document to dereference the shared
* byte[]s. The stored byte[] is presorted and allows access via document id,
* ordinal and by-value. Values must be <= 32766 bytes.
*/
SORTED_SET,
}

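For reference (a hedged sketch, not part of the new file above): each constant corresponds to one of the existing doc-values field classes, which set the matching type on their FieldType:

    import org.apache.lucene.document.BinaryDocValuesField;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.NumericDocValuesField;
    import org.apache.lucene.document.SortedDocValuesField;
    import org.apache.lucene.document.SortedNumericDocValuesField;
    import org.apache.lucene.document.SortedSetDocValuesField;
    import org.apache.lucene.util.BytesRef;

    class DocValuesFieldsSketch {
      static Document allTypes() {
        Document doc = new Document();
        doc.add(new NumericDocValuesField("popularity", 42L));                  // NUMERIC
        doc.add(new BinaryDocValuesField("blob", new BytesRef("raw bytes")));   // BINARY
        doc.add(new SortedDocValuesField("category", new BytesRef("books")));   // SORTED
        doc.add(new SortedNumericDocValuesField("prices", 1999L));              // SORTED_NUMERIC
        doc.add(new SortedSetDocValuesField("tags", new BytesRef("lucene")));   // SORTED_SET
        return doc;
      }
    }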

@ -1,15 +1,5 @@
package org.apache.lucene.index; package org.apache.lucene.index;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_INT;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
/* /*
* Licensed to the Apache Software Foundation (ASF) under one or more * Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with * contributor license agreements. See the NOTICE file distributed with
@ -27,6 +17,16 @@ import org.apache.lucene.util.RamUsageEstimator;
* limitations under the License. * limitations under the License.
*/ */
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_CHAR;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_INT;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_HEADER;
import static org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
/** An in-place update to a DocValues field. */ /** An in-place update to a DocValues field. */
abstract class DocValuesUpdate { abstract class DocValuesUpdate {
@ -39,7 +39,7 @@ abstract class DocValuesUpdate {
*/ */
private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*NUM_BYTES_INT; private static final int RAW_SIZE_IN_BYTES = 8*NUM_BYTES_OBJECT_HEADER + 8*NUM_BYTES_OBJECT_REF + 8*NUM_BYTES_INT;
final FieldInfo.DocValuesType type; final DocValuesType type;
final Term term; final Term term;
final String field; final String field;
final Object value; final Object value;
@ -52,7 +52,7 @@ abstract class DocValuesUpdate {
* @param field the {@link NumericDocValuesField} to update * @param field the {@link NumericDocValuesField} to update
* @param value the updated value * @param value the updated value
*/ */
protected DocValuesUpdate(FieldInfo.DocValuesType type, Term term, String field, Object value) { protected DocValuesUpdate(DocValuesType type, Term term, String field, Object value) {
this.type = type; this.type = type;
this.term = term; this.term = term;
this.field = field; this.field = field;
@ -82,7 +82,7 @@ abstract class DocValuesUpdate {
private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF; private static final long RAW_VALUE_SIZE_IN_BYTES = NUM_BYTES_ARRAY_HEADER + 2*NUM_BYTES_INT + NUM_BYTES_OBJECT_REF;
BinaryDocValuesUpdate(Term term, String field, BytesRef value) { BinaryDocValuesUpdate(Term term, String field, BytesRef value) {
super(FieldInfo.DocValuesType.BINARY, term, field, value); super(DocValuesType.BINARY, term, field, value);
} }
@Override @Override
@ -96,7 +96,7 @@ abstract class DocValuesUpdate {
static final class NumericDocValuesUpdate extends DocValuesUpdate { static final class NumericDocValuesUpdate extends DocValuesUpdate {
NumericDocValuesUpdate(Term term, String field, Long value) { NumericDocValuesUpdate(Term term, String field, Long value) {
super(FieldInfo.DocValuesType.NUMERIC, term, field, value); super(DocValuesType.NUMERIC, term, field, value);
} }
@Override @Override


@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.io.IOException; import java.io.IOException;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.Bits; // javadocs import org.apache.lucene.util.Bits; // javadocs
@ -50,7 +49,7 @@ public abstract class DocsEnum extends DocIdSetIterator {
/** /**
* Returns term frequency in the current document, or 1 if the field was * Returns term frequency in the current document, or 1 if the field was
* indexed with {@link IndexOptions#DOCS_ONLY}. Do not call this before * indexed with {@link IndexOptions#DOCS}. Do not call this before
* {@link #nextDoc} is first called, nor after {@link #nextDoc} returns * {@link #nextDoc} is first called, nor after {@link #nextDoc} returns
* {@link DocIdSetIterator#NO_MORE_DOCS}. * {@link DocIdSetIterator#NO_MORE_DOCS}.
* *

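A small consumption sketch for the freq() contract described above; the TermsEnum is assumed to already be positioned on a term, and this is not code from the commit:

    import java.io.IOException;
    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.search.DocIdSetIterator;

    class DocsEnumSketch {
      static void printPostings(TermsEnum termsEnum) throws IOException {
        DocsEnum docsEnum = termsEnum.docs(null, null);          // no live-docs filter, no reuse
        int doc;
        while ((doc = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
          // freq() reports 1 for fields indexed with IndexOptions.DOCS
          System.out.println("doc=" + doc + " freq=" + docsEnum.freq());
        }
      }
    }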

@ -34,90 +34,19 @@ public final class FieldInfo {
/** Internal field number */ /** Internal field number */
public final int number; public final int number;
private DocValuesType docValueType; private DocValuesType docValueType = DocValuesType.NO;
// True if any document indexed term vectors // True if any document indexed term vectors
private boolean storeTermVector; private boolean storeTermVector;
private boolean omitNorms; // omit norms associated with indexed fields private boolean omitNorms; // omit norms associated with indexed fields
private IndexOptions indexOptions; private IndexOptions indexOptions = IndexOptions.NO;
private boolean storePayloads; // whether this field stores payloads together with term positions private boolean storePayloads; // whether this field stores payloads together with term positions
private Map<String,String> attributes; private Map<String,String> attributes;
private long dvGen; private long dvGen;
/**
* Controls how much information is stored in the postings lists.
* @lucene.experimental
*/
public static enum IndexOptions {
// NOTE: order is important here; FieldInfo uses this
// order to merge two conflicting IndexOptions (always
// "downgrades" by picking the lowest).
/**
* Only documents are indexed: term frequencies and positions are omitted.
* Phrase and other positional queries on the field will throw an exception, and scoring
* will behave as if any term in the document appears only once.
*/
// TODO: maybe rename to just DOCS?
DOCS_ONLY,
/**
* Only documents and term frequencies are indexed: positions are omitted.
* This enables normal scoring, except Phrase and other positional queries
* will throw an exception.
*/
DOCS_AND_FREQS,
/**
* Indexes documents, frequencies and positions.
* This is a typical default for full-text search: full scoring is enabled
* and positional queries are supported.
*/
DOCS_AND_FREQS_AND_POSITIONS,
/**
* Indexes documents, frequencies, positions and offsets.
* Character offsets are encoded alongside the positions.
*/
DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS,
}
/**
* DocValues types.
* Note that DocValues is strongly typed, so a field cannot have different types
* across different documents.
*/
public static enum DocValuesType {
/**
* A per-document Number
*/
NUMERIC,
/**
* A per-document byte[]. Values may be larger than
* 32766 bytes, but different codecs may enforce their own limits.
*/
BINARY,
/**
* A pre-sorted byte[]. Fields with this type only store distinct byte values
* and store an additional offset pointer per document to dereference the shared
* byte[]. The stored byte[] is presorted and allows access via document id,
* ordinal and by-value. Values must be <= 32766 bytes.
*/
SORTED,
/**
* A pre-sorted Number[]. Fields with this type store numeric values in sorted
* order according to {@link Long#compare(long, long)}.
*/
SORTED_NUMERIC,
/**
* A pre-sorted Set&lt;byte[]&gt;. Fields with this type only store distinct byte values
* and store additional offset pointers per document to dereference the shared
* byte[]s. The stored byte[] is presorted and allows access via document id,
* ordinal and by-value. Values must be <= 32766 bytes.
*/
SORTED_SET
}
/** /**
* Sole constructor. * Sole constructor.
* *
@ -126,19 +55,24 @@ public final class FieldInfo {
public FieldInfo(String name, int number, boolean storeTermVector, boolean omitNorms, public FieldInfo(String name, int number, boolean storeTermVector, boolean omitNorms,
boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues, boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues,
long dvGen, Map<String,String> attributes) { long dvGen, Map<String,String> attributes) {
if (docValues == null) {
throw new NullPointerException("DocValuesType cannot be null");
}
if (indexOptions == null) {
throw new NullPointerException("IndexOptions cannot be null");
}
this.name = name; this.name = name;
this.number = number; this.number = number;
this.docValueType = docValues; this.docValueType = docValues;
if (indexOptions != null) { this.indexOptions = indexOptions;
if (indexOptions != IndexOptions.NO) {
this.storeTermVector = storeTermVector; this.storeTermVector = storeTermVector;
this.storePayloads = storePayloads; this.storePayloads = storePayloads;
this.omitNorms = omitNorms; this.omitNorms = omitNorms;
this.indexOptions = indexOptions;
} else { // for non-indexed fields, leave defaults } else { // for non-indexed fields, leave defaults
this.storeTermVector = false; this.storeTermVector = false;
this.storePayloads = false; this.storePayloads = false;
this.omitNorms = false; this.omitNorms = false;
this.indexOptions = null;
} }
this.dvGen = dvGen; this.dvGen = dvGen;
this.attributes = attributes; this.attributes = attributes;
@ -150,7 +84,7 @@ public final class FieldInfo {
* Always returns true (or throws IllegalStateException) * Always returns true (or throws IllegalStateException)
*/ */
public boolean checkConsistency() { public boolean checkConsistency() {
if (indexOptions != null) { if (indexOptions != IndexOptions.NO) {
// Cannot store payloads unless positions are indexed: // Cannot store payloads unless positions are indexed:
if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0 && storePayloads) { if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0 && storePayloads) {
throw new IllegalStateException("indexed field '" + name + "' cannot have payloads without positions"); throw new IllegalStateException("indexed field '" + name + "' cannot have payloads without positions");
@ -180,26 +114,29 @@ public final class FieldInfo {
// should only be called by FieldInfos#addOrUpdate // should only be called by FieldInfos#addOrUpdate
void update(boolean storeTermVector, boolean omitNorms, boolean storePayloads, IndexOptions indexOptions) { void update(boolean storeTermVector, boolean omitNorms, boolean storePayloads, IndexOptions indexOptions) {
if (indexOptions == null) {
throw new NullPointerException("IndexOptions cannot be null");
}
//System.out.println("FI.update field=" + name + " indexed=" + indexed + " omitNorms=" + omitNorms + " this.omitNorms=" + this.omitNorms); //System.out.println("FI.update field=" + name + " indexed=" + indexed + " omitNorms=" + omitNorms + " this.omitNorms=" + this.omitNorms);
if (this.indexOptions != indexOptions) { if (this.indexOptions != indexOptions) {
if (this.indexOptions == null) { if (this.indexOptions == IndexOptions.NO) {
this.indexOptions = indexOptions; this.indexOptions = indexOptions;
} else if (indexOptions != null) { } else if (indexOptions != IndexOptions.NO) {
// downgrade // downgrade
this.indexOptions = this.indexOptions.compareTo(indexOptions) < 0 ? this.indexOptions : indexOptions; this.indexOptions = this.indexOptions.compareTo(indexOptions) < 0 ? this.indexOptions : indexOptions;
} }
} }
if (this.indexOptions != null) { // if updated field data is not for indexing, leave the updates out if (this.indexOptions != IndexOptions.NO) { // if updated field data is not for indexing, leave the updates out
this.storeTermVector |= storeTermVector; // once vector, always vector this.storeTermVector |= storeTermVector; // once vector, always vector
this.storePayloads |= storePayloads; this.storePayloads |= storePayloads;
// Awkward: only drop norms if incoming update is indexed: // Awkward: only drop norms if incoming update is indexed:
if (indexOptions != null && this.omitNorms != omitNorms) { if (indexOptions != IndexOptions.NO && this.omitNorms != omitNorms) {
this.omitNorms = true; // if omitNorms is required at least once, norms remain off for life this.omitNorms = true; // if omitNorms is required at least once, norms remain off for life
} }
} }
if (this.indexOptions == null || this.indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { if (this.indexOptions == IndexOptions.NO || this.indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
// cannot store payloads if we don't store positions: // cannot store payloads if we don't store positions:
this.storePayloads = false; this.storePayloads = false;
} }
@ -207,14 +144,14 @@ public final class FieldInfo {
} }
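Worth spelling out (illustrative, not from the patch): because the constants are declared from least to most information, update() can merge two conflicting options for the same field simply by keeping the lower one. A sketch of that effect, under the ordering assumption noted in the enum:

    import org.apache.lucene.index.IndexOptions;

    // Enum order doubles as an information ordering, so the "downgrade" is a min().
    static IndexOptions merge(IndexOptions previous, IndexOptions incoming) {
      return previous.compareTo(incoming) < 0 ? previous : incoming;
    }

    // merge(DOCS_AND_FREQS_AND_POSITIONS, DOCS) -> DOCS

This is the behavior the TestOmitPositions assertions further down verify (docs + docs/freqs = docs, and so on).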
void setDocValuesType(DocValuesType type) { void setDocValuesType(DocValuesType type) {
if (docValueType != null && docValueType != type) { if (docValueType != DocValuesType.NO && docValueType != type) {
throw new IllegalArgumentException("cannot change DocValues type from " + docValueType + " to " + type + " for field \"" + name + "\""); throw new IllegalArgumentException("cannot change DocValues type from " + docValueType + " to " + type + " for field \"" + name + "\"");
} }
docValueType = type; docValueType = type;
assert checkConsistency(); assert checkConsistency();
} }
/** Returns IndexOptions for the field, or null if the field is not indexed */ /** Returns IndexOptions for the field, or IndexOptions.NO if the field is not indexed */
public IndexOptions getIndexOptions() { public IndexOptions getIndexOptions() {
return indexOptions; return indexOptions;
} }
@ -223,11 +160,12 @@ public final class FieldInfo {
* Returns true if this field has any docValues. * Returns true if this field has any docValues.
*/ */
public boolean hasDocValues() { public boolean hasDocValues() {
return docValueType != null; return docValueType != DocValuesType.NO;
} }
/** /**
* Returns {@link DocValuesType} of the docValues. this may be null if the field has no docvalues. * Returns {@link DocValuesType} of the docValues; this is
* {@code DocValuesType.NO} if the field has no docvalues.
*/ */
public DocValuesType getDocValuesType() { public DocValuesType getDocValuesType() {
return docValueType; return docValueType;
@ -253,7 +191,7 @@ public final class FieldInfo {
} }
void setStorePayloads() { void setStorePayloads() {
if (indexOptions != null && indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) { if (indexOptions != IndexOptions.NO && indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) {
storePayloads = true; storePayloads = true;
} }
assert checkConsistency(); assert checkConsistency();
@ -274,10 +212,10 @@ public final class FieldInfo {
} }
/** /**
* Returns true if this field is indexed (has non-null {@link #getIndexOptions}). * Returns true if this field is indexed ({@link #getIndexOptions} is not IndexOptions.NO).
*/ */
public boolean isIndexed() { public boolean isIndexed() {
return indexOptions != null; return indexOptions != IndexOptions.NO;
} }
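To summarize the caller-side change (a sketch under the new API, not code from the commit): null checks against these accessors become comparisons with the NO sentinels:

    import org.apache.lucene.index.DocValuesType;
    import org.apache.lucene.index.FieldInfo;
    import org.apache.lucene.index.IndexOptions;

    static boolean isIndexed(FieldInfo fi) {
      // before this change: fi.getIndexOptions() != null
      return fi.getIndexOptions() != IndexOptions.NO;
    }

    static boolean hasDocValues(FieldInfo fi) {
      // before this change: fi.getDocValuesType() != null
      return fi.getDocValuesType() != DocValuesType.NO;
    }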
/** /**


@ -25,9 +25,6 @@ import java.util.Map;
import java.util.SortedMap; import java.util.SortedMap;
import java.util.TreeMap; import java.util.TreeMap;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
/** /**
* Collection of {@link FieldInfo}s (accessible by number or by name). * Collection of {@link FieldInfo}s (accessible by number or by name).
* @lucene.experimental * @lucene.experimental
@ -72,7 +69,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
hasVectors |= info.hasVectors(); hasVectors |= info.hasVectors();
hasProx |= info.isIndexed() && info.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; hasProx |= info.isIndexed() && info.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
hasFreq |= info.isIndexed() && info.getIndexOptions() != IndexOptions.DOCS_ONLY; hasFreq |= info.isIndexed() && info.getIndexOptions() != IndexOptions.DOCS;
hasOffsets |= info.isIndexed() && info.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; hasOffsets |= info.isIndexed() && info.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
hasNorms |= info.hasNorms(); hasNorms |= info.hasNorms();
hasDocValues |= info.hasDocValues(); hasDocValues |= info.hasDocValues();
@ -190,18 +187,17 @@ public class FieldInfos implements Iterable<FieldInfo> {
* is used as the field number. * is used as the field number.
*/ */
synchronized int addOrGet(String fieldName, int preferredFieldNumber, DocValuesType dvType) { synchronized int addOrGet(String fieldName, int preferredFieldNumber, DocValuesType dvType) {
if (dvType != null) { if (dvType != DocValuesType.NO) {
DocValuesType currentDVType = docValuesType.get(fieldName); DocValuesType currentDVType = docValuesType.get(fieldName);
if (currentDVType == null) { if (currentDVType == null) {
docValuesType.put(fieldName, dvType); docValuesType.put(fieldName, dvType);
} else if (currentDVType != null && currentDVType != dvType) { } else if (currentDVType != DocValuesType.NO && currentDVType != dvType) {
throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + fieldName + "\""); throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + fieldName + "\"");
} }
} }
Integer fieldNumber = nameToNumber.get(fieldName); Integer fieldNumber = nameToNumber.get(fieldName);
if (fieldNumber == null) { if (fieldNumber == null) {
final Integer preferredBoxed = Integer.valueOf(preferredFieldNumber); final Integer preferredBoxed = Integer.valueOf(preferredFieldNumber);
if (preferredFieldNumber != -1 && !numberToName.containsKey(preferredBoxed)) { if (preferredFieldNumber != -1 && !numberToName.containsKey(preferredBoxed)) {
// cool - we can use this number globally // cool - we can use this number globally
fieldNumber = preferredBoxed; fieldNumber = preferredBoxed;
@ -212,7 +208,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
} }
fieldNumber = lowestUnassignedFieldNumber; fieldNumber = lowestUnassignedFieldNumber;
} }
assert fieldNumber >= 0;
numberToName.put(fieldNumber, fieldName); numberToName.put(fieldNumber, fieldName);
nameToNumber.put(fieldName, fieldNumber); nameToNumber.put(fieldName, fieldNumber);
} }
@ -228,7 +224,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
throw new IllegalArgumentException("field name \"" + name + "\" is already mapped to field number \"" + nameToNumber.get(name) + "\", not \"" + number + "\""); throw new IllegalArgumentException("field name \"" + name + "\" is already mapped to field number \"" + nameToNumber.get(name) + "\", not \"" + number + "\"");
} }
DocValuesType currentDVType = docValuesType.get(name); DocValuesType currentDVType = docValuesType.get(name);
if (dvType != null && currentDVType != null && dvType != currentDVType) { if (dvType != DocValuesType.NO && currentDVType != null && currentDVType != DocValuesType.NO && dvType != currentDVType) {
throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + name + "\""); throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + name + "\"");
} }
} }
@ -298,6 +294,9 @@ public class FieldInfos implements Iterable<FieldInfo> {
private FieldInfo addOrUpdateInternal(String name, int preferredFieldNumber, private FieldInfo addOrUpdateInternal(String name, int preferredFieldNumber,
boolean storeTermVector, boolean storeTermVector,
boolean omitNorms, boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues) { boolean omitNorms, boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues) {
if (docValues == null) {
throw new NullPointerException("DocValuesType cannot be null");
}
FieldInfo fi = fieldInfo(name); FieldInfo fi = fieldInfo(name);
if (fi == null) { if (fi == null) {
// This field wasn't yet added to this in-RAM // This field wasn't yet added to this in-RAM
@ -313,7 +312,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
} else { } else {
fi.update(storeTermVector, omitNorms, storePayloads, indexOptions); fi.update(storeTermVector, omitNorms, storePayloads, indexOptions);
if (docValues != null) { if (docValues != DocValuesType.NO) {
// Only pay the synchronization cost if fi does not already have a DVType // Only pay the synchronization cost if fi does not already have a DVType
boolean updateGlobal = !fi.hasDocValues(); boolean updateGlobal = !fi.hasDocValues();
if (updateGlobal) { if (updateGlobal) {


@ -23,7 +23,6 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FreqProxTermsWriterPerField.FreqProxPostingsArray; import org.apache.lucene.index.FreqProxTermsWriterPerField.FreqProxPostingsArray;
import org.apache.lucene.util.AttributeSource; // javadocs import org.apache.lucene.util.AttributeSource; // javadocs
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;


@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -50,7 +49,7 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField {
public FreqProxTermsWriterPerField(FieldInvertState invertState, TermsHash termsHash, FieldInfo fieldInfo, TermsHashPerField nextPerField) { public FreqProxTermsWriterPerField(FieldInvertState invertState, TermsHash termsHash, FieldInfo fieldInfo, TermsHashPerField nextPerField) {
super(fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 ? 2 : 1, invertState, termsHash, nextPerField, fieldInfo); super(fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 ? 2 : 1, invertState, termsHash, nextPerField, fieldInfo);
IndexOptions indexOptions = fieldInfo.getIndexOptions(); IndexOptions indexOptions = fieldInfo.getIndexOptions();
assert indexOptions != null; assert indexOptions != IndexOptions.NO;
hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
@ -196,7 +195,7 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField {
@Override @Override
ParallelPostingsArray createPostingsArray(int size) { ParallelPostingsArray createPostingsArray(int size) {
IndexOptions indexOptions = fieldInfo.getIndexOptions(); IndexOptions indexOptions = fieldInfo.getIndexOptions();
assert indexOptions != null; assert indexOptions != IndexOptions.NO;
boolean hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; boolean hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
boolean hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; boolean hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
boolean hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; boolean hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;


@ -0,0 +1,55 @@
package org.apache.lucene.index;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Controls how much information is stored in the postings lists.
* @lucene.experimental
*/
public enum IndexOptions {
// NOTE: order is important here; FieldInfo uses this
// order to merge two conflicting IndexOptions (always
// "downgrades" by picking the lowest).
/** Not indexed */
NO,
/**
* Only documents are indexed: term frequencies and positions are omitted.
* Phrase and other positional queries on the field will throw an exception, and scoring
* will behave as if any term in the document appears only once.
*/
DOCS,
/**
* Only documents and term frequencies are indexed: positions are omitted.
* This enables normal scoring, except Phrase and other positional queries
* will throw an exception.
*/
DOCS_AND_FREQS,
/**
* Indexes documents, frequencies and positions.
* This is a typical default for full-text search: full scoring is enabled
* and positional queries are supported.
*/
DOCS_AND_FREQS_AND_POSITIONS,
/**
* Indexes documents, frequencies, positions and offsets.
* Character offsets are encoded alongside the positions.
*/
DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS,
}
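For readers tracking the rename, a minimal indexing sketch (assumed field name and values, not part of the commit) using the new top-level enum; DOCS is the option previously spelled DOCS_ONLY:

    import java.io.IOException;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.IndexOptions;
    import org.apache.lucene.index.IndexWriter;

    static void addDocsOnlyField(IndexWriter writer) throws IOException {
      FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
      ft.setIndexOptions(IndexOptions.DOCS); // postings keep doc ids only
      ft.freeze();
      Document doc = new Document();
      doc.add(new Field("contents", "aaa bbb ccc", ft));
      writer.addDocument(doc);
    }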


@ -45,7 +45,6 @@ import org.apache.lucene.codecs.FieldInfosFormat;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValuesUpdate.BinaryDocValuesUpdate; import org.apache.lucene.index.DocValuesUpdate.BinaryDocValuesUpdate;
import org.apache.lucene.index.DocValuesUpdate.NumericDocValuesUpdate; import org.apache.lucene.index.DocValuesUpdate.NumericDocValuesUpdate;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfos.FieldNumbers; import org.apache.lucene.index.FieldInfos.FieldNumbers;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.MergeState.CheckAbort; import org.apache.lucene.index.MergeState.CheckAbort;
@ -1499,6 +1498,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
final Field f = updates[i]; final Field f = updates[i];
final DocValuesType dvType = f.fieldType().docValueType(); final DocValuesType dvType = f.fieldType().docValueType();
if (dvType == null) { if (dvType == null) {
throw new NullPointerException("DocValuesType cannot be null (field: \"" + f.name() + "\")");
}
if (dvType == DocValuesType.NO) {
throw new IllegalArgumentException("can only update NUMERIC or BINARY fields! field=" + f.name()); throw new IllegalArgumentException("can only update NUMERIC or BINARY fields! field=" + f.name());
} }
if (!globalFieldNumberMap.contains(f.name(), dvType)) { if (!globalFieldNumberMap.contains(f.name(), dvType)) {


@ -64,8 +64,8 @@ public interface IndexableField extends GeneralField {
* the range of that encoding. * the range of that encoding.
* <p> * <p>
* It is illegal to return a boost other than 1.0f for a field that is not * It is illegal to return a boost other than 1.0f for a field that is not
* indexed ({@link IndexableFieldType#indexOptions()} is null) or omits normalization values * indexed ({@link IndexableFieldType#indexOptions()} is IndexOptions.NO) or
* ({@link IndexableFieldType#omitNorms()} returns true). * omits normalization values ({@link IndexableFieldType#omitNorms()} returns true).
* *
* @see Similarity#computeNorm(FieldInvertState) * @see Similarity#computeNorm(FieldInvertState)
* @see DefaultSimilarity#encodeNormValue(float) * @see DefaultSimilarity#encodeNormValue(float)
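A small illustration of that constraint (mine, with an assumed field name): a non-default boost only makes sense on a field that is indexed and keeps norms:

    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;

    static Field boostedTitle(String text) {
      TextField title = new TextField("title", text, Field.Store.NO); // indexed, norms kept
      title.setBoost(2.0f); // legal only because the field is indexed and does not omit norms
      return title;
    }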


@ -18,8 +18,6 @@ package org.apache.lucene.index;
*/ */
import org.apache.lucene.analysis.Analyzer; // javadocs import org.apache.lucene.analysis.Analyzer; // javadocs
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
/** /**
* Describes the properties of a field. * Describes the properties of a field.
@ -34,7 +32,8 @@ public interface IndexableFieldType {
* True if this field's value should be analyzed by the * True if this field's value should be analyzed by the
* {@link Analyzer}. * {@link Analyzer}.
* <p> * <p>
* This has no effect if {@link #indexOptions()} returns null. * This has no effect if {@link #indexOptions()} returns
* IndexOptions.NO.
*/ */
// TODO: shouldn't we remove this? Whether/how a field is // TODO: shouldn't we remove this? Whether/how a field is
// tokenized is an impl detail under Field? // tokenized is an impl detail under Field?
@ -48,7 +47,8 @@ public interface IndexableFieldType {
* can be accessed in a document-oriented way from * can be accessed in a document-oriented way from
* {@link IndexReader#getTermVector(int,String)}. * {@link IndexReader#getTermVector(int,String)}.
* <p> * <p>
* This option is illegal if {@link #indexOptions()} returns null. * This option is illegal if {@link #indexOptions()} returns
* IndexOptions.NO.
*/ */
public boolean storeTermVectors(); public boolean storeTermVectors();
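Concretely (an illustrative sketch, not part of the change), term vectors may only be enabled on a field whose index options are not IndexOptions.NO:

    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.IndexOptions;

    static FieldType vectorsEnabledType() {
      FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
      ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); // indexed, so vectors are legal
      ft.setStoreTermVectors(true);
      ft.setStoreTermVectorPositions(true);
      ft.freeze();
      return ft;
    }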
@ -88,13 +88,12 @@ public interface IndexableFieldType {
public boolean omitNorms(); public boolean omitNorms();
/** {@link IndexOptions}, describing what should be /** {@link IndexOptions}, describing what should be
* recorded into the inverted index, or null if this field * recorded into the inverted index */
* is not indexed */
public IndexOptions indexOptions(); public IndexOptions indexOptions();
/** /**
* DocValues {@link DocValuesType}: if non-null then the field's value * DocValues {@link DocValuesType}: how the field's value will be indexed
* will be indexed into docValues. * into docValues.
*/ */
public DocValuesType docValueType(); public DocValuesType docValueType();
} }


@ -86,7 +86,7 @@ class NumericDocValuesFieldUpdates extends DocValuesFieldUpdates {
private int size; private int size;
public NumericDocValuesFieldUpdates(String field, int maxDoc) { public NumericDocValuesFieldUpdates(String field, int maxDoc) {
super(field, FieldInfo.DocValuesType.NUMERIC); super(field, DocValuesType.NUMERIC);
bitsPerValue = PackedInts.bitsRequired(maxDoc - 1); bitsPerValue = PackedInts.bitsRequired(maxDoc - 1);
docs = new PagedMutable(1, PAGE_SIZE, bitsPerValue, PackedInts.COMPACT); docs = new PagedMutable(1, PAGE_SIZE, bitsPerValue, PackedInts.COMPACT);
values = new PagedGrowableWriter(1, PAGE_SIZE, 1, PackedInts.FAST); values = new PagedGrowableWriter(1, PAGE_SIZE, 1, PackedInts.FAST);


@ -32,7 +32,6 @@ import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.NormsProducer; import org.apache.lucene.codecs.NormsProducer;
import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader; import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
@ -340,7 +339,7 @@ public final class SegmentReader extends LeafReader implements Accountable {
// Field does not exist // Field does not exist
return null; return null;
} }
if (fi.getDocValuesType() == null) { if (fi.getDocValuesType() == DocValuesType.NO) {
// Field was not indexed with doc values // Field was not indexed with doc values
return null; return null;
} }
@ -385,7 +384,7 @@ public final class SegmentReader extends LeafReader implements Accountable {
// Field does not exist // Field does not exist
return null; return null;
} }
if (fi.getDocValuesType() == null) { if (fi.getDocValuesType() == DocValuesType.NO) {
// Field was not indexed with doc values // Field was not indexed with doc values
return null; return null;
} }


@ -23,7 +23,6 @@ import java.util.Map;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues; import org.apache.lucene.index.MultiDocValues.MultiSortedDocValues;
import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues; import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
import org.apache.lucene.index.MultiDocValues.OrdinalMap; import org.apache.lucene.index.MultiDocValues.OrdinalMap;


@ -111,7 +111,7 @@ final class TermVectorsConsumerPerField extends TermsHashPerField {
@Override @Override
boolean start(IndexableField field, boolean first) { boolean start(IndexableField field, boolean first) {
assert field.fieldType().indexOptions() != null; assert field.fieldType().indexOptions() != IndexOptions.NO;
if (first) { if (first) {


@ -161,7 +161,7 @@ while ((docid = docsAndPositionsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS
of occurrences of this term across all documents. Note that this statistic of occurrences of this term across all documents. Note that this statistic
is unavailable (returns <code>-1</code>) if term frequencies were omitted is unavailable (returns <code>-1</code>) if term frequencies were omitted
from the index from the index
({@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_ONLY DOCS_ONLY}) ({@link org.apache.lucene.index.IndexOptions#DOCS DOCS})
for the field. Like docFreq(), it will also count occurrences that appear in for the field. Like docFreq(), it will also count occurrences that appear in
deleted documents. deleted documents.
</ul> </ul>
@ -194,7 +194,7 @@ while ((docid = docsAndPositionsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS
field, and like totalTermFreq() it will also count occurrences that appear in field, and like totalTermFreq() it will also count occurrences that appear in
deleted documents, and will be unavailable (returns <code>-1</code>) if term deleted documents, and will be unavailable (returns <code>-1</code>) if term
frequencies were omitted from the index frequencies were omitted from the index
({@link org.apache.lucene.index.FieldInfo.IndexOptions#DOCS_ONLY DOCS_ONLY}) ({@link org.apache.lucene.index.IndexOptions#DOCS DOCS})
for the field. for the field.
</ul> </ul>
</p> </p>
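A short reader-side sketch (assumed field and term, not from the commit) showing where the -1 sentinel described above surfaces:

    import java.io.IOException;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.store.Directory;

    static void printTermStats(Directory dir) throws IOException {
      try (IndexReader reader = DirectoryReader.open(dir)) {
        Term term = new Term("contents", "aaa");
        System.out.println("docFreq = " + reader.docFreq(term));
        // -1 when the field was indexed with IndexOptions.DOCS (frequencies omitted):
        System.out.println("totalTermFreq = " + reader.totalTermFreq(term));
      }
    }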


@ -25,19 +25,19 @@ import java.util.Random;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.Fields; import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.automaton.AutomatonTestUtil;
import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.AutomatonTestUtil;
import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp; import org.apache.lucene.util.automaton.RegExp;
public class TestMockAnalyzer extends BaseTokenStreamTestCase { public class TestMockAnalyzer extends BaseTokenStreamTestCase {
@ -305,7 +305,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
final RandomIndexWriter writer = new RandomIndexWriter(random(), newDirectory()); final RandomIndexWriter writer = new RandomIndexWriter(random(), newDirectory());
final Document doc = new Document(); final Document doc = new Document();
final FieldType ft = new FieldType(); final FieldType ft = new FieldType();
ft.setIndexOptions(IndexOptions.DOCS_ONLY); ft.setIndexOptions(IndexOptions.DOCS);
ft.setTokenized(true); ft.setTokenized(true);
ft.setStoreTermVectors(true); ft.setStoreTermVectors(true);
ft.setStoreTermVectorPositions(true); ft.setStoreTermVectorPositions(true);


@ -22,8 +22,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig;
@ -66,7 +65,10 @@ public class TestBlockPostingsFormat2 extends LuceneTestCase {
private Document newDocument() { private Document newDocument() {
Document doc = new Document(); Document doc = new Document();
for (IndexOptions option : FieldInfo.IndexOptions.values()) { for (IndexOptions option : IndexOptions.values()) {
if (option == IndexOptions.NO) {
continue;
}
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
// turn on tvs for a cross-check, since we rely upon checkindex in this test (for now) // turn on tvs for a cross-check, since we rely upon checkindex in this test (for now)
ft.setStoreTermVectors(true); ft.setStoreTermVectors(true);


@ -32,18 +32,18 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
@ -90,7 +90,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
FieldType docsOnlyType = new FieldType(TextField.TYPE_NOT_STORED); FieldType docsOnlyType = new FieldType(TextField.TYPE_NOT_STORED);
// turn this on for a cross-check // turn this on for a cross-check
docsOnlyType.setStoreTermVectors(true); docsOnlyType.setStoreTermVectors(true);
docsOnlyType.setIndexOptions(IndexOptions.DOCS_ONLY); docsOnlyType.setIndexOptions(IndexOptions.DOCS);
FieldType docsAndFreqsType = new FieldType(TextField.TYPE_NOT_STORED); FieldType docsAndFreqsType = new FieldType(TextField.TYPE_NOT_STORED);
// turn this on for a cross-check // turn this on for a cross-check


@ -25,7 +25,7 @@ import java.util.List;
import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StorableField;
@ -65,7 +65,7 @@ public class TestDocument extends LuceneTestCase {
assertTrue(binaryFld.binaryValue() != null); assertTrue(binaryFld.binaryValue() != null);
assertTrue(binaryFld.fieldType().stored()); assertTrue(binaryFld.fieldType().stored());
assertNull(binaryFld.fieldType().indexOptions()); assertEquals(IndexOptions.NO, binaryFld.fieldType().indexOptions());
String binaryTest = doc.getBinaryValue("binary").utf8ToString(); String binaryTest = doc.getBinaryValue("binary").utf8ToString();
assertTrue(binaryTest.equals(binaryVal)); assertTrue(binaryTest.equals(binaryVal));


@ -18,8 +18,8 @@ package org.apache.lucene.document;
*/ */
import org.apache.lucene.document.FieldType.NumericType; import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.FieldInfo.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
/** simple testcases for concrete impl of IndexableFieldType */ /** simple testcases for concrete impl of IndexableFieldType */


@ -24,13 +24,11 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TimeUnits; import org.apache.lucene.util.TimeUnits;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
/** /**
@ -66,7 +64,7 @@ public class Test2BPostings extends LuceneTestCase {
Document doc = new Document(); Document doc = new Document();
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
ft.setOmitNorms(true); ft.setOmitNorms(true);
ft.setIndexOptions(IndexOptions.DOCS_ONLY); ft.setIndexOptions(IndexOptions.DOCS);
Field field = new Field("field", new MyTokenStream(), ft); Field field = new Field("field", new MyTokenStream(), ft);
doc.add(field); doc.add(field);


@ -26,17 +26,13 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.util.LuceneTestCase.Monster; import org.apache.lucene.util.LuceneTestCase.Monster;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
/** /**
* Test indexes 2B docs with 65k freqs each, * Test indexes 2B docs with 65k freqs each,
* so you get > Integer.MAX_VALUE postings data for the term * so you get > Integer.MAX_VALUE postings data for the term


@ -31,7 +31,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.BaseDirectoryWrapper;
@ -42,7 +41,6 @@ import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.Monster; import org.apache.lucene.util.LuceneTestCase.Monster;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits; import org.apache.lucene.util.TimeUnits;
@ -197,7 +195,7 @@ public class Test2BTerms extends LuceneTestCase {
final MyTokenStream ts = new MyTokenStream(random(), TERMS_PER_DOC); final MyTokenStream ts = new MyTokenStream(random(), TERMS_PER_DOC);
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setIndexOptions(IndexOptions.DOCS_ONLY); customType.setIndexOptions(IndexOptions.DOCS);
customType.setOmitNorms(true); customType.setOmitNorms(true);
Field field = new Field("field", ts, customType); Field field = new Field("field", ts, customType);
doc.add(field); doc.add(field);


@ -34,7 +34,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PhraseQuery;


@ -28,7 +28,6 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;


@ -30,8 +30,6 @@ import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
@ -114,10 +112,10 @@ public class TestCodecs extends LuceneTestCase {
public boolean omitNorms() { return false; } public boolean omitNorms() { return false; }
@Override @Override
public IndexOptions indexOptions() { return omitTF ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; } public IndexOptions indexOptions() { return omitTF ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
@Override @Override
public DocValuesType docValueType() { return null; } public DocValuesType docValueType() { return DocValuesType.NO; }
}); });
if (storePayloads) { if (storePayloads) {
fieldInfo.setStorePayloads(); fieldInfo.setStorePayloads();


@ -27,7 +27,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.AttributeSource;
@ -274,7 +273,7 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(newField("f1", "v2", customType2)); doc.add(newField("f1", "v2", customType2));
// f2 has no TF // f2 has no TF
FieldType customType3 = new FieldType(TextField.TYPE_NOT_STORED); FieldType customType3 = new FieldType(TextField.TYPE_NOT_STORED);
customType3.setIndexOptions(IndexOptions.DOCS_ONLY); customType3.setIndexOptions(IndexOptions.DOCS);
Field f = newField("f2", "v1", customType3); Field f = newField("f2", "v1", customType3);
doc.add(f); doc.add(f);
doc.add(newField("f2", "v2", customType2)); doc.add(newField("f2", "v2", customType2));
@ -293,7 +292,7 @@ public class TestDocumentWriter extends LuceneTestCase {
assertEquals("omitTermFreqAndPositions field bit should not be set for f1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions()); assertEquals("omitTermFreqAndPositions field bit should not be set for f1", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions());
// f2 // f2
assertTrue("f2 should have norms", fi.fieldInfo("f2").hasNorms()); assertTrue("f2 should have norms", fi.fieldInfo("f2").hasNorms());
assertEquals("omitTermFreqAndPositions field bit should be set for f2", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").getIndexOptions()); assertEquals("omitTermFreqAndPositions field bit should be set for f2", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
reader.close(); reader.close();
} }
} }


@ -23,7 +23,6 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;


@ -17,7 +17,6 @@ package org.apache.lucene.index;
* limitations under the License. * limitations under the License.
*/ */
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.*; import java.util.*;
@ -27,7 +26,6 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor; import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.BaseDirectory; import org.apache.lucene.store.BaseDirectory;
import org.apache.lucene.store.BufferedIndexInput; import org.apache.lucene.store.BufferedIndexInput;
@ -96,7 +94,7 @@ public class TestFieldsReader extends LuceneTestCase {
assertTrue(field != null); assertTrue(field != null);
assertFalse(field.fieldType().storeTermVectors()); assertFalse(field.fieldType().storeTermVectors());
assertFalse(field.fieldType().omitNorms()); assertFalse(field.fieldType().omitNorms());
assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_ONLY); assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS);
DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(DocHelper.TEXT_FIELD_3_KEY); DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(DocHelper.TEXT_FIELD_3_KEY);
reader.document(0, visitor); reader.document(0, visitor);


@ -54,7 +54,6 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
@ -1247,7 +1246,7 @@ public class TestIndexWriter extends LuceneTestCase {
Field f = new Field("binary", b, 10, 17, customType); Field f = new Field("binary", b, 10, 17, customType);
// TODO: this is evil, changing the type after creating the field: // TODO: this is evil, changing the type after creating the field:
customType.setIndexOptions(IndexOptions.DOCS_ONLY); customType.setIndexOptions(IndexOptions.DOCS);
final MockTokenizer doc1field1 = new MockTokenizer(MockTokenizer.WHITESPACE, false); final MockTokenizer doc1field1 = new MockTokenizer(MockTokenizer.WHITESPACE, false);
doc1field1.setReader(new StringReader("doc1field1")); doc1field1.setReader(new StringReader("doc1field1"));
f.setTokenStream(doc1field1); f.setTokenStream(doc1field1);
@ -1807,7 +1806,7 @@ public class TestIndexWriter extends LuceneTestCase {
docsAndFreqs.setIndexOptions(IndexOptions.DOCS_AND_FREQS); docsAndFreqs.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
FieldType docsOnly = new FieldType(TextField.TYPE_NOT_STORED); FieldType docsOnly = new FieldType(TextField.TYPE_NOT_STORED);
docsOnly.setIndexOptions(IndexOptions.DOCS_ONLY); docsOnly.setIndexOptions(IndexOptions.DOCS);
Document doc = new Document(); Document doc = new Document();
doc.add(new Field("field", "a b c", docsAndFreqs)); doc.add(new Field("field", "a b c", docsAndFreqs));


@ -48,7 +48,6 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PhraseQuery;
@ -58,8 +57,8 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException; import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;


@ -190,7 +190,6 @@ public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
if (failed == null) { if (failed == null) {
failed = e; failed = e;
} }
return;
} }
} }
} }


@ -28,7 +28,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
@ -58,7 +57,7 @@ public class TestIndexableField extends LuceneTestCase {
@Override @Override
public boolean storeTermVectors() { public boolean storeTermVectors() {
return indexOptions() != null && counter % 2 == 1 && counter % 10 != 9; return indexOptions() != IndexOptions.NO && counter % 2 == 1 && counter % 10 != 9;
} }
@Override @Override
@ -82,13 +81,13 @@ public class TestIndexableField extends LuceneTestCase {
} }
@Override @Override
public FieldInfo.IndexOptions indexOptions() { public IndexOptions indexOptions() {
return counter%10 == 3 ? null : FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; return counter%10 == 3 ? IndexOptions.NO : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
} }
@Override @Override
public DocValuesType docValueType() { public DocValuesType docValueType() {
return null; return DocValuesType.NO;
} }
}; };
@ -203,7 +202,7 @@ public class TestIndexableField extends LuceneTestCase {
next = new MyField(finalBaseCount + (fieldUpto++-1)); next = new MyField(finalBaseCount + (fieldUpto++-1));
} }
if (next != null && next.fieldType().indexOptions() != null) return true; if (next != null && next.fieldType().indexOptions() != IndexOptions.NO) return true;
else return this.hasNext(); else return this.hasNext();
} }


@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/ */
import java.io.IOException; import java.io.IOException;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
@ -25,14 +26,13 @@ import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
@SuppressCodecs({ "SimpleText", "Memory", "Direct" }) @SuppressCodecs({ "SimpleText", "Memory", "Direct" })
@ -267,7 +267,7 @@ public class TestLongPostings extends LuceneTestCase {
// a weaker form of testLongPostings, that doesnt check positions // a weaker form of testLongPostings, that doesnt check positions
public void testLongPostingsNoPositions() throws Exception { public void testLongPostingsNoPositions() throws Exception {
doTestLongPostingsNoPositions(IndexOptions.DOCS_ONLY); doTestLongPostingsNoPositions(IndexOptions.DOCS);
doTestLongPostingsNoPositions(IndexOptions.DOCS_AND_FREQS); doTestLongPostingsNoPositions(IndexOptions.DOCS_AND_FREQS);
} }
@ -373,7 +373,7 @@ public class TestLongPostings extends LuceneTestCase {
final DocsEnum docs; final DocsEnum docs;
final DocsEnum postings; final DocsEnum postings;
if (options == IndexOptions.DOCS_ONLY) { if (options == IndexOptions.DOCS) {
docs = TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, DocsEnum.FLAG_NONE); docs = TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, DocsEnum.FLAG_NONE);
postings = null; postings = null;
} else { } else {


@ -23,7 +23,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
@ -73,7 +72,7 @@ public class TestOmitPositions extends LuceneTestCase {
// f1,f2,f3: docs only // f1,f2,f3: docs only
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
ft.setIndexOptions(IndexOptions.DOCS_ONLY); ft.setIndexOptions(IndexOptions.DOCS);
Field f1 = newField("f1", "This field has docs only", ft); Field f1 = newField("f1", "This field has docs only", ft);
d.add(f1); d.add(f1);
@@ -157,19 +156,19 @@ public class TestOmitPositions extends LuceneTestCase {
     SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     // docs + docs = docs
-    assertEquals(IndexOptions.DOCS_ONLY, fi.fieldInfo("f1").getIndexOptions());
+    assertEquals(IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions());
     // docs + docs/freqs = docs
-    assertEquals(IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").getIndexOptions());
+    assertEquals(IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
     // docs + docs/freqs/pos = docs
-    assertEquals(IndexOptions.DOCS_ONLY, fi.fieldInfo("f3").getIndexOptions());
+    assertEquals(IndexOptions.DOCS, fi.fieldInfo("f3").getIndexOptions());
     // docs/freqs + docs = docs
-    assertEquals(IndexOptions.DOCS_ONLY, fi.fieldInfo("f4").getIndexOptions());
+    assertEquals(IndexOptions.DOCS, fi.fieldInfo("f4").getIndexOptions());
     // docs/freqs + docs/freqs = docs/freqs
     assertEquals(IndexOptions.DOCS_AND_FREQS, fi.fieldInfo("f5").getIndexOptions());
     // docs/freqs + docs/freqs/pos = docs/freqs
     assertEquals(IndexOptions.DOCS_AND_FREQS, fi.fieldInfo("f6").getIndexOptions());
     // docs/freqs/pos + docs = docs
-    assertEquals(IndexOptions.DOCS_ONLY, fi.fieldInfo("f7").getIndexOptions());
+    assertEquals(IndexOptions.DOCS, fi.fieldInfo("f7").getIndexOptions());
     // docs/freqs/pos + docs/freqs = docs/freqs
     assertEquals(IndexOptions.DOCS_AND_FREQS, fi.fieldInfo("f8").getIndexOptions());
     // docs/freqs/pos + docs/freqs/pos = docs/freqs/pos
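
The asserts above pin down the merge rule when one field is indexed with different options across documents: the option with fewer postings features wins. Below is a minimal standalone sketch of that rule against the new enum; merge() is an illustrative helper, not Lucene API, and it assumes IndexOptions declares its constants in order from NO up to DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS.

import org.apache.lucene.index.IndexOptions;

public class IndexOptionsMergeSketch {

  // Illustrative helper (assumed enum declaration order NO < DOCS < ... < OFFSETS):
  // keep the option with fewer postings features, mirroring the asserts above.
  static IndexOptions merge(IndexOptions a, IndexOptions b) {
    return a.compareTo(b) <= 0 ? a : b;
  }

  public static void main(String[] args) {
    // docs + docs/freqs = docs
    System.out.println(merge(IndexOptions.DOCS, IndexOptions.DOCS_AND_FREQS));
    // docs/freqs + docs/freqs/pos = docs/freqs
    System.out.println(merge(IndexOptions.DOCS_AND_FREQS, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
  }
}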

View File

@@ -25,7 +25,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.CollectionStatistics;
@@ -66,7 +65,7 @@ public class TestOmitTf extends LuceneTestCase {
   private static final FieldType normalType = new FieldType(TextField.TYPE_NOT_STORED);
   static {
-    omitType.setIndexOptions(IndexOptions.DOCS_ONLY);
+    omitType.setIndexOptions(IndexOptions.DOCS);
   }
   // Tests whether the DocumentWriter correctly enable the
@@ -107,8 +106,8 @@ public class TestOmitTf extends LuceneTestCase {
     SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
-    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f1").getIndexOptions());
-    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").getIndexOptions());
+    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions());
+    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
     reader.close();
     ram.close();
@@ -159,8 +158,8 @@ public class TestOmitTf extends LuceneTestCase {
     SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
-    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f1").getIndexOptions());
-    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").getIndexOptions());
+    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f1").getIndexOptions());
+    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
     reader.close();
     ram.close();
@@ -203,7 +202,7 @@ public class TestOmitTf extends LuceneTestCase {
     SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(ram));
     FieldInfos fi = reader.getFieldInfos();
     assertEquals("OmitTermFreqAndPositions field bit should not be set.", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").getIndexOptions());
-    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").getIndexOptions());
+    assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS, fi.fieldInfo("f2").getIndexOptions());
     reader.close();
     ram.close();
@@ -451,7 +450,7 @@ public class TestOmitTf extends LuceneTestCase {
         newIndexWriterConfig(new MockAnalyzer(random())));
     Document doc = new Document();
     FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-    ft.setIndexOptions(IndexOptions.DOCS_ONLY);
+    ft.setIndexOptions(IndexOptions.DOCS);
     ft.freeze();
     Field f = newField("foo", "bar", ft);
     doc.add(f);

View File

@@ -37,7 +37,6 @@ import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
@@ -65,7 +64,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
     Document doc = new Document();
     FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-    ft.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+    ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
     if (random().nextBoolean()) {
       ft.setStoreTermVectors(true);
       ft.setStoreTermVectorPositions(random().nextBoolean());
@@ -133,7 +132,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
     RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
     FieldType ft = new FieldType(TextField.TYPE_STORED);
-    ft.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+    ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
     if (random().nextBoolean()) {
       ft.setStoreTermVectors(true);
       ft.setStoreTermVectorOffsets(random().nextBoolean());
@@ -231,7 +230,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
     // TODO: randomize what IndexOptions we use; also test
     // changing this up in one IW buffered segment...:
-    ft.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+    ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
     if (random().nextBoolean()) {
       ft.setStoreTermVectors(true);
       ft.setStoreTermVectorOffsets(random().nextBoolean());

View File

@@ -176,7 +176,7 @@ public class TestSegmentReader extends LuceneTestCase {
     // test omit norms
     for (int i=0; i<DocHelper.fields.length; i++) {
       IndexableField f = DocHelper.fields[i];
-      if (f.fieldType().indexOptions() != null) {
+      if (f.fieldType().indexOptions() != IndexOptions.NO) {
         assertEquals(reader.getNormValues(f.name()) != null, !f.fieldType().omitNorms());
         assertEquals(reader.getNormValues(f.name()) != null, !DocHelper.noNorms.containsKey(f.name()));
         if (reader.getNormValues(f.name()) == null) {
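
The hunk above shows the new way to ask whether a field type is indexed at all: compare against the IndexOptions.NO sentinel instead of null. A small self-contained sketch of that idiom (the field types below are illustrative, not taken from DocHelper):

import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexOptions;

public class IndexedCheckSketch {
  public static void main(String[] args) {
    // An ordinary indexed text field: index options are a real value, not NO.
    FieldType indexed = new FieldType(TextField.TYPE_NOT_STORED);

    // A stored-only field: indexing is switched off with the explicit sentinel.
    FieldType storedOnly = new FieldType(TextField.TYPE_STORED);
    storedOnly.setIndexOptions(IndexOptions.NO);

    // Old idiom: ft.indexOptions() != null
    // New idiom: ft.indexOptions() != IndexOptions.NO
    System.out.println(indexed.indexOptions() != IndexOptions.NO);    // true
    System.out.println(storedOnly.indexOptions() != IndexOptions.NO); // false
  }
}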

View File

@@ -31,7 +31,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.TermQuery;

View File

@@ -24,7 +24,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
@@ -163,7 +163,7 @@ public class TestSimilarity2 extends LuceneTestCase {
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
     FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-    ft.setIndexOptions(IndexOptions.DOCS_ONLY);
+    ft.setIndexOptions(IndexOptions.DOCS);
     ft.freeze();
     Field f = newField("foo", "bar", ft);
     doc.add(f);
@@ -188,7 +188,7 @@ public class TestSimilarity2 extends LuceneTestCase {
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
     FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
-    ft.setIndexOptions(IndexOptions.DOCS_ONLY);
+    ft.setIndexOptions(IndexOptions.DOCS);
     ft.setOmitNorms(true);
     ft.freeze();
     Field f = newField("foo", "bar", ft);

View File

@@ -26,6 +26,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.FieldInvertState;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
@@ -111,7 +112,7 @@ public class TestSimilarityBase extends LuceneTestCase {
     for (int i = 0; i < docs.length; i++) {
       Document d = new Document();
       FieldType ft = new FieldType(TextField.TYPE_STORED);
-      ft.setIndexOptions(null);
+      ft.setIndexOptions(IndexOptions.NO);
       d.add(newField(FIELD_ID, Integer.toString(i), ft));
       d.add(newTextField(FIELD_BODY, docs[i], Field.Store.YES));
       writer.addDocument(d);
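
The ft.setIndexOptions(IndexOptions.NO) call above replaces the old setIndexOptions(null) for a field that is stored but never inverted. A hedged sketch of the same pattern outside the test framework's newField() helper; the "id" field name and value are made up for illustration:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexOptions;

public class StoredOnlyFieldSketch {
  public static void main(String[] args) {
    FieldType ft = new FieldType(TextField.TYPE_STORED);
    // Before LUCENE-6039 this was written as ft.setIndexOptions(null).
    ft.setIndexOptions(IndexOptions.NO); // stored, but not indexed
    ft.freeze();

    Document d = new Document();
    d.add(new Field("id", "42", ft));
    System.out.println(d);
  }
}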

View File

@@ -22,7 +22,7 @@ import java.util.Arrays;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 /**
  * Add an instance of this to your {@link Document} for every facet label.

View File

@@ -20,7 +20,7 @@ package org.apache.lucene.facet.sortedset;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.facet.FacetField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 /** Add an instance of this to your Document for every facet
  * label to be indexed via SortedSetDocValues. */

View File

@@ -24,7 +24,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.Facets;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.util.BytesRef;
 /** Add an instance of this to your {@link Document} to add

View File

@@ -35,7 +35,7 @@ import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
+import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.NumericDocValues;

View File

@@ -17,16 +17,19 @@
 package org.apache.lucene.search.grouping;
+import java.io.IOException;
+import java.util.*;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.*;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.ReaderUtil;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.ValueSource;
@@ -45,9 +48,6 @@ import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.lucene.util.mutable.MutableValueStr;
-import java.io.IOException;
-import java.util.*;
 // TODO
 //   - should test relevance sort too
 //   - test null
@@ -551,7 +551,7 @@ public class TestGrouping extends LuceneTestCase {
     final List<List<Document>> updateDocs = new ArrayList<>();
     FieldType groupEndType = new FieldType(StringField.TYPE_NOT_STORED);
-    groupEndType.setIndexOptions(IndexOptions.DOCS_ONLY);
+    groupEndType.setIndexOptions(IndexOptions.DOCS);
     groupEndType.setOmitNorms(true);
     //System.out.println("TEST: index groups");

Some files were not shown because too many files have changed in this diff.