LUCENE-1561: rename omitTf -> omitTermFreqAndPositions in Field*

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@756663 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2009-03-20 17:26:01 +00:00
parent 39c8421992
commit c443dcc662
23 changed files with 115 additions and 97 deletions

View File

@ -63,6 +63,9 @@ API Changes
enable compressing & decompressing binary content, external to
Lucene's indexing. Deprecated Field.Store.COMPRESS.
11. LUCENE-1561: Renamed Field.omitTf to Field.omitTermFreqAndPositions
(Otis Gospodnetic via Mike McCandless)
Bug fixes
1. LUCENE-1415: MultiPhraseQuery has incorrect hashCode() and equals()
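For code being updated against the LUCENE-1561 rename above, a minimal hedged sketch (not part of the commit; the field name and text are invented) of the old and new calls, both of which exist per the Fieldable/AbstractField changes below:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

class OmitTfMigrationSketch {
  Document build() {
    Document doc = new Document();
    Field body = new Field("body", "some text", Field.Store.NO, Field.Index.ANALYZED);
    // old call, still compiles but is now deprecated:
    //   body.setOmitTf(true);
    body.setOmitTermFreqAndPositions(true); // new name: omit term freq, positions and payloads
    doc.add(body);
    return doc;
  }
}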

View File

@ -41,7 +41,7 @@
<property name="name" value="${ant.project.name}"/>
<property name="Name" value="Lucene"/>
<property name="version" value="2.9-dev"/>
<property name="compatibility.tag" value="lucene_2_4_back_compat_tests_20090301a"/>
<property name="compatibility.tag" value="lucene_2_4_back_compat_tests_20090320"/>
<property name="spec.version" value="${version}"/>
<property name="year" value="2000-${current.year}"/>
<property name="final.name" value="lucene-${name}-${version}"/>

View File

@ -377,7 +377,7 @@ public final class TrieUtils {
for (int i=0; i<trieCoded.length; i++) {
final int fnum = Math.min(fields.length-1, i);
final Field f = new Field(fields[fnum], trieCoded[i], Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);
f.setOmitTf(true);
f.setOmitTermFreqAndPositions(true);
doc.add(f);
}
}

View File

@ -33,7 +33,7 @@ public abstract class AbstractField implements Fieldable {
protected boolean isBinary = false;
protected boolean isCompressed = false;
protected boolean lazy = false;
protected boolean omitTf = false;
protected boolean omitTermFreqAndPositions = false;
protected float boost = 1.0f;
// the one and only data object for all different kind of field values
protected Object fieldsData = null;
@ -263,8 +263,9 @@ public abstract class AbstractField implements Fieldable {
/** True if norms are omitted for this indexed field */
public boolean getOmitNorms() { return omitNorms; }
/** True if tf is omitted for this indexed field */
public boolean getOmitTf() { return omitTf; }
public boolean getOmitTf() { return omitTermFreqAndPositions; }
public boolean getOmitTermFreqAndPositions() { return omitTermFreqAndPositions; }
/** Expert:
*
@ -273,11 +274,9 @@ public abstract class AbstractField implements Fieldable {
*/
public void setOmitNorms(boolean omitNorms) { this.omitNorms=omitNorms; }
/** Expert:
*
* If set, omit tf from postings of this indexed field.
*/
public void setOmitTf(boolean omitTf) { this.omitTf=omitTf; }
public void setOmitTf(boolean omitTermFreqAndPositions) { this.omitTermFreqAndPositions=omitTermFreqAndPositions; }
public void setOmitTermFreqAndPositions(boolean omitTermFreqAndPositions) { this.omitTermFreqAndPositions=omitTermFreqAndPositions; }
public boolean isLazy() {
return lazy;
@ -326,8 +325,8 @@ public abstract class AbstractField implements Fieldable {
if (omitNorms) {
result.append(",omitNorms");
}
if (omitTf) {
result.append(",omitTf");
if (omitTermFreqAndPositions) {
result.append(",omitTermFreqAndPositions");
}
if (lazy){
result.append(",lazy");

View File

@ -18,6 +18,8 @@ package org.apache.lucene.document;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.search.PhraseQuery; // for javadocs
import org.apache.lucene.search.spans.SpanQuery; // for javadocs
import java.io.Reader;
import java.io.Serializable;
@ -145,14 +147,26 @@ public interface Fieldable extends Serializable {
*/
void setOmitNorms(boolean omitNorms);
/** @deprecated Renamed to {@link #setOmitTermFreqAndPositions} */
void setOmitTf(boolean omitTf);
/** @deprecated Renamed to {@link #getOmitTermFreqAndPositions} */
boolean getOmitTf();
/** Expert:
*
* If set, omit term freq, positions and payloads from postings for this field.
* <p><b>NOTE</b>: this is a dangerous option to enable.
* While it reduces storage space required in the index,
* it also means any query requiring positional
* information, such as {@link PhraseQuery} or {@link
* SpanQuery} subclasses, will silently fail to find
* results.
*/
void setOmitTf(boolean omitTf);
void setOmitTermFreqAndPositions(boolean omitTermFreqAndPositions);
/** True if tf is omitted for this indexed field */
boolean getOmitTf();
boolean getOmitTermFreqAndPositions();
/**
* Indicates whether a Field is Lazy or not. The semantics of Lazy loading are such that if a Field is lazily loaded, retrieving
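The javadoc above warns that positional queries silently match nothing against such a field; a small hedged sketch of that pitfall (not part of the commit; directory, field name and text are invented, and the writer/analyzer calls mirror the Lucene 2.9-era style used in TestOmitTf below):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.RAMDirectory;

class OmittedPositionsPitfallSketch {
  static int demo() throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true,
                                         IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    Field f = new Field("body", "quick brown fox", Field.Store.NO, Field.Index.ANALYZED);
    f.setOmitTermFreqAndPositions(true);   // positions are not written for this field
    doc.add(f);
    writer.addDocument(doc);
    writer.close();

    PhraseQuery pq = new PhraseQuery();
    pq.add(new Term("body", "quick"));
    pq.add(new Term("body", "brown"));
    IndexSearcher searcher = new IndexSearcher(dir);
    int hits = searcher.search(pq, null, 10).totalHits;  // no error is thrown, but expected to be 0
    searcher.close();
    return hits;
  }
}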

View File

@ -166,8 +166,8 @@ public class CheckIndex {
int numFields;
/** True if at least one of the fields in this segment
* does not omitTf.
* @see Fieldable#setOmitTf */
* does not omitTermFreqAndPositions.
* @see Fieldable#setOmitTermFreqAndPositions */
public boolean hasProx;
}
}

View File

@ -183,7 +183,7 @@ final class DocFieldProcessorPerThread extends DocConsumerPerThread {
// easily add it
FieldInfo fi = fieldInfos.add(fieldName, field.isIndexed(), field.isTermVectorStored(),
field.isStorePositionWithTermVector(), field.isStoreOffsetWithTermVector(),
field.getOmitNorms(), false, field.getOmitTf());
field.getOmitNorms(), false, field.getOmitTermFreqAndPositions());
fp = new DocFieldProcessorPerField(this, fi);
fp.next = fieldHash[hashPos];
@ -195,7 +195,7 @@ final class DocFieldProcessorPerThread extends DocConsumerPerThread {
} else
fp.fieldInfo.update(field.isIndexed(), field.isTermVectorStored(),
field.isStorePositionWithTermVector(), field.isStoreOffsetWithTermVector(),
field.getOmitNorms(), false, field.getOmitTf());
field.getOmitNorms(), false, field.getOmitTermFreqAndPositions());
if (thisFieldGen != fp.lastGen) {

View File

@ -272,7 +272,7 @@ final class DocumentsWriter {
}
/** Returns true if any of the fields in the current
* buffered docs have omitTf==false */
* buffered docs have omitTermFreqAndPositions==false */
boolean hasProx() {
return (docFieldProcessor != null) ? docFieldProcessor.fieldInfos.hasProx()
: true;

View File

@ -28,13 +28,13 @@ final class FieldInfo {
boolean storePositionWithTermVector;
boolean omitNorms; // omit norms associated with indexed fields
boolean omitTf; // omit tf
boolean omitTermFreqAndPositions;
boolean storePayloads; // whether this field stores payloads together with term positions
FieldInfo(String na, boolean tk, int nu, boolean storeTermVector,
boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
boolean omitNorms, boolean storePayloads, boolean omitTf) {
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions) {
name = na;
isIndexed = tk;
number = nu;
@ -43,16 +43,16 @@ final class FieldInfo {
this.storePositionWithTermVector = storePositionWithTermVector;
this.omitNorms = omitNorms;
this.storePayloads = storePayloads;
this.omitTf = omitTf;
this.omitTermFreqAndPositions = omitTermFreqAndPositions;
}
public Object clone() {
return new FieldInfo(name, isIndexed, number, storeTermVector, storePositionWithTermVector,
storeOffsetWithTermVector, omitNorms, storePayloads, omitTf);
storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
}
void update(boolean isIndexed, boolean storeTermVector, boolean storePositionWithTermVector,
boolean storeOffsetWithTermVector, boolean omitNorms, boolean storePayloads, boolean omitTf) {
boolean storeOffsetWithTermVector, boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions) {
if (this.isIndexed != isIndexed) {
this.isIndexed = true; // once indexed, always index
}
@ -68,8 +68,8 @@ final class FieldInfo {
if (this.omitNorms != omitNorms) {
this.omitNorms = false; // once norms are stored, always store
}
if (this.omitTf != omitTf) {
this.omitTf = true; // if one require omitTf at least once, it remains off for life
if (this.omitTermFreqAndPositions != omitTermFreqAndPositions) {
this.omitTermFreqAndPositions = true; // if omitTermFreqAndPositions is required even once, term freq and positions remain off for life
}
if (this.storePayloads != storePayloads) {
this.storePayloads = true;
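The update() rules above are deliberately "sticky"; a hedged sketch of how a conflict resolves (not part of the commit; FieldInfos and FieldInfo are package-private, so this only compiles inside org.apache.lucene.index, and the no-arg FieldInfos() constructor is assumed):

package org.apache.lucene.index; // FieldInfos and FieldInfo are package-private

class StickyOmitSketch {
  static boolean demo() {
    FieldInfos infos = new FieldInfos();
    // first document indexes "body" with full postings:
    infos.add("body", true, false, false, false, false, false, false);
    // a later document asks to omit term freq and positions for the same field:
    infos.add("body", true, false, false, false, false, false, true);
    // update() applies the sticky rule: the merged FieldInfo keeps the omission
    return infos.fieldInfo("body").omitTermFreqAndPositions;  // true
  }
}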

View File

@ -40,7 +40,7 @@ final class FieldInfos {
static final byte STORE_OFFSET_WITH_TERMVECTOR = 0x8;
static final byte OMIT_NORMS = 0x10;
static final byte STORE_PAYLOADS = 0x20;
static final byte OMIT_TF = 0x40;
static final byte OMIT_TERM_FREQ_AND_POSITIONS = 0x40;
private ArrayList byNumber = new ArrayList();
private HashMap byName = new HashMap();
@ -88,11 +88,11 @@ final class FieldInfos {
}
}
/** Returns true if any fields do not omitTf */
/** Returns true if any fields do not omitTermFreqAndPositions */
boolean hasProx() {
final int numFields = byNumber.size();
for(int i=0;i<numFields;i++)
if (!fieldInfo(i).omitTf)
if (!fieldInfo(i).omitTermFreqAndPositions)
return true;
return false;
}
@ -197,26 +197,26 @@ final class FieldInfos {
* @param storeOffsetWithTermVector true if the term vector with offsets should be stored
* @param omitNorms true if the norms for the indexed field should be omitted
* @param storePayloads true if payloads should be stored for this field
* @param omitTf true if term freqs should be omitted for this field
* @param omitTermFreqAndPositions true if term freqs should be omitted for this field
*/
synchronized public FieldInfo add(String name, boolean isIndexed, boolean storeTermVector,
boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
boolean omitNorms, boolean storePayloads, boolean omitTf) {
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions) {
FieldInfo fi = fieldInfo(name);
if (fi == null) {
return addInternal(name, isIndexed, storeTermVector, storePositionWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTf);
return addInternal(name, isIndexed, storeTermVector, storePositionWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
} else {
fi.update(isIndexed, storeTermVector, storePositionWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTf);
fi.update(isIndexed, storeTermVector, storePositionWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
}
return fi;
}
private FieldInfo addInternal(String name, boolean isIndexed,
boolean storeTermVector, boolean storePositionWithTermVector,
boolean storeOffsetWithTermVector, boolean omitNorms, boolean storePayloads, boolean omitTf) {
boolean storeOffsetWithTermVector, boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions) {
FieldInfo fi =
new FieldInfo(name, isIndexed, byNumber.size(), storeTermVector, storePositionWithTermVector,
storeOffsetWithTermVector, omitNorms, storePayloads, omitTf);
storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
byNumber.add(fi);
byName.put(name, fi);
return fi;
@ -288,7 +288,7 @@ final class FieldInfos {
if (fi.storeOffsetWithTermVector) bits |= STORE_OFFSET_WITH_TERMVECTOR;
if (fi.omitNorms) bits |= OMIT_NORMS;
if (fi.storePayloads) bits |= STORE_PAYLOADS;
if (fi.omitTf) bits |= OMIT_TF;
if (fi.omitTermFreqAndPositions) bits |= OMIT_TERM_FREQ_AND_POSITIONS;
output.writeString(fi.name);
output.writeByte(bits);
@ -306,9 +306,9 @@ final class FieldInfos {
boolean storeOffsetWithTermVector = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
boolean omitNorms = (bits & OMIT_NORMS) != 0;
boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
boolean omitTf = (bits & OMIT_TF) != 0;
boolean omitTermFreqAndPositions = (bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0;
addInternal(name, isIndexed, storeTermVector, storePositionsWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTf);
addInternal(name, isIndexed, storeTermVector, storePositionsWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
}
}
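The new flag shares the single option byte written per field in the .fnm file; a hedged sketch of the packing and masking performed by the write/read hunks above (constant values copied from the diff; class and method names are invented):

class FnmFlagBitsSketch {
  // values copied from the FieldInfos constants in the hunk above
  static final byte STORE_PAYLOADS = 0x20;
  static final byte OMIT_TERM_FREQ_AND_POSITIONS = 0x40;

  static byte pack(boolean storePayloads, boolean omitTermFreqAndPositions) {
    byte bits = 0;
    if (storePayloads) bits |= STORE_PAYLOADS;
    if (omitTermFreqAndPositions) bits |= OMIT_TERM_FREQ_AND_POSITIONS;
    return bits;
  }

  static boolean omitsTermFreqAndPositions(byte bits) {
    return (bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0;
  }
}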

View File

@ -34,7 +34,7 @@ final class FormatPostingsDocsWriter extends FormatPostingsDocsConsumer {
final int skipInterval;
final int totalNumDocs;
boolean omitTF;
boolean omitTermFreqAndPositions;
boolean storePayloads;
long freqStart;
FieldInfo fieldInfo;
@ -57,7 +57,7 @@ final class FormatPostingsDocsWriter extends FormatPostingsDocsConsumer {
void setField(FieldInfo fieldInfo) {
this.fieldInfo = fieldInfo;
omitTF = fieldInfo.omitTf;
omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
storePayloads = fieldInfo.storePayloads;
posWriter.setField(fieldInfo);
}
@ -83,7 +83,7 @@ final class FormatPostingsDocsWriter extends FormatPostingsDocsConsumer {
assert docID < totalNumDocs: "docID=" + docID + " totalNumDocs=" + totalNumDocs;
lastDocID = docID;
if (omitTF)
if (omitTermFreqAndPositions)
out.writeVInt(delta);
else if (1 == termDocFreq)
out.writeVInt((delta<<1) | 1);
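For reference, a hedged worked example of the doc-delta encoding chosen above (doc IDs and freqs are invented; the termDocFreq > 1 branch is not shown in this hunk and is assumed to follow the standard Lucene postings format):

// delta = docID - lastDocID
// omitTermFreqAndPositions == true : writeVInt(delta)              e.g. delta=3         -> 3
// termDocFreq == 1                 : writeVInt((delta<<1) | 1)     e.g. delta=3         -> 7
// termDocFreq  > 1                 : writeVInt(delta<<1), then writeVInt(freq)
//                                    (branch not shown here)       e.g. delta=3, freq=4 -> 6, 4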

View File

@ -27,13 +27,13 @@ final class FormatPostingsPositionsWriter extends FormatPostingsPositionsConsume
final FormatPostingsDocsWriter parent;
final IndexOutput out;
boolean omitTF;
boolean omitTermFreqAndPositions;
boolean storePayloads;
int lastPayloadLength = -1;
FormatPostingsPositionsWriter(SegmentWriteState state, FormatPostingsDocsWriter parent) throws IOException {
this.parent = parent;
omitTF = parent.omitTF;
omitTermFreqAndPositions = parent.omitTermFreqAndPositions;
if (parent.parent.parent.fieldInfos.hasProx()) {
// At least one field does not omit TF, so create the
// prox file
@ -50,7 +50,7 @@ final class FormatPostingsPositionsWriter extends FormatPostingsPositionsConsume
/** Add a new position & payload */
void addPosition(int position, byte[] payload, int payloadOffset, int payloadLength) throws IOException {
assert !omitTF: "omitTF is true";
assert !omitTermFreqAndPositions: "omitTermFreqAndPositions is true";
assert out != null;
final int delta = position - lastPosition;
@ -70,8 +70,8 @@ final class FormatPostingsPositionsWriter extends FormatPostingsPositionsConsume
}
void setField(FieldInfo fieldInfo) {
omitTF = fieldInfo.omitTf;
storePayloads = omitTF ? false : fieldInfo.storePayloads;
omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
storePayloads = omitTermFreqAndPositions ? false : fieldInfo.storePayloads;
}
/** Called when we are done adding positions & payloads */

View File

@ -63,7 +63,7 @@ final class FreqProxFieldMergeState {
textOffset = p.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
field.termsHashPerField.initReader(freq, p, 0);
if (!field.fieldInfo.omitTf)
if (!field.fieldInfo.omitTermFreqAndPositions)
field.termsHashPerField.initReader(prox, p, 1);
// Should always be true
@ -78,7 +78,7 @@ final class FreqProxFieldMergeState {
if (p.lastDocCode != -1) {
// Return last doc
docID = p.lastDocID;
if (!field.omitTf)
if (!field.omitTermFreqAndPositions)
termFreq = p.docFreq;
p.lastDocCode = -1;
return true;
@ -88,7 +88,7 @@ final class FreqProxFieldMergeState {
}
final int code = freq.readVInt();
if (field.omitTf)
if (field.omitTermFreqAndPositions)
docID += code;
else {
docID += code >>> 1;

View File

@ -176,7 +176,7 @@ final class FreqProxTermsWriter extends TermsHashConsumer {
FreqProxFieldMergeState[] termStates = new FreqProxFieldMergeState[numFields];
final boolean currentFieldOmitTf = fields[0].fieldInfo.omitTf;
final boolean currentFieldOmitTermFreqAndPositions = fields[0].fieldInfo.omitTermFreqAndPositions;
while(numFields > 0) {
@ -217,8 +217,8 @@ final class FreqProxTermsWriter extends TermsHashConsumer {
// Carefully copy over the prox + payload info,
// changing the format to match Lucene's segment
// format.
if (!currentFieldOmitTf) {
// omitTf == false so we do write positions &
if (!currentFieldOmitTermFreqAndPositions) {
// omitTermFreqAndPositions == false so we do write positions &
// payload
int position = 0;
for(int j=0;j<termDocFreq;j++) {

View File

@ -31,7 +31,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
final FieldInfo fieldInfo;
final DocumentsWriter.DocState docState;
final FieldInvertState fieldState;
boolean omitTf;
boolean omitTermFreqAndPositions;
PayloadAttribute payloadAttribute;
public FreqProxTermsWriterPerField(TermsHashPerField termsHashPerField, FreqProxTermsWriterPerThread perThread, FieldInfo fieldInfo) {
@ -40,11 +40,11 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
this.fieldInfo = fieldInfo;
docState = termsHashPerField.docState;
fieldState = termsHashPerField.fieldState;
omitTf = fieldInfo.omitTf;
omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
}
int getStreamCount() {
if (fieldInfo.omitTf)
if (fieldInfo.omitTermFreqAndPositions)
return 1;
else
return 2;
@ -64,7 +64,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
void reset() {
// Record, up front, whether our in-RAM format will be
// with or without term freqs:
omitTf = fieldInfo.omitTf;
omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
payloadAttribute = null;
}
@ -107,7 +107,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
assert docState.testPoint("FreqProxTermsWriterPerField.newTerm start");
FreqProxTermsWriter.PostingList p = (FreqProxTermsWriter.PostingList) p0;
p.lastDocID = docState.docID;
if (omitTf) {
if (omitTermFreqAndPositions) {
p.lastDocCode = docState.docID;
} else {
p.lastDocCode = docState.docID << 1;
@ -122,9 +122,9 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
FreqProxTermsWriter.PostingList p = (FreqProxTermsWriter.PostingList) p0;
assert omitTf || p.docFreq > 0;
assert omitTermFreqAndPositions || p.docFreq > 0;
if (omitTf) {
if (omitTermFreqAndPositions) {
if (docState.docID != p.lastDocID) {
assert docState.docID > p.lastDocID;
termsHashPerField.writeVInt(0, p.lastDocCode);

View File

@ -92,7 +92,9 @@ public abstract class IndexReader implements Cloneable {
/** All fields that store payloads */
public static final FieldOption STORES_PAYLOADS = new FieldOption ("STORES_PAYLOADS");
/** All fields that omit tf */
public static final FieldOption OMIT_TF = new FieldOption ("OMIT_TF");
public static final FieldOption OMIT_TERM_FREQ_AND_POSITIONS = new FieldOption ("OMIT_TERM_FREQ_AND_POSITIONS");
/** @deprecated Renamed to {@link #OMIT_TERM_FREQ_AND_POSITIONS} */
public static final FieldOption OMIT_TF = OMIT_TERM_FREQ_AND_POSITIONS;
/** All fields which are not indexed */
public static final FieldOption UNINDEXED = new FieldOption ("UNINDEXED");
/** All fields which are indexed with termvectors enabled */
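A hedged usage sketch of the new option (not part of the commit; "reader" stands for any open IndexReader, and the deprecated OMIT_TF alias yields the same set):

// "reader" is an already-open org.apache.lucene.index.IndexReader
java.util.Collection fieldsWithoutPositions =
    reader.getFieldNames(IndexReader.FieldOption.OMIT_TERM_FREQ_AND_POSITIONS);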

View File

@ -77,7 +77,7 @@ final class SegmentInfo {
private int delCount; // How many deleted docs in this segment, or -1 if not yet known
// (if it's an older index)
private boolean hasProx; // True if this segment has any fields with omitTf==false
private boolean hasProx; // True if this segment has any fields with omitTermFreqAndPositions==false
public SegmentInfo(String name, int docCount, Directory dir) {
this.name = name;

View File

@ -70,7 +70,7 @@ final class SegmentInfos extends Vector {
/** This format adds the boolean hasProx to record if any
* fields in the segment store prox information (ie, have
* omitTf==false) */
* omitTermFreqAndPositions==false) */
public static final int FORMAT_HAS_PROX = -7;
/** This format adds optional commit userData (String) storage. */

View File

@ -208,11 +208,11 @@ final class SegmentMerger {
}
private void addIndexed(IndexReader reader, FieldInfos fieldInfos, Collection names, boolean storeTermVectors, boolean storePositionWithTermVector,
boolean storeOffsetWithTermVector, boolean storePayloads, boolean omitTf) throws IOException {
boolean storeOffsetWithTermVector, boolean storePayloads, boolean omitTermFreqAndPositions) throws IOException {
Iterator i = names.iterator();
while (i.hasNext()) {
String field = (String)i.next();
fieldInfos.add(field, true, storeTermVectors, storePositionWithTermVector, storeOffsetWithTermVector, !reader.hasNorms(field), storePayloads, omitTf);
fieldInfos.add(field, true, storeTermVectors, storePositionWithTermVector, storeOffsetWithTermVector, !reader.hasNorms(field), storePayloads, omitTermFreqAndPositions);
}
}
@ -275,7 +275,7 @@ final class SegmentMerger {
SegmentReader segmentReader = (SegmentReader) reader;
for (int j = 0; j < segmentReader.getFieldInfos().size(); j++) {
FieldInfo fi = segmentReader.getFieldInfos().fieldInfo(j);
fieldInfos.add(fi.name, fi.isIndexed, fi.storeTermVector, fi.storePositionWithTermVector, fi.storeOffsetWithTermVector, !reader.hasNorms(fi.name), fi.storePayloads, fi.omitTf);
fieldInfos.add(fi.name, fi.isIndexed, fi.storeTermVector, fi.storePositionWithTermVector, fi.storeOffsetWithTermVector, !reader.hasNorms(fi.name), fi.storePayloads, fi.omitTermFreqAndPositions);
}
} else {
addIndexed(reader, fieldInfos, reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET), true, true, true, false, false);
@ -496,7 +496,7 @@ final class SegmentMerger {
}
}
boolean omitTF;
boolean omitTermFreqAndPositions;
private final void mergeTermInfos(final FormatPostingsFieldsConsumer consumer) throws CorruptIndexException, IOException {
int base = 0;
@ -544,7 +544,7 @@ final class SegmentMerger {
termsConsumer.finish();
final FieldInfo fieldInfo = fieldInfos.fieldInfo(currentField);
termsConsumer = consumer.addField(fieldInfo);
omitTF = fieldInfo.omitTf;
omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
}
int df = appendPostings(termsConsumer, match, matchSize); // add new TermInfo
@ -605,7 +605,7 @@ final class SegmentMerger {
final int freq = postings.freq();
final FormatPostingsPositionsConsumer posConsumer = docConsumer.addDoc(doc, freq);
if (!omitTF) {
if (!omitTermFreqAndPositions) {
for (int j = 0; j < freq; j++) {
final int position = postings.nextPosition();
final int payloadLength = postings.getPayloadLength();

View File

@ -505,7 +505,7 @@ class SegmentReader extends DirectoryIndexReader {
boolean anyProx = false;
final int numFields = fieldInfos.size();
for(int i=0;!anyProx && i<numFields;i++)
if (!fieldInfos.fieldInfo(i).omitTf)
if (!fieldInfos.fieldInfo(i).omitTermFreqAndPositions)
anyProx = true;
final String fieldsSegment;
@ -959,7 +959,7 @@ class SegmentReader extends DirectoryIndexReader {
else if (!fi.isIndexed && fieldOption == IndexReader.FieldOption.UNINDEXED) {
fieldSet.add(fi.name);
}
else if (fi.omitTf && fieldOption == IndexReader.FieldOption.OMIT_TF) {
else if (fi.omitTermFreqAndPositions && fieldOption == IndexReader.FieldOption.OMIT_TERM_FREQ_AND_POSITIONS) {
fieldSet.add(fi.name);
}
else if (fi.storePayloads && fieldOption == IndexReader.FieldOption.STORES_PAYLOADS) {

View File

@ -41,7 +41,7 @@ class SegmentTermDocs implements TermDocs {
private boolean haveSkipped;
protected boolean currentFieldStoresPayloads;
protected boolean currentFieldOmitTf;
protected boolean currentFieldOmitTermFreqAndPositions;
protected SegmentTermDocs(SegmentReader parent) {
this.parent = parent;
@ -78,7 +78,7 @@ class SegmentTermDocs implements TermDocs {
void seek(TermInfo ti, Term term) throws IOException {
count = 0;
FieldInfo fi = parent.fieldInfos.fieldInfo(term.field);
currentFieldOmitTf = (fi != null) ? fi.omitTf : false;
currentFieldOmitTermFreqAndPositions = (fi != null) ? fi.omitTermFreqAndPositions : false;
currentFieldStoresPayloads = (fi != null) ? fi.storePayloads : false;
if (ti == null) {
df = 0;
@ -111,7 +111,7 @@ class SegmentTermDocs implements TermDocs {
return false;
final int docCode = freqStream.readVInt();
if (currentFieldOmitTf) {
if (currentFieldOmitTermFreqAndPositions) {
doc += docCode;
freq = 1;
} else {
@ -135,7 +135,7 @@ class SegmentTermDocs implements TermDocs {
public int read(final int[] docs, final int[] freqs)
throws IOException {
final int length = docs.length;
if (currentFieldOmitTf) {
if (currentFieldOmitTermFreqAndPositions) {
return readNoTf(docs, freqs, length);
} else {
int i = 0;

View File

@ -60,7 +60,7 @@ extends SegmentTermDocs implements TermPositions {
}
public final int nextPosition() throws IOException {
if (currentFieldOmitTf)
if (currentFieldOmitTermFreqAndPositions)
// This field does not store term freq, positions, payloads
return 0;
// perform lazy skips if necessary
@ -119,7 +119,7 @@ extends SegmentTermDocs implements TermPositions {
}
private void skipPositions(int n) throws IOException {
assert !currentFieldOmitTf;
assert !currentFieldOmitTermFreqAndPositions;
for (int f = n; f > 0; f--) { // skip unread positions
readDeltaPosition();
skipPayload();

View File

@ -52,8 +52,8 @@ public class TestOmitTf extends LuceneTestCase {
// Tests whether the DocumentWriter correctly enables the
// omitTf bit in the FieldInfo
public void testOmitTf() throws Exception {
// omitTermFreqAndPositions bit in the FieldInfo
public void testOmitTermFreqAndPositions() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer();
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
@ -65,7 +65,7 @@ public class TestOmitTf extends LuceneTestCase {
// this field will NOT have Tf
Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
f2.setOmitTf(true);
f2.setOmitTermFreqAndPositions(true);
d.add(f2);
writer.addDocument(d);
@ -75,10 +75,10 @@ public class TestOmitTf extends LuceneTestCase {
d = new Document();
// Reverse
f1.setOmitTf(true);
f1.setOmitTermFreqAndPositions(true);
d.add(f1);
f2.setOmitTf(false);
f2.setOmitTermFreqAndPositions(false);
d.add(f2);
writer.addDocument(d);
@ -91,15 +91,15 @@ public class TestOmitTf extends LuceneTestCase {
// only one segment in the index, so we can cast to SegmentReader
SegmentReader reader = (SegmentReader) IndexReader.open(ram);
FieldInfos fi = reader.fieldInfos();
assertTrue("OmitTf field bit should be set.", fi.fieldInfo("f1").omitTf);
assertTrue("OmitTf field bit should be set.", fi.fieldInfo("f2").omitTf);
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f1").omitTermFreqAndPositions);
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f2").omitTermFreqAndPositions);
reader.close();
ram.close();
}
// Tests whether merging of docs that have different
// omitTf for the same field works
// omitTermFreqAndPositions for the same field works
public void testMixedMerge() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer();
@ -114,7 +114,7 @@ public class TestOmitTf extends LuceneTestCase {
// this field will NOT have Tf
Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
f2.setOmitTf(true);
f2.setOmitTermFreqAndPositions(true);
d.add(f2);
for(int i=0;i<30;i++)
@ -125,10 +125,10 @@ public class TestOmitTf extends LuceneTestCase {
d = new Document();
// Reverse
f1.setOmitTf(true);
f1.setOmitTermFreqAndPositions(true);
d.add(f1);
f2.setOmitTf(false);
f2.setOmitTermFreqAndPositions(false);
d.add(f2);
for(int i=0;i<30;i++)
@ -144,15 +144,15 @@ public class TestOmitTf extends LuceneTestCase {
// only one segment in the index, so we can cast to SegmentReader
SegmentReader reader = (SegmentReader) IndexReader.open(ram);
FieldInfos fi = reader.fieldInfos();
assertTrue("OmitTf field bit should be set.", fi.fieldInfo("f1").omitTf);
assertTrue("OmitTf field bit should be set.", fi.fieldInfo("f2").omitTf);
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f1").omitTermFreqAndPositions);
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f2").omitTermFreqAndPositions);
reader.close();
ram.close();
}
// Make sure first adding docs that do not omitTf for
// field X, then adding docs that do omitTf for that same
// Make sure first adding docs that do not omitTermFreqAndPositions for
// field X, then adding docs that do omitTermFreqAndPositions for that same
// field,
public void testMixedRAM() throws Exception {
Directory ram = new MockRAMDirectory();
@ -173,7 +173,7 @@ public class TestOmitTf extends LuceneTestCase {
for(int i=0;i<5;i++)
writer.addDocument(d);
f2.setOmitTf(true);
f2.setOmitTermFreqAndPositions(true);
for(int i=0;i<20;i++)
writer.addDocument(d);
@ -189,8 +189,8 @@ public class TestOmitTf extends LuceneTestCase {
// only one segment in the index, so we can cast to SegmentReader
SegmentReader reader = (SegmentReader) IndexReader.open(ram);
FieldInfos fi = reader.fieldInfos();
assertTrue("OmitTf field bit should not be set.", !fi.fieldInfo("f1").omitTf);
assertTrue("OmitTf field bit should be set.", fi.fieldInfo("f2").omitTf);
assertTrue("OmitTermFreqAndPositions field bit should not be set.", !fi.fieldInfo("f1").omitTermFreqAndPositions);
assertTrue("OmitTermFreqAndPositions field bit should be set.", fi.fieldInfo("f2").omitTermFreqAndPositions);
reader.close();
ram.close();
@ -213,7 +213,7 @@ public class TestOmitTf extends LuceneTestCase {
Document d = new Document();
Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
f1.setOmitTf(true);
f1.setOmitTermFreqAndPositions(true);
d.add(f1);
for(int i=0;i<30;i++)
@ -250,7 +250,7 @@ public class TestOmitTf extends LuceneTestCase {
sb.append(term).append(" ");
String content = sb.toString();
Field noTf = new Field("noTf", content + (i%2==0 ? "" : " notf"), Field.Store.NO, Field.Index.ANALYZED);
noTf.setOmitTf(true);
noTf.setOmitTermFreqAndPositions(true);
d.add(noTf);
Field tf = new Field("tf", content + (i%2==0 ? " tf" : ""), Field.Store.NO, Field.Index.ANALYZED);