mirror of https://github.com/apache/lucene.git
LUCENE-2308: Moved over to using IndexableFieldType interface
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1167668 13f79535-47bb-0310-9956-ffa450edef68
Parent: 283ba51e19
Commit: ffb3cbee57
@@ -166,6 +166,11 @@ Changes in backwards compatibility policy
  arbitrary relationships. To navigate to a scorer's children, call Scorer.getChildren().
  (Robert Muir)

+* LUCENE-2308: Field is now instantiated with an instance of IndexableFieldType, of which there
+  is a core implementation FieldType. Most properties describing a Field have been moved to
+  IndexableFieldType. See MIGRATE.txt for more details.
+  (Nikola Tankovic, Mike McCandless, Chris Male)
+
Changes in Runtime Behavior

* LUCENE-2846: omitNorms now behaves like omitTermFrequencyAndPositions, if you
@@ -410,14 +410,14 @@ LUCENE-1458, LUCENE-2111: Flexible Indexing

-* LUCENE-2308: Separate FieldType from Field instances
+* LUCENE-2308: Separate IndexableFieldType from Field instances

  With this change, the indexing details (indexed, tokenized, norms,
  indexOptions, stored, etc.) are moved into a separate FieldType
  instance (rather than being stored directly on the Field).

-  This means you can create the FieldType instance once, up front, for a
-  given field, and then re-use that instance whenever you instantiate
+  This means you can create the IndexableFieldType instance once, up front,
+  for a given field, and then re-use that instance whenever you instantiate
  the Field.

  Certain field types are pre-defined since they are common cases:

@@ -454,9 +454,7 @@ You can of course also create your own FieldType from scratch:
    t.setOmitNorms(true);
    t.setIndexOptions(IndexOptions.DOCS_AND_FREQS);

-  FieldType has a freeze() method to prevent further changes. Note that
-  once a FieldType is bound to a Field, it's frozen, to help prevent
-  confusing bugs.
+  FieldType has a freeze() method to prevent further changes.

  When migrating from the 3.x API, if you did this before:
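Illustrative sketch (not part of the patch): the pattern the CHANGES entry above describes, using the trunk API as of this commit; the field name and values are invented for the example.

  import org.apache.lucene.document.Document;
  import org.apache.lucene.document.Field;
  import org.apache.lucene.document.FieldType;
  import org.apache.lucene.document.TextField;
  import org.apache.lucene.index.FieldInfo.IndexOptions;

  public class FieldTypeExample {
    public static void main(String[] args) {
      // Configure the type once, up front, starting from a pre-defined type...
      FieldType bodyType = new FieldType(TextField.TYPE_UNSTORED);
      bodyType.setStoreTermVectors(true);
      bodyType.setOmitNorms(true);
      bodyType.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
      bodyType.freeze(); // recommended once all properties are set

      // ...then re-use the same instance for every Field of that name.
      Document doc = new Document();
      doc.add(new Field("body", bodyType, "some example text"));
      doc.add(new Field("body", bodyType, "more example text"));
    }
  }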
@@ -26,9 +26,9 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
@@ -199,7 +199,7 @@ public class InstantiatedIndex
documentsByNumber[i] = document;
for (IndexableField field : document.getDocument()) {
if (fields == null || fields.contains(field.name())) {
-if (field.storeTermVectors()) {
+if (field.fieldType().storeTermVectors()) {
if (document.getVectorSpace() == null) {
document.setVectorSpace(new HashMap<String, List<InstantiatedTermDocumentInformation>>());
}

@@ -291,7 +291,7 @@ public class InstantiatedIndex
continue; // deleted
}
for (IndexableField field : document.getDocument()) {
-if (field.storeTermVectors() && field.storeTermVectorOffsets()) {
+if (field.fieldType().storeTermVectors() && field.fieldType().storeTermVectorOffsets()) {
TermPositionVector termPositionVector = (TermPositionVector) sourceIndexReader.getTermFreqVector(document.getDocumentNumber(), field.name());
if (termPositionVector != null) {
for (int i = 0; i < termPositionVector.getTerms().length; i++) {
@@ -484,28 +484,28 @@ public class InstantiatedIndexWriter implements Closeable {

// once fieldSettings, always fieldSettings.
-if (field.omitNorms()) {
+if (field.fieldType().omitNorms()) {
fieldSetting.omitNorms = true;
}
-if (field.indexed() ) {
+if (field.fieldType().indexed() ) {
fieldSetting.indexed = true;
}
-if (field.tokenized()) {
+if (field.fieldType().tokenized()) {
fieldSetting.tokenized = true;
}
-if (field.stored()) {
+if (field.fieldType().stored()) {
fieldSetting.stored = true;
}
if (field.binaryValue() != null) {
fieldSetting.isBinary = true;
}
-if (field.storeTermVectors()) {
+if (field.fieldType().storeTermVectors()) {
fieldSetting.storeTermVector = true;
}
-if (field.storeTermVectorPositions()) {
+if (field.fieldType().storeTermVectorPositions()) {
fieldSetting.storePositionWithTermVector = true;
}
-if (field.storeTermVectorOffsets()) {
+if (field.fieldType().storeTermVectorOffsets()) {
fieldSetting.storeOffsetWithTermVector = true;
}
}
@@ -519,12 +519,12 @@ public class InstantiatedIndexWriter implements Closeable {

FieldSetting fieldSetting = fieldSettingsByFieldName.get(field.name());

-if (field.indexed()) {
+if (field.fieldType().indexed()) {

LinkedList<Token> tokens = new LinkedList<Token>();
tokensByField.put(field, tokens);

-if (field.tokenized()) {
+if (field.fieldType().tokenized()) {
final TokenStream tokenStream;
// todo readerValue(), binaryValue()
if (field.tokenStreamValue() != null) {

@@ -564,7 +564,7 @@ public class InstantiatedIndexWriter implements Closeable {
}
}

-if (!field.stored()) {
+if (!field.fieldType().stored()) {
//it.remove();
}
}

@@ -610,7 +610,7 @@ public class InstantiatedIndexWriter implements Closeable {
termDocumentInformationFactory.payloads.add(null);
}

-if (eField_Tokens.getKey().storeTermVectorOffsets()) {
+if (eField_Tokens.getKey().fieldType().storeTermVectorOffsets()) {

termDocumentInformationFactory.termOffsets.add(new TermVectorOffsetInfo(fieldSetting.offset + token.startOffset(), fieldSetting.offset + token.endOffset()));
lastOffset = fieldSetting.offset + token.endOffset();

@@ -619,7 +619,7 @@ public class InstantiatedIndexWriter implements Closeable {

}

-if (eField_Tokens.getKey().storeTermVectorOffsets()) {
+if (eField_Tokens.getKey().fieldType().storeTermVectorOffsets()) {
fieldSetting.offset = lastOffset + 1;
}
@@ -82,7 +82,7 @@ public class TestNRTManager extends LuceneTestCase {
Field field1 = (Field) f;

Field field2 = new Field(field1.name(),
-((Field) f).getFieldType(),
+((Field) f).fieldType(),
field1.stringValue());
doc2.add(field2);
}
@@ -28,9 +28,7 @@ public class TestTermVectorAccessor extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));

-Document doc;
-
-doc = new Document();
+Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setStoreTermVectors(true);
customType.setStoreTermVectorPositions(true);
@@ -138,7 +138,7 @@ public abstract class Analyzer implements Closeable {
* @return offset gap, added to the next token emitted from {@link #tokenStream(String,Reader)}
*/
public int getOffsetGap(IndexableField field) {
-if (field.tokenized()) {
+if (field.fieldType().tokenized()) {
return 1;
} else {
return 0;
@@ -28,12 +28,12 @@ import org.apache.lucene.util.BytesRef;
/** Documents are the unit of indexing and search.
*
* A Document is a set of fields. Each field has a name and a textual value.
-* A field may be {@link IndexableField#stored() stored} with the document, in which
+* A field may be {@link org.apache.lucene.index.IndexableFieldType#stored() stored} with the document, in which
* case it is returned with search hits on the document. Thus each document
* should typically contain one or more stored fields which uniquely identify
* it.
*
-* <p>Note that fields which are <i>not</i> {@link IndexableField#stored() stored} are
+* <p>Note that fields which are <i>not</i> {@link org.apache.lucene.index.IndexableFieldType#stored() stored} are
* <i>not</i> available in documents retrieved from the index, e.g. with {@link
* ScoreDoc#doc} or {@link IndexReader#document(int)}.
*/
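Side note (illustration only, not from the patch): a tiny sketch of the stored/unstored distinction the Document javadoc above describes, using the TextField type constants that appear elsewhere in this diff; field names and text are invented.

  import org.apache.lucene.document.Document;
  import org.apache.lucene.document.Field;
  import org.apache.lucene.document.TextField;

  public class StoredFieldExample {
    public static void main(String[] args) {
      Document doc = new Document();
      // Stored: returned with search hits via IndexReader.document(int).
      doc.add(new Field("title", TextField.TYPE_STORED, "A stored title"));
      // Unstored: searchable, but not available on retrieved documents.
      doc.add(new Field("body", TextField.TYPE_UNSTORED, "Full body text, indexed only"));
    }
  }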
@@ -20,7 +20,7 @@ package org.apache.lucene.document;
import java.io.Reader;

import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.values.PerDocFieldValues;
import org.apache.lucene.index.values.ValueType;
@@ -32,11 +32,14 @@ import org.apache.lucene.util.BytesRef;
* may be atomic keywords, which are not further processed. Such keywords may be
* used to represent dates, urls, etc. Fields are optionally stored in the
* index, so that they may be returned with hits on the document.
* <p/>
+* Note, Field instances are instantiated with a {@link IndexableFieldType}. Making changes
+* to the state of the FieldType will impact any Field it is used in, therefore
+* it is strongly recommended that no changes are made after Field instantiation.
*/

public class Field implements IndexableField {

-protected FieldType type;
+protected IndexableFieldType type;
protected String name = "body";
// the data object for all different kind of field values
protected Object fieldsData;
@@ -47,13 +50,12 @@ public class Field implements IndexableField {

protected float boost = 1.0f;

-public Field(String name, FieldType type) {
+public Field(String name, IndexableFieldType type) {
this.name = name;
this.type = type;
-type.freeze();
}

-public Field(String name, FieldType type, Reader reader) {
+public Field(String name, IndexableFieldType type, Reader reader) {
if (name == null) {
throw new NullPointerException("name cannot be null");
}

@@ -64,10 +66,9 @@ public class Field implements IndexableField {
this.name = name;
this.fieldsData = reader;
this.type = type;
-type.freeze();
}

-public Field(String name, FieldType type, TokenStream tokenStream) {
+public Field(String name, IndexableFieldType type, TokenStream tokenStream) {
if (name == null) {
throw new NullPointerException("name cannot be null");
}

@@ -79,28 +80,25 @@ public class Field implements IndexableField {
this.fieldsData = null;
this.tokenStream = tokenStream;
this.type = type;
-type.freeze();
}

-public Field(String name, FieldType type, byte[] value) {
+public Field(String name, IndexableFieldType type, byte[] value) {
this(name, type, value, 0, value.length);
}

-public Field(String name, FieldType type, byte[] value, int offset, int length) {
+public Field(String name, IndexableFieldType type, byte[] value, int offset, int length) {
this.fieldsData = new BytesRef(value, offset, length);
this.type = type;
this.name = name;
-type.freeze();
}

-public Field(String name, FieldType type, BytesRef bytes) {
+public Field(String name, IndexableFieldType type, BytesRef bytes) {
this.fieldsData = bytes;
this.type = type;
this.name = name;
-type.freeze();
}

-public Field(String name, FieldType type, String value) {
+public Field(String name, IndexableFieldType type, String value) {
if (name == null) {
throw new IllegalArgumentException("name cannot be null");
}
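Illustrative sketch (not from the patch): the Field constructors above all take the name first, then the type, then the value; the snippet below passes String, byte[] and BytesRef values against one shared stored-only FieldType. Field names and values are invented.

  import org.apache.lucene.document.Document;
  import org.apache.lucene.document.Field;
  import org.apache.lucene.document.FieldType;
  import org.apache.lucene.util.BytesRef;

  public class FieldConstructorsExample {
    public static void main(String[] args) {
      FieldType storedOnly = new FieldType();
      storedOnly.setStored(true); // stored but not indexed
      storedOnly.freeze();

      byte[] raw = new byte[] {1, 2, 3};
      Document doc = new Document();
      doc.add(new Field("title", storedOnly, "hello"));           // String value
      doc.add(new Field("blob", storedOnly, raw, 0, raw.length)); // byte[] slice
      doc.add(new Field("blob2", storedOnly, new BytesRef(raw))); // BytesRef value
    }
  }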
@@ -119,7 +117,6 @@ public class Field implements IndexableField {
this.type = type;
this.name = name;
this.fieldsData = value;
-type.freeze();
}

/**

@@ -181,7 +178,7 @@ public class Field implements IndexableField {
throw new IllegalArgumentException(
"cannot set a Reader value on a binary field");
}
-if (stored()) {
+if (type.stored()) {
throw new IllegalArgumentException(
"cannot set a Reader value on a stored field");
}

@@ -206,7 +203,7 @@ public class Field implements IndexableField {
* values from stringValue() or getBinaryValue()
*/
public void setTokenStream(TokenStream tokenStream) {
-if (!indexed() || !tokenized()) {
+if (!type.indexed() || !type.tokenized()) {
throw new IllegalArgumentException(
"cannot set token stream on non indexed and tokenized field");
}

@@ -259,44 +256,12 @@ public class Field implements IndexableField {
}
}

-/** methods from inner FieldType */
+/** methods from inner IndexableFieldType */

public boolean isBinary() {
return fieldsData instanceof BytesRef;
}

-public boolean stored() {
-return type.stored();
-}
-
-public boolean indexed() {
-return type.indexed();
-}
-
-public boolean tokenized() {
-return type.tokenized();
-}
-
-public boolean omitNorms() {
-return type.omitNorms();
-}
-
-public IndexOptions indexOptions() {
-return type.indexOptions();
-}
-
-public boolean storeTermVectors() {
-return type.storeTermVectors();
-}
-
-public boolean storeTermVectorOffsets() {
-return type.storeTermVectorOffsets();
-}
-
-public boolean storeTermVectorPositions() {
-return type.storeTermVectorPositions();
-}
-
/** Prints a Field for human consumption. */
@Override
public String toString() {

@@ -329,7 +294,7 @@ public class Field implements IndexableField {
}

/** Returns FieldType for this field. */
-public FieldType getFieldType() {
+public IndexableFieldType fieldType() {
return type;
}
}
@@ -18,8 +18,9 @@ package org.apache.lucene.document;
*/

import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexableFieldType;

-public class FieldType {
+public class FieldType implements IndexableFieldType {

private boolean indexed;
private boolean stored;

@@ -31,7 +32,7 @@ public class FieldType {
private IndexOptions indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
private boolean frozen;

-public FieldType(FieldType ref) {
+public FieldType(IndexableFieldType ref) {
this.indexed = ref.indexed();
this.stored = ref.stored();
this.tokenized = ref.tokenized();

@@ -52,8 +53,11 @@ public class FieldType {
}
}

-/** Prevents future changes. Note that when a FieldType
- * is first bound to a Field instance, it is frozen. */
+/**
+ * Prevents future changes. Note, it is recommended that this is called once
+ * the FieldTypes's properties have been set, to prevent unintential state
+ * changes.
+ */
public void freeze() {
this.frozen = true;
}
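Illustration only (not from the patch): how the copy constructor and freeze() shown above can be combined, assuming TextField.TYPE_UNSTORED as a starting point.

  import org.apache.lucene.document.FieldType;
  import org.apache.lucene.document.TextField;

  public class FreezeExample {
    public static void main(String[] args) {
      FieldType base = new FieldType(TextField.TYPE_UNSTORED);
      base.freeze(); // prevents further changes on this instance

      // To vary a frozen type, copy it and adjust the copy before freezing it too.
      FieldType withVectors = new FieldType(base); // copies indexed/stored/tokenized/...
      withVectors.setStoreTermVectors(true);
      withVectors.freeze();
    }
  }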
@@ -20,6 +20,7 @@ import java.io.Reader;
import java.util.Comparator;

import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.values.PerDocFieldValues;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.util.BytesRef;

@@ -84,11 +85,11 @@ public class IndexDocValuesField extends Field implements PerDocFieldValues {
this(name, new FieldType());
}

-public IndexDocValuesField(String name, FieldType type) {
+public IndexDocValuesField(String name, IndexableFieldType type) {
this(name, type, null);
}

-public IndexDocValuesField(String name, FieldType type, String value) {
+public IndexDocValuesField(String name, IndexableFieldType type, String value) {
super(name, type);
fieldsData = value;
}

@@ -356,7 +357,7 @@ public class IndexDocValuesField extends Field implements PerDocFieldValues {
if (field instanceof IndexDocValuesField) {
return (IndexDocValuesField) field;
}
-final IndexDocValuesField valField = new IndexDocValuesField(field.name(), field.getFieldType(), field.stringValue());
+final IndexDocValuesField valField = new IndexDocValuesField(field.name(), field.fieldType(), field.stringValue());
switch (type) {
case BYTES_FIXED_DEREF:
case BYTES_FIXED_SORTED:

@@ -104,7 +104,7 @@ import org.apache.lucene.search.FieldCache; // javadocs
* default value, 4, was selected for a reasonable tradeoff
* of disk space consumption versus performance. You can
* use the expert constructor {@link
-* #NumericField(String,int,FieldType)} if you'd
+* #NumericField(String,int, FieldType)} if you'd
* like to change the value. Note that you must also
* specify a congruent value when creating {@link
* NumericRangeQuery} or {@link NumericRangeFilter}.

@@ -238,7 +238,7 @@ public final class NumericField extends Field {

/** Returns a {@link NumericTokenStream} for indexing the numeric value. */
public TokenStream tokenStreamValue() {
-if (!indexed()) return null;
+if (!type.indexed()) return null;
if (numericTS == null) {
// lazy init the TokenStream as it is heavy to instantiate
// (attributes,...),
@@ -223,9 +223,7 @@ final class DocFieldProcessor extends DocConsumer {
// needs to be more "pluggable" such that if I want
// to have a new "thing" my Fields can do, I can
// easily add it
-FieldInfo fi = fieldInfos.addOrUpdate(fieldName, field.indexed(), field.storeTermVectors(),
-field.storeTermVectorPositions(), field.storeTermVectorOffsets(),
-field.omitNorms(), false, field.indexOptions(), field.docValuesType());
+FieldInfo fi = fieldInfos.addOrUpdate(fieldName, field.fieldType(), false, field.docValuesType());

fp = new DocFieldProcessorPerField(this, fi);
fp.next = fieldHash[hashPos];

@@ -236,9 +234,7 @@ final class DocFieldProcessor extends DocConsumer {
rehash();
}
} else {
-fieldInfos.addOrUpdate(fp.fieldInfo.name, field.indexed(), field.storeTermVectors(),
-field.storeTermVectorPositions(), field.storeTermVectorOffsets(),
-field.omitNorms(), false, field.indexOptions(), field.docValuesType());
+fieldInfos.addOrUpdate(fp.fieldInfo.name, field.fieldType(), false, field.docValuesType());
}

if (thisFieldGen != fp.lastGen) {

@@ -259,7 +255,7 @@ final class DocFieldProcessor extends DocConsumer {

fp.addField(field);

-if (field.stored()) {
+if (field.fieldType().stored()) {
fieldsWriter.addField(field, fp.fieldInfo);
}
final PerDocFieldValues docValues = field.docValues();

@@ -74,7 +74,7 @@ final class DocInverterPerField extends DocFieldConsumerPerField {
// TODO FI: this should be "genericized" to querying
// consumer if it wants to see this particular field
// tokenized.
-if (field.indexed() && doInvert) {
+if (field.fieldType().indexed() && doInvert) {

if (i > 0)
fieldState.position += docState.analyzer == null ? 0 : docState.analyzer.getPositionIncrementGap(fieldInfo.name);

@@ -83,7 +83,7 @@ final class DocInverterPerField extends DocFieldConsumerPerField {
// outside of indexer -- field should simply give us
// a TokenStream, even for multi-valued fields

-if (!field.tokenized()) { // un-tokenized field
+if (!field.fieldType().tokenized()) { // un-tokenized field
final String stringValue = field.stringValue();
assert stringValue != null;
final int valueLength = stringValue.length();

@@ -456,6 +456,12 @@ public final class FieldInfos implements Iterable<FieldInfo> {
storeOffsetWithTermVector, omitNorms, storePayloads, indexOptions, docValues);
}

+synchronized public FieldInfo addOrUpdate(String name, IndexableFieldType fieldType, boolean scorePayloads, ValueType docValues) {
+return addOrUpdateInternal(name, -1, fieldType.indexed(), fieldType.storeTermVectors(),
+fieldType.storeTermVectorPositions(), fieldType.storeTermVectorOffsets(), fieldType.omitNorms(), scorePayloads,
+fieldType.indexOptions(), docValues);
+}
+
synchronized private FieldInfo addOrUpdateInternal(String name, int preferredFieldNumber, boolean isIndexed,
boolean storeTermVector, boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
boolean omitNorms, boolean storePayloads, IndexOptions indexOptions, ValueType docValues) {
@@ -217,14 +217,14 @@ final class FieldsWriter {

int storedCount = 0;
for (IndexableField field : doc) {
-if (field.stored()) {
+if (field.fieldType().stored()) {
storedCount++;
}
}
fieldsStream.writeVInt(storedCount);

for (IndexableField field : doc) {
-if (field.stored()) {
+if (field.fieldType().stored()) {
writeField(fieldInfos.fieldNumber(field.name()), field);
}
}

@@ -83,7 +83,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
@Override
boolean start(IndexableField[] fields, int count) {
for(int i=0;i<count;i++) {
-if (fields[i].indexed()) {
+if (fields[i].fieldType().indexed()) {
return true;
}
}

@@ -21,7 +21,6 @@ import java.io.Reader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.NumericField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.values.PerDocFieldValues;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.util.BytesRef;

@@ -48,9 +47,6 @@ public interface IndexableField {
/** Field boost (you must pre-multiply in any doc boost). */
public float boost();

-/* True if the field's value should be stored */
-public boolean stored();
-
/* Non-null if this field has a binary value */
public BytesRef binaryValue();
@@ -74,27 +70,12 @@ public interface IndexableField {
/* Numeric value; only used if the field is numeric */
public Number numericValue();

-/* True if this field should be indexed (inverted) */
-public boolean indexed();
-
-/* True if this field's value should be analyzed */
-public boolean tokenized();
-
-/* True if norms should not be indexed */
-public boolean omitNorms();
-
-/* {@link IndexOptions}, describing what should be
- * recorded into the inverted index */
-public IndexOptions indexOptions();
-
-/* True if term vectors should be indexed */
-public boolean storeTermVectors();
-
-/* True if term vector offsets should be indexed */
-public boolean storeTermVectorOffsets();
-
-/* True if term vector positions should be indexed */
-public boolean storeTermVectorPositions();
+/**
+ * Returns the IndexableFieldType describing the properties of this field
+ *
+ * @return IndexableFieldType for this field
+ */
+public IndexableFieldType fieldType();

/* Non-null if doc values should be indexed */
public PerDocFieldValues docValues();
@@ -0,0 +1,48 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.FieldInfo.IndexOptions;
+
+public interface IndexableFieldType {
+
+  /* True if this field should be indexed (inverted) */
+  public boolean indexed();
+
+  /* True if the field's value should be stored */
+  public boolean stored();
+
+  /* True if this field's value should be analyzed */
+  public boolean tokenized();
+
+  /* True if term vectors should be indexed */
+  public boolean storeTermVectors();
+
+  /* True if term vector offsets should be indexed */
+  public boolean storeTermVectorOffsets();
+
+  /* True if term vector positions should be indexed */
+  public boolean storeTermVectorPositions();
+
+  /* True if norms should not be indexed */
+  public boolean omitNorms();
+
+  /* {@link IndexOptions}, describing what should be
+   * recorded into the inverted index */
+  public IndexOptions indexOptions();
+}
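Most callers will simply use the FieldType implementation, but the new interface can also be implemented directly, as the TestIndexableField change later in this diff does. A minimal fixed-value sketch (illustration only, class name invented):

  import org.apache.lucene.index.FieldInfo.IndexOptions;
  import org.apache.lucene.index.IndexableFieldType;

  // A constant "indexed, tokenized, not stored, no term vectors" type.
  public class SimpleIndexedType implements IndexableFieldType {
    public boolean indexed() { return true; }
    public boolean stored() { return false; }
    public boolean tokenized() { return true; }
    public boolean storeTermVectors() { return false; }
    public boolean storeTermVectorOffsets() { return false; }
    public boolean storeTermVectorPositions() { return false; }
    public boolean omitNorms() { return false; }
    public IndexOptions indexOptions() { return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; }
  }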
@@ -61,10 +61,10 @@ final class TermVectorsTermsWriterPerField extends TermsHashConsumerPerField {

for(int i=0;i<count;i++) {
IndexableField field = fields[i];
-if (field.indexed() && field.storeTermVectors()) {
+if (field.fieldType().indexed() && field.fieldType().storeTermVectors()) {
doVectors = true;
-doVectorPositions |= field.storeTermVectorPositions();
-doVectorOffsets |= field.storeTermVectorOffsets();
+doVectorPositions |= field.fieldType().storeTermVectorPositions();
+doVectorOffsets |= field.fieldType().storeTermVectorOffsets();
}
}

@@ -205,15 +205,15 @@ class DocHelper {
for (int i=0; i<fields.length; i++) {
IndexableField f = fields[i];
add(all,f);
-if (f.indexed()) add(indexed,f);
+if (f.fieldType().indexed()) add(indexed,f);
else add(unindexed,f);
-if (f.storeTermVectors()) add(termvector,f);
-if (f.indexed() && !f.storeTermVectors()) add(notermvector,f);
-if (f.stored()) add(stored,f);
+if (f.fieldType().storeTermVectors()) add(termvector,f);
+if (f.fieldType().indexed() && !f.fieldType().storeTermVectors()) add(notermvector,f);
+if (f.fieldType().stored()) add(stored,f);
else add(unstored,f);
-if (f.indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
-if (f.omitNorms()) add(noNorms,f);
-if (f.indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
+if (f.fieldType().indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
+if (f.fieldType().omitNorms()) add(noNorms,f);
+if (f.fieldType().indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
//if (f.isLazy()) add(lazy, f);
}
}
@@ -433,8 +433,7 @@ public class _TestUtil {
/** Adds field info for a Document. */
public static void add(Document doc, FieldInfos fieldInfos) {
for (IndexableField field : doc) {
-fieldInfos.addOrUpdate(field.name(), field.indexed(), field.storeTermVectors(), field.storeTermVectorPositions(),
-field.storeTermVectorOffsets(), field.omitNorms(), false, field.indexOptions(), field.docValuesType());
+fieldInfos.addOrUpdate(field.name(), field.fieldType(), false, field.docValuesType());
}
}

@@ -515,7 +514,7 @@ public class _TestUtil {
Field field1 = (Field) f;

Field field2 = new Field(field1.name(),
-field1.getFieldType(),
+field1.fieldType(),
field1.stringValue()
);
doc2.add(field2);

@@ -52,9 +52,9 @@ public class TestDocument extends LuceneTestCase {
assertEquals(2, doc.getFields().size());

assertTrue(binaryFld.binaryValue() != null);
-assertTrue(binaryFld.stored());
-assertFalse(binaryFld.indexed());
-assertFalse(binaryFld.tokenized());
+assertTrue(binaryFld.fieldType().stored());
+assertFalse(binaryFld.fieldType().indexed());
+assertFalse(binaryFld.fieldType().tokenized());

String binaryTest = doc.getBinaryValue("binary").utf8ToString();
assertTrue(binaryTest.equals(binaryVal));
@@ -303,10 +303,10 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {

for (FieldInfo fi : fis) {
Field expected = getField(Integer.parseInt(fi.name));
-assertEquals(expected.indexed(), fi.isIndexed);
-assertEquals(expected.storeTermVectors(), fi.storeTermVector);
-assertEquals(expected.storeTermVectorPositions(), fi.storePositionWithTermVector);
-assertEquals(expected.storeTermVectorOffsets(), fi.storeOffsetWithTermVector);
+assertEquals(expected.fieldType().indexed(), fi.isIndexed);
+assertEquals(expected.fieldType().storeTermVectors(), fi.storeTermVector);
+assertEquals(expected.fieldType().storeTermVectorPositions(), fi.storePositionWithTermVector);
+assertEquals(expected.fieldType().storeTermVectorOffsets(), fi.storeOffsetWithTermVector);
}
}

@@ -77,12 +77,12 @@ public class TestDocumentWriter extends LuceneTestCase {
IndexableField [] fields = doc.getFields("textField2");
assertTrue(fields != null && fields.length == 1);
assertTrue(fields[0].stringValue().equals(DocHelper.FIELD_2_TEXT));
-assertTrue(fields[0].storeTermVectors());
+assertTrue(fields[0].fieldType().storeTermVectors());

fields = doc.getFields("textField1");
assertTrue(fields != null && fields.length == 1);
assertTrue(fields[0].stringValue().equals(DocHelper.FIELD_1_TEXT));
-assertFalse(fields[0].storeTermVectors());
+assertFalse(fields[0].fieldType().storeTermVectors());

fields = doc.getFields("keyField");
assertTrue(fields != null && fields.length == 1);
@@ -77,28 +77,28 @@ public class TestFieldsReader extends LuceneTestCase {

Field field = (Field) doc.getField(DocHelper.TEXT_FIELD_2_KEY);
assertTrue(field != null);
-assertTrue(field.storeTermVectors() == true);
+assertTrue(field.fieldType().storeTermVectors());

-assertTrue(field.storeTermVectorOffsets() == true);
-assertTrue(field.storeTermVectorPositions() == true);
-assertTrue(field.omitNorms() == false);
-assertTrue(field.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+assertTrue(field.fieldType().storeTermVectorOffsets());
+assertTrue(field.fieldType().storeTermVectorPositions());
+assertFalse(field.fieldType().omitNorms());
+assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);

field = (Field) doc.getField(DocHelper.TEXT_FIELD_3_KEY);
assertTrue(field != null);
-assertTrue(field.storeTermVectors() == false);
-assertTrue(field.storeTermVectorOffsets() == false);
-assertTrue(field.storeTermVectorPositions() == false);
-assertTrue(field.omitNorms() == true);
-assertTrue(field.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+assertFalse(field.fieldType().storeTermVectors());
+assertFalse(field.fieldType().storeTermVectorOffsets());
+assertFalse(field.fieldType().storeTermVectorPositions());
+assertTrue(field.fieldType().omitNorms());
+assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);

field = (Field) doc.getField(DocHelper.NO_TF_KEY);
assertTrue(field != null);
-assertTrue(field.storeTermVectors() == false);
-assertTrue(field.storeTermVectorOffsets() == false);
-assertTrue(field.storeTermVectorPositions() == false);
-assertTrue(field.omitNorms() == false);
-assertTrue(field.indexOptions() == IndexOptions.DOCS_ONLY);
+assertFalse(field.fieldType().storeTermVectors());
+assertFalse(field.fieldType().storeTermVectorOffsets());
+assertFalse(field.fieldType().storeTermVectorPositions());
+assertFalse(field.fieldType().omitNorms());
+assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_ONLY);

DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(DocHelper.TEXT_FIELD_3_KEY);
reader.document(0, visitor);

@@ -308,8 +308,8 @@ public class TestFieldsReader extends LuceneTestCase {
w.addDocument(doc);
IndexReader r = w.getReader();
w.close();
-assertFalse(r.document(0).getField("field").indexed());
-assertTrue(r.document(0).getField("field2").indexed());
+assertFalse(r.document(0).getField("field").fieldType().indexed());
+assertTrue(r.document(0).getField("field2").fieldType().indexed());
r.close();
dir.close();
}
@@ -1257,7 +1257,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// random TV
try {
w.addDocument(doc);
-assertFalse(field.storeTermVectors());
+assertFalse(field.fieldType().storeTermVectors());
} catch (RuntimeException e) {
assertTrue(e.getMessage().startsWith(FailOnTermVectors.EXC_MSG));
}

@@ -1278,7 +1278,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// random TV
try {
w.addDocument(doc);
-assertFalse(field.storeTermVectors());
+assertFalse(field.fieldType().storeTermVectors());
} catch (RuntimeException e) {
assertTrue(e.getMessage().startsWith(FailOnTermVectors.EXC_MSG));
}

@@ -181,8 +181,6 @@ public class TestIndexWriterMerging extends LuceneTestCase

Document document = new Document();

-document = new Document();
-
FieldType customType = new FieldType();
customType.setStored(true);

@@ -237,8 +235,6 @@ public class TestIndexWriterMerging extends LuceneTestCase

writer.setInfoStream(VERBOSE ? System.out : null);

-Document document = new Document();
-
FieldType customType = new FieldType();
customType.setStored(true);

@@ -248,7 +244,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
customType1.setStoreTermVectorPositions(true);
customType1.setStoreTermVectorOffsets(true);

-document = new Document();
+Document document = new Document();
Field storedField = newField("stored", "stored", customType);
document.add(storedField);
Field termVectorField = newField("termVector", "termVector", customType1);
@@ -44,6 +44,47 @@ public class TestIndexableField extends LuceneTestCase {
private class MyField implements IndexableField {

private final int counter;
+private final IndexableFieldType fieldType = new IndexableFieldType() {
+@Override
+public boolean indexed() {
+return (counter % 10) != 3;
+}
+
+@Override
+public boolean stored() {
+return (counter & 1) == 0 || (counter % 10) == 3;
+}
+
+@Override
+public boolean tokenized() {
+return true;
+}
+
+@Override
+public boolean storeTermVectors() {
+return counter % 2 == 1 && counter % 10 != 9;
+}
+
+@Override
+public boolean storeTermVectorOffsets() {
+return counter % 2 == 1 && counter % 10 != 9;
+}
+
+@Override
+public boolean storeTermVectorPositions() {
+return counter % 2 == 1 && counter % 10 != 9;
+}
+
+@Override
+public boolean omitNorms() {
+return false;
+}
+
+@Override
+public FieldInfo.IndexOptions indexOptions() {
+return FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+}
+};

public MyField(int counter) {
this.counter = counter;

@@ -59,11 +100,6 @@ public class TestIndexableField extends LuceneTestCase {
return 1.0f + random.nextFloat();
}

-@Override
-public boolean stored() {
-return (counter & 1) == 0 || (counter % 10) == 3;
-}
-
@Override
public BytesRef binaryValue() {
if ((counter%10) == 3) {

@@ -121,40 +157,9 @@ public class TestIndexableField extends LuceneTestCase {
return counter;
}

-// If this returns true then we index this field:
@Override
-public boolean indexed() {
-return (counter % 10) != 3;
-}
-
-@Override
-public boolean tokenized() {
-return true;
-}
-
-@Override
-public boolean omitNorms() {
-return false;
-}
-
-@Override
-public FieldInfo.IndexOptions indexOptions() {
-return FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-}
-
-@Override
-public boolean storeTermVectors() {
-return counter % 2 == 1 && counter%10 != 9;
-}
-
-@Override
-public boolean storeTermVectorOffsets() {
-return counter % 2 == 1 && counter%10 != 9;
-}
-
-@Override
-public boolean storeTermVectorPositions() {
-return counter % 2 == 1 && counter%10 != 9;
+public IndexableFieldType fieldType() {
+return fieldType;
}

// TODO: randomly enable doc values
@@ -175,8 +175,8 @@ public class TestSegmentReader extends LuceneTestCase {
// test omit norms
for (int i=0; i<DocHelper.fields.length; i++) {
IndexableField f = DocHelper.fields[i];
-if (f.indexed()) {
-assertEquals(reader.hasNorms(f.name()), !f.omitNorms());
+if (f.fieldType().indexed()) {
+assertEquals(reader.hasNorms(f.name()), !f.fieldType().omitNorms());
assertEquals(reader.hasNorms(f.name()), !DocHelper.noNorms.containsKey(f.name()));
if (!reader.hasNorms(f.name())) {
// test for norms of null

@@ -391,8 +391,6 @@ public class TestTermVectorsWriter extends LuceneTestCase {
new SerialMergeScheduler()).setMergePolicy(new LogDocMergePolicy()));

Document document = new Document();
-
-document = new Document();
FieldType customType = new FieldType();
customType.setStored(true);

@@ -110,7 +110,7 @@ public class TestTermVectors extends LuceneTestCase {
public void testTermVectorsFieldOrder() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir, new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
-Document doc = new Document();;
+Document doc = new Document();
FieldType ft = new FieldType(TextField.TYPE_STORED);
ft.setStoreTermVectors(true);
ft.setStoreTermVectorOffsets(true);

@@ -69,7 +69,7 @@ public class ReadTokensTask extends PerfTask {
Analyzer analyzer = getRunData().getAnalyzer();
int tokenCount = 0;
for(final IndexableField field : fields) {
-if (!field.tokenized() || field instanceof NumericField) continue;
+if (!field.fieldType().tokenized() || field instanceof NumericField) continue;

final TokenStream stream;
final TokenStream streamValue = field.tokenStreamValue();
@@ -136,28 +136,28 @@ public class DocMakerTest extends BenchmarkTestCase {

// Don't set anything, use the defaults
doc = createTestNormsDocument(false, false, false, false);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());

// Set norms to false
doc = createTestNormsDocument(true, false, false, false);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());

// Set norms to true
doc = createTestNormsDocument(true, true, false, false);
-assertFalse(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertFalse(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());

// Set body norms to false
doc = createTestNormsDocument(false, false, true, false);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
-assertTrue(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
+assertTrue(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());

// Set body norms to true
doc = createTestNormsDocument(false, false, true, true);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());
}

}
@@ -168,14 +168,14 @@ public class LukeRequestHandler extends RequestHandlerBase
private static String getFieldFlags( IndexableField f )
{
StringBuilder flags = new StringBuilder();
-flags.append( (f != null && f.indexed()) ? FieldFlag.INDEXED.getAbbreviation() : '-' );
-flags.append( (f != null && f.tokenized()) ? FieldFlag.TOKENIZED.getAbbreviation() : '-' );
-flags.append( (f != null && f.stored()) ? FieldFlag.STORED.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().indexed()) ? FieldFlag.INDEXED.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().tokenized()) ? FieldFlag.TOKENIZED.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().stored()) ? FieldFlag.STORED.getAbbreviation() : '-' );
flags.append( (false) ? FieldFlag.MULTI_VALUED.getAbbreviation() : '-' ); // SchemaField Specific
-flags.append( (f != null && f.storeTermVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-' );
-flags.append( (f != null && f.storeTermVectorOffsets()) ? FieldFlag.TERM_VECTOR_OFFSET.getAbbreviation() : '-' );
-flags.append( (f != null && f.storeTermVectorPositions()) ? FieldFlag.TERM_VECTOR_POSITION.getAbbreviation() : '-' );
-flags.append( (f != null && f.omitNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().storeTermVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().storeTermVectorOffsets()) ? FieldFlag.TERM_VECTOR_OFFSET.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().storeTermVectorPositions()) ? FieldFlag.TERM_VECTOR_POSITION.getAbbreviation() : '-' );
+flags.append( (f != null && f.fieldType().omitNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-' );
flags.append( (f != null && f.getClass().getSimpleName().equals("LazyField")) ? FieldFlag.LAZY.getAbbreviation() : '-' );
flags.append( (f != null && f.binaryValue()!=null) ? FieldFlag.BINARY.getAbbreviation() : '-' );
flags.append( (false) ? FieldFlag.SORT_MISSING_FIRST.getAbbreviation() : '-' ); // SchemaField Specific

@@ -264,7 +264,7 @@ public class LukeRequestHandler extends RequestHandlerBase
f.add( "docFreq", t.text()==null ? 0 : reader.docFreq( t ) ); // this can be 0 for non-indexed fields

// If we have a term vector, return that
-if( field.storeTermVectors() ) {
+if( field.fieldType().storeTermVectors() ) {
try {
TermFreqVector v = reader.getTermFreqVector( docId, field.name() );
if( v != null ) {
@@ -1,4 +1,6 @@
-/**
+package org.apache.solr.handler.component;
+
+/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.

@@ -15,8 +17,6 @@
 * limitations under the License.
 */

-package org.apache.solr.handler.component;
-
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;

@@ -189,7 +189,7 @@ public class RealTimeGetComponent extends SearchComponent
// copy the stored fields only
Document out = new Document();
for (IndexableField f : doc.getFields()) {
-if (f.stored()) {
+if (f.fieldType().stored()) {
out.add(f);
}
}
@@ -17,6 +17,7 @@

package org.apache.solr.schema;

+import org.apache.lucene.document.FieldType;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.VectorValueSource;
import org.apache.lucene.index.IndexableField;

@@ -87,7 +88,7 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {

if (field.stored()) {
String storedVal = externalVal; // normalize or not?
-org.apache.lucene.document.FieldType customType = new org.apache.lucene.document.FieldType();
+FieldType customType = new FieldType();
customType.setStored(true);
f[f.length - 1] = createField(field.getName(), storedVal, customType, boost);
}

@@ -16,6 +16,7 @@
*/
package org.apache.solr.schema;

+import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.*;

@@ -63,7 +64,7 @@ import java.util.Date;
* @see org.apache.lucene.search.NumericRangeQuery
* @since solr 1.4
*/
-public class TrieField extends FieldType {
+public class TrieField extends org.apache.solr.schema.FieldType {
public static final int DEFAULT_PRECISION_STEP = 8;

protected int precisionStepArg = TrieField.DEFAULT_PRECISION_STEP; // the one passed in or defaulted

@@ -482,7 +483,7 @@ public class TrieField extends FieldType {
return null;
}

-org.apache.lucene.document.FieldType ft = new org.apache.lucene.document.FieldType();
+FieldType ft = new FieldType();
ft.setStored(stored);
ft.setTokenized(true);
ft.setIndexed(indexed);

@@ -545,7 +546,7 @@ public class TrieField extends FieldType {
* Returns null if no prefix or prefix not needed, or the prefix of the main value of a trie field
* that indexes multiple precisions per value.
*/
-public static String getMainValuePrefix(FieldType ft) {
+public static String getMainValuePrefix(org.apache.solr.schema.FieldType ft) {
if (ft instanceof TrieDateField)
ft = ((TrieDateField) ft).wrappedField;
if (ft instanceof TrieField) {
@@ -336,7 +336,7 @@ public class DocumentBuilder {
public SolrDocument loadStoredFields( SolrDocument doc, Document luceneDoc )
{
for( IndexableField field : luceneDoc) {
-if( field.stored() ) {
+if( field.fieldType().stored() ) {
SchemaField sf = schema.getField( field.name() );
if( !schema.isCopyFieldTarget( sf ) ) {
doc.addField( field.name(), sf.getType().toObject( field ) );

@@ -363,27 +363,27 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {
f = ischema.getField("test_basictv");
luf = f.createField("test", 0f);
assertTrue(f.storeTermVector());
-assertTrue(luf.storeTermVectors());
+assertTrue(luf.fieldType().storeTermVectors());

f = ischema.getField("test_notv");
luf = f.createField("test", 0f);
assertTrue(!f.storeTermVector());
-assertTrue(!luf.storeTermVectors());
+assertTrue(!luf.fieldType().storeTermVectors());

f = ischema.getField("test_postv");
luf = f.createField("test", 0f);
assertTrue(f.storeTermVector() && f.storeTermPositions());
-assertTrue(luf.storeTermVectorPositions());
+assertTrue(luf.fieldType().storeTermVectorPositions());

f = ischema.getField("test_offtv");
luf = f.createField("test", 0f);
assertTrue(f.storeTermVector() && f.storeTermOffsets());
-assertTrue(luf.storeTermVectorOffsets());
+assertTrue(luf.fieldType().storeTermVectorOffsets());

f = ischema.getField("test_posofftv");
luf = f.createField("test", 0f);
assertTrue(f.storeTermVector() && f.storeTermPositions() && f.storeTermOffsets());
-assertTrue(luf.storeTermVectorOffsets() && luf.storeTermVectorPositions());
+assertTrue(luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions());

}