commit ffb3cbee57
parent 283ba51e19

LUCENE-2308: Moved over to using IndexableFieldType interface

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1167668 13f79535-47bb-0310-9956-ffa450edef68
@@ -166,6 +166,11 @@ Changes in backwards compatibility policy
   arbitrary relationships. To navigate to a scorer's children, call Scorer.getChildren().
   (Robert Muir)
 
+* LUCENE-2308: Field is now instantiated with an instance of IndexableFieldType, of which there
+  is a core implementation FieldType. Most properties describing a Field have been moved to
+  IndexableFieldType. See MIGRATE.txt for more details.
+  (Nikola Tankovic, Mike McCandless, Chris Male)
+
 Changes in Runtime Behavior
 
 * LUCENE-2846: omitNorms now behaves like omitTermFrequencyAndPositions, if you
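A minimal sketch of the construction pattern this entry describes, assuming the FieldType setters and the Field(name, type, value) constructor shown in the hunks below; the class and field names are illustrative, not part of the patch:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;

class FieldTypeSketch {
  Document newDoc(String text) {
    // FieldType is the core IndexableFieldType implementation; the indexing
    // properties that used to live on Field itself now live here.
    FieldType bodyType = new FieldType(TextField.TYPE_STORED);
    bodyType.setStoreTermVectors(true);
    bodyType.freeze(); // lock the type once its properties are set

    Document doc = new Document();
    doc.add(new Field("body", bodyType, text)); // Field(name, type, value)
    return doc;
  }
}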
@@ -410,14 +410,14 @@ LUCENE-1458, LUCENE-2111: Flexible Indexing
 
-* LUCENE-2308: Separate FieldType from Field instances
+* LUCENE-2308: Separate IndexableFieldType from Field instances
 
   With this change, the indexing details (indexed, tokenized, norms,
   indexOptions, stored, etc.) are moved into a separate FieldType
   instance (rather than being stored directly on the Field).
 
-  This means you can create the FieldType instance once, up front, for a
-  given field, and then re-use that instance whenever you instantiate
+  This means you can create the IndexableFieldType instance once, up front,
+  for a given field, and then re-use that instance whenever you instantiate
   the Field.
 
   Certain field types are pre-defined since they are common cases:
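As an illustration of the re-use described above (the TITLE_TYPE and newTitleField names are illustrative, not from the patch):

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;

class ReusedFieldType {
  // One type instance, configured up front and shared by every "title" Field.
  static final FieldType TITLE_TYPE = new FieldType(TextField.TYPE_STORED);
  static {
    TITLE_TYPE.setOmitNorms(true);
    TITLE_TYPE.freeze();
  }

  static Field newTitleField(String value) {
    return new Field("title", TITLE_TYPE, value);
  }
}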
@@ -454,9 +454,7 @@ You can of course also create your own FieldType from scratch:
   t.setOmitNorms(true);
   t.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
 
-FieldType has a freeze() method to prevent further changes. Note that
-once a FieldType is bound to a Field, it's frozen, to help prevent
-confusing bugs.
+FieldType has a freeze() method to prevent further changes.
 
 When migrating from the 3.x API, if you did this before:
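A short sketch of the freeze() usage this hunk implies: with this commit the type is no longer frozen implicitly when it is bound to a Field, so freezing explicitly after the setters is assumed to be the caller's responsibility (the helper name is illustrative):

import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;

class FreezeSketch {
  static FieldType customType() {
    FieldType t = new FieldType(TextField.TYPE_UNSTORED);
    t.setOmitNorms(true);
    t.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
    // Call freeze() last, once all properties are set; afterwards the type
    // should be treated as read-only.
    t.freeze();
    return t;
  }
}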
@@ -26,9 +26,9 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
@@ -199,7 +199,7 @@ public class InstantiatedIndex
 documentsByNumber[i] = document;
 for (IndexableField field : document.getDocument()) {
 if (fields == null || fields.contains(field.name())) {
-if (field.storeTermVectors()) {
+if (field.fieldType().storeTermVectors()) {
 if (document.getVectorSpace() == null) {
 document.setVectorSpace(new HashMap<String, List<InstantiatedTermDocumentInformation>>());
 }
@@ -291,7 +291,7 @@ public class InstantiatedIndex
 continue; // deleted
 }
 for (IndexableField field : document.getDocument()) {
-if (field.storeTermVectors() && field.storeTermVectorOffsets()) {
+if (field.fieldType().storeTermVectors() && field.fieldType().storeTermVectorOffsets()) {
 TermPositionVector termPositionVector = (TermPositionVector) sourceIndexReader.getTermFreqVector(document.getDocumentNumber(), field.name());
 if (termPositionVector != null) {
 for (int i = 0; i < termPositionVector.getTerms().length; i++) {
@@ -484,28 +484,28 @@ public class InstantiatedIndexWriter implements Closeable {
 
 // once fieldSettings, always fieldSettings.
-if (field.omitNorms()) {
+if (field.fieldType().omitNorms()) {
 fieldSetting.omitNorms = true;
 }
-if (field.indexed() ) {
+if (field.fieldType().indexed() ) {
 fieldSetting.indexed = true;
 }
-if (field.tokenized()) {
+if (field.fieldType().tokenized()) {
 fieldSetting.tokenized = true;
 }
-if (field.stored()) {
+if (field.fieldType().stored()) {
 fieldSetting.stored = true;
 }
 if (field.binaryValue() != null) {
 fieldSetting.isBinary = true;
 }
-if (field.storeTermVectors()) {
+if (field.fieldType().storeTermVectors()) {
 fieldSetting.storeTermVector = true;
 }
-if (field.storeTermVectorPositions()) {
+if (field.fieldType().storeTermVectorPositions()) {
 fieldSetting.storePositionWithTermVector = true;
 }
-if (field.storeTermVectorOffsets()) {
+if (field.fieldType().storeTermVectorOffsets()) {
 fieldSetting.storeOffsetWithTermVector = true;
 }
 }
@@ -519,12 +519,12 @@ public class InstantiatedIndexWriter implements Closeable {
 
 FieldSetting fieldSetting = fieldSettingsByFieldName.get(field.name());
 
-if (field.indexed()) {
+if (field.fieldType().indexed()) {
 
 LinkedList<Token> tokens = new LinkedList<Token>();
 tokensByField.put(field, tokens);
 
-if (field.tokenized()) {
+if (field.fieldType().tokenized()) {
 final TokenStream tokenStream;
 // todo readerValue(), binaryValue()
 if (field.tokenStreamValue() != null) {
@@ -564,7 +564,7 @@ public class InstantiatedIndexWriter implements Closeable {
 }
 }
 
-if (!field.stored()) {
+if (!field.fieldType().stored()) {
 //it.remove();
 }
 }
@@ -610,7 +610,7 @@ public class InstantiatedIndexWriter implements Closeable {
 termDocumentInformationFactory.payloads.add(null);
 }
 
-if (eField_Tokens.getKey().storeTermVectorOffsets()) {
+if (eField_Tokens.getKey().fieldType().storeTermVectorOffsets()) {
 
 termDocumentInformationFactory.termOffsets.add(new TermVectorOffsetInfo(fieldSetting.offset + token.startOffset(), fieldSetting.offset + token.endOffset()));
 lastOffset = fieldSetting.offset + token.endOffset();
@@ -619,7 +619,7 @@ public class InstantiatedIndexWriter implements Closeable {
 
 }
 
-if (eField_Tokens.getKey().storeTermVectorOffsets()) {
+if (eField_Tokens.getKey().fieldType().storeTermVectorOffsets()) {
 fieldSetting.offset = lastOffset + 1;
 }
 
@@ -82,7 +82,7 @@ public class TestNRTManager extends LuceneTestCase {
 Field field1 = (Field) f;
 
 Field field2 = new Field(field1.name(),
-((Field) f).getFieldType(),
+((Field) f).fieldType(),
 field1.stringValue());
 doc2.add(field2);
 }
@@ -28,9 +28,7 @@ public class TestTermVectorAccessor extends LuceneTestCase {
 Directory dir = newDirectory();
 IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
-Document doc;
-
-doc = new Document();
+Document doc = new Document();
 FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
 customType.setStoreTermVectors(true);
 customType.setStoreTermVectorPositions(true);
@@ -138,7 +138,7 @@ public abstract class Analyzer implements Closeable {
 * @return offset gap, added to the next token emitted from {@link #tokenStream(String,Reader)}
 */
 public int getOffsetGap(IndexableField field) {
-if (field.tokenized()) {
+if (field.fieldType().tokenized()) {
 return 1;
 } else {
 return 0;
@@ -28,12 +28,12 @@ import org.apache.lucene.util.BytesRef;
 /** Documents are the unit of indexing and search.
 *
 * A Document is a set of fields. Each field has a name and a textual value.
-* A field may be {@link IndexableField#stored() stored} with the document, in which
+* A field may be {@link org.apache.lucene.index.IndexableFieldType#stored() stored} with the document, in which
 * case it is returned with search hits on the document. Thus each document
 * should typically contain one or more stored fields which uniquely identify
 * it.
 *
-* <p>Note that fields which are <i>not</i> {@link IndexableField#stored() stored} are
+* <p>Note that fields which are <i>not</i> {@link org.apache.lucene.index.IndexableFieldType#stored() stored} are
 * <i>not</i> available in documents retrieved from the index, e.g. with {@link
 * ScoreDoc#doc} or {@link IndexReader#document(int)}.
 */
@@ -20,7 +20,7 @@ package org.apache.lucene.document;
 import java.io.Reader;
 
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.values.PerDocFieldValues;
 import org.apache.lucene.index.values.ValueType;
@@ -32,11 +32,14 @@ import org.apache.lucene.util.BytesRef;
 * may be atomic keywords, which are not further processed. Such keywords may be
 * used to represent dates, urls, etc. Fields are optionally stored in the
 * index, so that they may be returned with hits on the document.
+* <p/>
+* Note, Field instances are instantiated with a {@link IndexableFieldType}. Making changes
+* to the state of the FieldType will impact any Field it is used in, therefore
+* it is strongly recommended that no changes are made after Field instantiation.
 */
 
 public class Field implements IndexableField {
 
-protected FieldType type;
+protected IndexableFieldType type;
 protected String name = "body";
 // the data object for all different kind of field values
 protected Object fieldsData;
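A sketch of the hazard the new javadoc warns about: the type object is shared and mutable, so a change to it affects every Field built from it (class and variable names are illustrative):

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;

class SharedTypeCaveat {
  void demo() {
    FieldType shared = new FieldType(TextField.TYPE_STORED);
    Field a = new Field("a", shared, "first");
    Field b = new Field("b", shared, "second");

    // Both fields reference the same type object, so flipping a property here
    // would change how both are indexed; freezing the type up front (before
    // constructing Fields) avoids this class of bug.
    shared.setStoreTermVectors(true); // affects a and b alike - avoid after instantiation
  }
}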
@@ -47,13 +50,12 @@ public class Field implements IndexableField {
 
 protected float boost = 1.0f;
 
-public Field(String name, FieldType type) {
+public Field(String name, IndexableFieldType type) {
 this.name = name;
 this.type = type;
-type.freeze();
 }
 
-public Field(String name, FieldType type, Reader reader) {
+public Field(String name, IndexableFieldType type, Reader reader) {
 if (name == null) {
 throw new NullPointerException("name cannot be null");
 }
@@ -64,10 +66,9 @@ public class Field implements IndexableField {
 this.name = name;
 this.fieldsData = reader;
 this.type = type;
-type.freeze();
 }
 
-public Field(String name, FieldType type, TokenStream tokenStream) {
+public Field(String name, IndexableFieldType type, TokenStream tokenStream) {
 if (name == null) {
 throw new NullPointerException("name cannot be null");
 }
@@ -79,28 +80,25 @@ public class Field implements IndexableField {
 this.fieldsData = null;
 this.tokenStream = tokenStream;
 this.type = type;
-type.freeze();
 }
 
-public Field(String name, FieldType type, byte[] value) {
+public Field(String name, IndexableFieldType type, byte[] value) {
 this(name, type, value, 0, value.length);
 }
 
-public Field(String name, FieldType type, byte[] value, int offset, int length) {
+public Field(String name, IndexableFieldType type, byte[] value, int offset, int length) {
 this.fieldsData = new BytesRef(value, offset, length);
 this.type = type;
 this.name = name;
-type.freeze();
 }
 
-public Field(String name, FieldType type, BytesRef bytes) {
+public Field(String name, IndexableFieldType type, BytesRef bytes) {
 this.fieldsData = bytes;
 this.type = type;
 this.name = name;
-type.freeze();
 }
 
-public Field(String name, FieldType type, String value) {
+public Field(String name, IndexableFieldType type, String value) {
 if (name == null) {
 throw new IllegalArgumentException("name cannot be null");
 }
@@ -119,7 +117,6 @@ public class Field implements IndexableField {
 this.type = type;
 this.name = name;
 this.fieldsData = value;
-type.freeze();
 }
 
 /**
@@ -181,7 +178,7 @@ public class Field implements IndexableField {
 throw new IllegalArgumentException(
 "cannot set a Reader value on a binary field");
 }
-if (stored()) {
+if (type.stored()) {
 throw new IllegalArgumentException(
 "cannot set a Reader value on a stored field");
 }
@@ -206,7 +203,7 @@ public class Field implements IndexableField {
 * values from stringValue() or getBinaryValue()
 */
 public void setTokenStream(TokenStream tokenStream) {
-if (!indexed() || !tokenized()) {
+if (!type.indexed() || !type.tokenized()) {
 throw new IllegalArgumentException(
 "cannot set token stream on non indexed and tokenized field");
 }
@@ -259,44 +256,12 @@
 }
 }
 
-/** methods from inner FieldType */
+/** methods from inner IndexableFieldType */
 
 public boolean isBinary() {
 return fieldsData instanceof BytesRef;
 }
 
-public boolean stored() {
-return type.stored();
-}
-
-public boolean indexed() {
-return type.indexed();
-}
-
-public boolean tokenized() {
-return type.tokenized();
-}
-
-public boolean omitNorms() {
-return type.omitNorms();
-}
-
-public IndexOptions indexOptions() {
-return type.indexOptions();
-}
-
-public boolean storeTermVectors() {
-return type.storeTermVectors();
-}
-
-public boolean storeTermVectorOffsets() {
-return type.storeTermVectorOffsets();
-}
-
-public boolean storeTermVectorPositions() {
-return type.storeTermVectorPositions();
-}
-
 /** Prints a Field for human consumption. */
 @Override
 public String toString() {
@@ -329,7 +294,7 @@ public class Field implements IndexableField {
 }
 
 /** Returns FieldType for this field. */
-public FieldType getFieldType() {
+public IndexableFieldType fieldType() {
 return type;
 }
 }
@@ -18,8 +18,9 @@ package org.apache.lucene.document;
 */
 
 import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.IndexableFieldType;
 
-public class FieldType {
+public class FieldType implements IndexableFieldType {
 
 private boolean indexed;
 private boolean stored;
@@ -31,7 +32,7 @@ public class FieldType {
 private IndexOptions indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
 private boolean frozen;
 
-public FieldType(FieldType ref) {
+public FieldType(IndexableFieldType ref) {
 this.indexed = ref.indexed();
 this.stored = ref.stored();
 this.tokenized = ref.tokenized();
@@ -52,8 +53,11 @@ public class FieldType {
 }
 }
 
-/** Prevents future changes. Note that when a FieldType
-* is first bound to a Field instance, it is frozen. */
+/**
+* Prevents future changes. Note, it is recommended that this is called once
+* the FieldTypes's properties have been set, to prevent unintential state
+* changes.
+*/
 public void freeze() {
 this.frozen = true;
 }
@@ -20,6 +20,7 @@ import java.io.Reader;
 import java.util.Comparator;
 
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.index.values.PerDocFieldValues;
 import org.apache.lucene.index.values.ValueType;
 import org.apache.lucene.util.BytesRef;
@@ -84,11 +85,11 @@ public class IndexDocValuesField extends Field implements PerDocFieldValues {
 this(name, new FieldType());
 }
 
-public IndexDocValuesField(String name, FieldType type) {
+public IndexDocValuesField(String name, IndexableFieldType type) {
 this(name, type, null);
 }
 
-public IndexDocValuesField(String name, FieldType type, String value) {
+public IndexDocValuesField(String name, IndexableFieldType type, String value) {
 super(name, type);
 fieldsData = value;
 }
@@ -356,7 +357,7 @@ public class IndexDocValuesField extends Field implements PerDocFieldValues {
 if (field instanceof IndexDocValuesField) {
 return (IndexDocValuesField) field;
 }
-final IndexDocValuesField valField = new IndexDocValuesField(field.name(), field.getFieldType(), field.stringValue());
+final IndexDocValuesField valField = new IndexDocValuesField(field.name(), field.fieldType(), field.stringValue());
 switch (type) {
 case BYTES_FIXED_DEREF:
 case BYTES_FIXED_SORTED:
@@ -104,7 +104,7 @@ import org.apache.lucene.search.FieldCache; // javadocs
 * default value, 4, was selected for a reasonable tradeoff
 * of disk space consumption versus performance. You can
 * use the expert constructor {@link
-* #NumericField(String,int,FieldType)} if you'd
+* #NumericField(String,int, FieldType)} if you'd
 * like to change the value. Note that you must also
 * specify a congruent value when creating {@link
 * NumericRangeQuery} or {@link NumericRangeFilter}.
@@ -238,7 +238,7 @@ public final class NumericField extends Field {
 
 /** Returns a {@link NumericTokenStream} for indexing the numeric value. */
 public TokenStream tokenStreamValue() {
-if (!indexed()) return null;
+if (!type.indexed()) return null;
 if (numericTS == null) {
 // lazy init the TokenStream as it is heavy to instantiate
 // (attributes,...),
@@ -223,9 +223,7 @@ final class DocFieldProcessor extends DocConsumer {
 // needs to be more "pluggable" such that if I want
 // to have a new "thing" my Fields can do, I can
 // easily add it
-FieldInfo fi = fieldInfos.addOrUpdate(fieldName, field.indexed(), field.storeTermVectors(),
-field.storeTermVectorPositions(), field.storeTermVectorOffsets(),
-field.omitNorms(), false, field.indexOptions(), field.docValuesType());
+FieldInfo fi = fieldInfos.addOrUpdate(fieldName, field.fieldType(), false, field.docValuesType());
 
 fp = new DocFieldProcessorPerField(this, fi);
 fp.next = fieldHash[hashPos];
@@ -236,9 +234,7 @@ final class DocFieldProcessor extends DocConsumer {
 rehash();
 }
 } else {
-fieldInfos.addOrUpdate(fp.fieldInfo.name, field.indexed(), field.storeTermVectors(),
-field.storeTermVectorPositions(), field.storeTermVectorOffsets(),
-field.omitNorms(), false, field.indexOptions(), field.docValuesType());
+fieldInfos.addOrUpdate(fp.fieldInfo.name, field.fieldType(), false, field.docValuesType());
 }
 
 if (thisFieldGen != fp.lastGen) {
@@ -259,7 +255,7 @@ final class DocFieldProcessor extends DocConsumer {
 
 fp.addField(field);
 
-if (field.stored()) {
+if (field.fieldType().stored()) {
 fieldsWriter.addField(field, fp.fieldInfo);
 }
 final PerDocFieldValues docValues = field.docValues();
@@ -74,7 +74,7 @@ final class DocInverterPerField extends DocFieldConsumerPerField {
 // TODO FI: this should be "genericized" to querying
 // consumer if it wants to see this particular field
 // tokenized.
-if (field.indexed() && doInvert) {
+if (field.fieldType().indexed() && doInvert) {
 
 if (i > 0)
 fieldState.position += docState.analyzer == null ? 0 : docState.analyzer.getPositionIncrementGap(fieldInfo.name);
@@ -83,7 +83,7 @@ final class DocInverterPerField extends DocFieldConsumerPerField {
 // outside of indexer -- field should simply give us
 // a TokenStream, even for multi-valued fields
 
-if (!field.tokenized()) { // un-tokenized field
+if (!field.fieldType().tokenized()) { // un-tokenized field
 final String stringValue = field.stringValue();
 assert stringValue != null;
 final int valueLength = stringValue.length();
@@ -456,6 +456,12 @@ public final class FieldInfos implements Iterable<FieldInfo> {
 storeOffsetWithTermVector, omitNorms, storePayloads, indexOptions, docValues);
 }
 
+synchronized public FieldInfo addOrUpdate(String name, IndexableFieldType fieldType, boolean scorePayloads, ValueType docValues) {
+return addOrUpdateInternal(name, -1, fieldType.indexed(), fieldType.storeTermVectors(),
+fieldType.storeTermVectorPositions(), fieldType.storeTermVectorOffsets(), fieldType.omitNorms(), scorePayloads,
+fieldType.indexOptions(), docValues);
+}
+
 synchronized private FieldInfo addOrUpdateInternal(String name, int preferredFieldNumber, boolean isIndexed,
 boolean storeTermVector, boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
 boolean omitNorms, boolean storePayloads, IndexOptions indexOptions, ValueType docValues) {
@@ -217,14 +217,14 @@ final class FieldsWriter {
 
 int storedCount = 0;
 for (IndexableField field : doc) {
-if (field.stored()) {
+if (field.fieldType().stored()) {
 storedCount++;
 }
 }
 fieldsStream.writeVInt(storedCount);
 
 for (IndexableField field : doc) {
-if (field.stored()) {
+if (field.fieldType().stored()) {
 writeField(fieldInfos.fieldNumber(field.name()), field);
 }
 }
@@ -83,7 +83,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
 @Override
 boolean start(IndexableField[] fields, int count) {
 for(int i=0;i<count;i++) {
-if (fields[i].indexed()) {
+if (fields[i].fieldType().indexed()) {
 return true;
 }
 }
@@ -21,7 +21,6 @@ import java.io.Reader;
 
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.NumericField;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.values.PerDocFieldValues;
 import org.apache.lucene.index.values.ValueType;
 import org.apache.lucene.util.BytesRef;
@@ -48,9 +47,6 @@ public interface IndexableField {
 /** Field boost (you must pre-multiply in any doc boost). */
 public float boost();
 
-/* True if the field's value should be stored */
-public boolean stored();
-
 /* Non-null if this field has a binary value */
 public BytesRef binaryValue();
 
|
@ -74,27 +70,12 @@ public interface IndexableField {
|
||||||
/* Numeric value; only used if the field is numeric */
|
/* Numeric value; only used if the field is numeric */
|
||||||
public Number numericValue();
|
public Number numericValue();
|
||||||
|
|
||||||
/* True if this field should be indexed (inverted) */
|
/**
|
||||||
public boolean indexed();
|
* Returns the IndexableFieldType describing the properties of this field
|
||||||
|
*
|
||||||
/* True if this field's value should be analyzed */
|
* @return IndexableFieldType for this field
|
||||||
public boolean tokenized();
|
*/
|
||||||
|
public IndexableFieldType fieldType();
|
||||||
/* True if norms should not be indexed */
|
|
||||||
public boolean omitNorms();
|
|
||||||
|
|
||||||
/* {@link IndexOptions}, describing what should be
|
|
||||||
* recorded into the inverted index */
|
|
||||||
public IndexOptions indexOptions();
|
|
||||||
|
|
||||||
/* True if term vectors should be indexed */
|
|
||||||
public boolean storeTermVectors();
|
|
||||||
|
|
||||||
/* True if term vector offsets should be indexed */
|
|
||||||
public boolean storeTermVectorOffsets();
|
|
||||||
|
|
||||||
/* True if term vector positions should be indexed */
|
|
||||||
public boolean storeTermVectorPositions();
|
|
||||||
|
|
||||||
/* Non-null if doc values should be indexed */
|
/* Non-null if doc values should be indexed */
|
||||||
public PerDocFieldValues docValues();
|
public PerDocFieldValues docValues();
|
||||||
|
|
|
@@ -0,0 +1,48 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.FieldInfo.IndexOptions;
+
+public interface IndexableFieldType {
+
+  /* True if this field should be indexed (inverted) */
+  public boolean indexed();
+
+  /* True if the field's value should be stored */
+  public boolean stored();
+
+  /* True if this field's value should be analyzed */
+  public boolean tokenized();
+
+  /* True if term vectors should be indexed */
+  public boolean storeTermVectors();
+
+  /* True if term vector offsets should be indexed */
+  public boolean storeTermVectorOffsets();
+
+  /* True if term vector positions should be indexed */
+  public boolean storeTermVectorPositions();
+
+  /* True if norms should not be indexed */
+  public boolean omitNorms();
+
+  /* {@link IndexOptions}, describing what should be
+   * recorded into the inverted index */
+  public IndexOptions indexOptions();
+}
@@ -61,10 +61,10 @@ final class TermVectorsTermsWriterPerField extends TermsHashConsumerPerField {
 
 for(int i=0;i<count;i++) {
 IndexableField field = fields[i];
-if (field.indexed() && field.storeTermVectors()) {
+if (field.fieldType().indexed() && field.fieldType().storeTermVectors()) {
 doVectors = true;
-doVectorPositions |= field.storeTermVectorPositions();
+doVectorPositions |= field.fieldType().storeTermVectorPositions();
-doVectorOffsets |= field.storeTermVectorOffsets();
+doVectorOffsets |= field.fieldType().storeTermVectorOffsets();
 }
 }
@@ -205,15 +205,15 @@ class DocHelper {
 for (int i=0; i<fields.length; i++) {
 IndexableField f = fields[i];
 add(all,f);
-if (f.indexed()) add(indexed,f);
+if (f.fieldType().indexed()) add(indexed,f);
 else add(unindexed,f);
-if (f.storeTermVectors()) add(termvector,f);
+if (f.fieldType().storeTermVectors()) add(termvector,f);
-if (f.indexed() && !f.storeTermVectors()) add(notermvector,f);
+if (f.fieldType().indexed() && !f.fieldType().storeTermVectors()) add(notermvector,f);
-if (f.stored()) add(stored,f);
+if (f.fieldType().stored()) add(stored,f);
 else add(unstored,f);
-if (f.indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
+if (f.fieldType().indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
-if (f.omitNorms()) add(noNorms,f);
+if (f.fieldType().omitNorms()) add(noNorms,f);
-if (f.indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
+if (f.fieldType().indexOptions() == IndexOptions.DOCS_ONLY) add(noTf,f);
 //if (f.isLazy()) add(lazy, f);
 }
 }
@@ -433,8 +433,7 @@ public class _TestUtil {
 /** Adds field info for a Document. */
 public static void add(Document doc, FieldInfos fieldInfos) {
 for (IndexableField field : doc) {
-fieldInfos.addOrUpdate(field.name(), field.indexed(), field.storeTermVectors(), field.storeTermVectorPositions(),
-field.storeTermVectorOffsets(), field.omitNorms(), false, field.indexOptions(), field.docValuesType());
+fieldInfos.addOrUpdate(field.name(), field.fieldType(), false, field.docValuesType());
 }
 }
 
@@ -515,7 +514,7 @@ public class _TestUtil {
 Field field1 = (Field) f;
 
 Field field2 = new Field(field1.name(),
-field1.getFieldType(),
+field1.fieldType(),
 field1.stringValue()
 );
 doc2.add(field2);
@@ -52,9 +52,9 @@ public class TestDocument extends LuceneTestCase {
 assertEquals(2, doc.getFields().size());
 
 assertTrue(binaryFld.binaryValue() != null);
-assertTrue(binaryFld.stored());
+assertTrue(binaryFld.fieldType().stored());
-assertFalse(binaryFld.indexed());
+assertFalse(binaryFld.fieldType().indexed());
-assertFalse(binaryFld.tokenized());
+assertFalse(binaryFld.fieldType().tokenized());
 
 String binaryTest = doc.getBinaryValue("binary").utf8ToString();
 assertTrue(binaryTest.equals(binaryVal));
@@ -303,10 +303,10 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 
 for (FieldInfo fi : fis) {
 Field expected = getField(Integer.parseInt(fi.name));
-assertEquals(expected.indexed(), fi.isIndexed);
+assertEquals(expected.fieldType().indexed(), fi.isIndexed);
-assertEquals(expected.storeTermVectors(), fi.storeTermVector);
+assertEquals(expected.fieldType().storeTermVectors(), fi.storeTermVector);
-assertEquals(expected.storeTermVectorPositions(), fi.storePositionWithTermVector);
+assertEquals(expected.fieldType().storeTermVectorPositions(), fi.storePositionWithTermVector);
-assertEquals(expected.storeTermVectorOffsets(), fi.storeOffsetWithTermVector);
+assertEquals(expected.fieldType().storeTermVectorOffsets(), fi.storeOffsetWithTermVector);
 }
 }
 
@@ -77,12 +77,12 @@ public class TestDocumentWriter extends LuceneTestCase {
 IndexableField [] fields = doc.getFields("textField2");
 assertTrue(fields != null && fields.length == 1);
 assertTrue(fields[0].stringValue().equals(DocHelper.FIELD_2_TEXT));
-assertTrue(fields[0].storeTermVectors());
+assertTrue(fields[0].fieldType().storeTermVectors());
 
 fields = doc.getFields("textField1");
 assertTrue(fields != null && fields.length == 1);
 assertTrue(fields[0].stringValue().equals(DocHelper.FIELD_1_TEXT));
-assertFalse(fields[0].storeTermVectors());
+assertFalse(fields[0].fieldType().storeTermVectors());
 
 fields = doc.getFields("keyField");
 assertTrue(fields != null && fields.length == 1);
@@ -77,28 +77,28 @@ public class TestFieldsReader extends LuceneTestCase {
 
 Field field = (Field) doc.getField(DocHelper.TEXT_FIELD_2_KEY);
 assertTrue(field != null);
-assertTrue(field.storeTermVectors() == true);
+assertTrue(field.fieldType().storeTermVectors());
 
-assertTrue(field.storeTermVectorOffsets() == true);
+assertTrue(field.fieldType().storeTermVectorOffsets());
-assertTrue(field.storeTermVectorPositions() == true);
+assertTrue(field.fieldType().storeTermVectorPositions());
-assertTrue(field.omitNorms() == false);
+assertFalse(field.fieldType().omitNorms());
-assertTrue(field.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
 
 field = (Field) doc.getField(DocHelper.TEXT_FIELD_3_KEY);
 assertTrue(field != null);
-assertTrue(field.storeTermVectors() == false);
+assertFalse(field.fieldType().storeTermVectors());
-assertTrue(field.storeTermVectorOffsets() == false);
+assertFalse(field.fieldType().storeTermVectorOffsets());
-assertTrue(field.storeTermVectorPositions() == false);
+assertFalse(field.fieldType().storeTermVectorPositions());
-assertTrue(field.omitNorms() == true);
+assertTrue(field.fieldType().omitNorms());
-assertTrue(field.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
 
 field = (Field) doc.getField(DocHelper.NO_TF_KEY);
 assertTrue(field != null);
-assertTrue(field.storeTermVectors() == false);
+assertFalse(field.fieldType().storeTermVectors());
-assertTrue(field.storeTermVectorOffsets() == false);
+assertFalse(field.fieldType().storeTermVectorOffsets());
-assertTrue(field.storeTermVectorPositions() == false);
+assertFalse(field.fieldType().storeTermVectorPositions());
-assertTrue(field.omitNorms() == false);
+assertFalse(field.fieldType().omitNorms());
-assertTrue(field.indexOptions() == IndexOptions.DOCS_ONLY);
+assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_ONLY);
 
 DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(DocHelper.TEXT_FIELD_3_KEY);
 reader.document(0, visitor);
@@ -308,8 +308,8 @@ public class TestFieldsReader extends LuceneTestCase {
 w.addDocument(doc);
 IndexReader r = w.getReader();
 w.close();
-assertFalse(r.document(0).getField("field").indexed());
+assertFalse(r.document(0).getField("field").fieldType().indexed());
-assertTrue(r.document(0).getField("field2").indexed());
+assertTrue(r.document(0).getField("field2").fieldType().indexed());
 r.close();
 dir.close();
 }
@@ -1257,7 +1257,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 // random TV
 try {
 w.addDocument(doc);
-assertFalse(field.storeTermVectors());
+assertFalse(field.fieldType().storeTermVectors());
 } catch (RuntimeException e) {
 assertTrue(e.getMessage().startsWith(FailOnTermVectors.EXC_MSG));
 }
@@ -1278,7 +1278,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 // random TV
 try {
 w.addDocument(doc);
-assertFalse(field.storeTermVectors());
+assertFalse(field.fieldType().storeTermVectors());
 } catch (RuntimeException e) {
 assertTrue(e.getMessage().startsWith(FailOnTermVectors.EXC_MSG));
 }
@@ -181,8 +181,6 @@ public class TestIndexWriterMerging extends LuceneTestCase
 
 Document document = new Document();
 
-document = new Document();
-
 FieldType customType = new FieldType();
 customType.setStored(true);
 
@@ -237,8 +235,6 @@ public class TestIndexWriterMerging extends LuceneTestCase
 
 writer.setInfoStream(VERBOSE ? System.out : null);
 
-Document document = new Document();
-
 FieldType customType = new FieldType();
 customType.setStored(true);
 
@@ -248,7 +244,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
 customType1.setStoreTermVectorPositions(true);
 customType1.setStoreTermVectorOffsets(true);
 
-document = new Document();
+Document document = new Document();
 Field storedField = newField("stored", "stored", customType);
 document.add(storedField);
 Field termVectorField = newField("termVector", "termVector", customType1);
@@ -44,6 +44,47 @@ public class TestIndexableField extends LuceneTestCase {
 private class MyField implements IndexableField {
 
 private final int counter;
+private final IndexableFieldType fieldType = new IndexableFieldType() {
+@Override
+public boolean indexed() {
+return (counter % 10) != 3;
+}
+
+@Override
+public boolean stored() {
+return (counter & 1) == 0 || (counter % 10) == 3;
+}
+
+@Override
+public boolean tokenized() {
+return true;
+}
+
+@Override
+public boolean storeTermVectors() {
+return counter % 2 == 1 && counter % 10 != 9;
+}
+
+@Override
+public boolean storeTermVectorOffsets() {
+return counter % 2 == 1 && counter % 10 != 9;
+}
+
+@Override
+public boolean storeTermVectorPositions() {
+return counter % 2 == 1 && counter % 10 != 9;
+}
+
+@Override
+public boolean omitNorms() {
+return false;
+}
+
+@Override
+public FieldInfo.IndexOptions indexOptions() {
+return FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
+}
+};
 
 public MyField(int counter) {
 this.counter = counter;
@@ -58,11 +99,6 @@ public class TestIndexableField extends LuceneTestCase {
 public float boost() {
 return 1.0f + random.nextFloat();
 }
 
-@Override
-public boolean stored() {
-return (counter & 1) == 0 || (counter % 10) == 3;
-}
-
 @Override
 public BytesRef binaryValue() {
@@ -121,42 +157,11 @@ public class TestIndexableField extends LuceneTestCase {
 return counter;
 }
 
-// If this returns true then we index this field:
 @Override
-public boolean indexed() {
-return (counter % 10) != 3;
+public IndexableFieldType fieldType() {
+return fieldType;
 }
-
-@Override
-public boolean tokenized() {
-return true;
-}
-
-@Override
-public boolean omitNorms() {
-return false;
-}
-
-@Override
-public FieldInfo.IndexOptions indexOptions() {
-return FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-}
-
-@Override
-public boolean storeTermVectors() {
-return counter % 2 == 1 && counter%10 != 9;
-}
-
-@Override
-public boolean storeTermVectorOffsets() {
-return counter % 2 == 1 && counter%10 != 9;
-}
-
-@Override
-public boolean storeTermVectorPositions() {
-return counter % 2 == 1 && counter%10 != 9;
-}
-
 // TODO: randomly enable doc values
 @Override
 public PerDocFieldValues docValues() {
@@ -175,8 +175,8 @@ public class TestSegmentReader extends LuceneTestCase {
 // test omit norms
 for (int i=0; i<DocHelper.fields.length; i++) {
 IndexableField f = DocHelper.fields[i];
-if (f.indexed()) {
+if (f.fieldType().indexed()) {
-assertEquals(reader.hasNorms(f.name()), !f.omitNorms());
+assertEquals(reader.hasNorms(f.name()), !f.fieldType().omitNorms());
 assertEquals(reader.hasNorms(f.name()), !DocHelper.noNorms.containsKey(f.name()));
 if (!reader.hasNorms(f.name())) {
 // test for norms of null
@@ -391,8 +391,6 @@ public class TestTermVectorsWriter extends LuceneTestCase {
 new SerialMergeScheduler()).setMergePolicy(new LogDocMergePolicy()));
 
 Document document = new Document();
 
-document = new Document();
 FieldType customType = new FieldType();
 customType.setStored(true);
 
@@ -110,7 +110,7 @@ public class TestTermVectors extends LuceneTestCase {
 public void testTermVectorsFieldOrder() throws IOException {
 Directory dir = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random, dir, new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
-Document doc = new Document();;
+Document doc = new Document();
 FieldType ft = new FieldType(TextField.TYPE_STORED);
 ft.setStoreTermVectors(true);
 ft.setStoreTermVectorOffsets(true);
@@ -69,7 +69,7 @@ public class ReadTokensTask extends PerfTask {
 Analyzer analyzer = getRunData().getAnalyzer();
 int tokenCount = 0;
 for(final IndexableField field : fields) {
-if (!field.tokenized() || field instanceof NumericField) continue;
+if (!field.fieldType().tokenized() || field instanceof NumericField) continue;
 
 final TokenStream stream;
 final TokenStream streamValue = field.tokenStreamValue();
@@ -136,28 +136,28 @@ public class DocMakerTest extends BenchmarkTestCase {
 
 // Don't set anything, use the defaults
 doc = createTestNormsDocument(false, false, false, false);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());
 
 // Set norms to false
 doc = createTestNormsDocument(true, false, false, false);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());
 
 // Set norms to true
 doc = createTestNormsDocument(true, true, false, false);
-assertFalse(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
+assertFalse(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());
 
 // Set body norms to false
 doc = createTestNormsDocument(false, false, true, false);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
-assertTrue(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());
 
 // Set body norms to true
 doc = createTestNormsDocument(false, false, true, true);
-assertTrue(doc.getField(DocMaker.TITLE_FIELD).omitNorms());
+assertTrue(doc.getField(DocMaker.TITLE_FIELD).fieldType().omitNorms());
-assertFalse(doc.getField(DocMaker.BODY_FIELD).omitNorms());
+assertFalse(doc.getField(DocMaker.BODY_FIELD).fieldType().omitNorms());
 }
 
 }
@@ -168,14 +168,14 @@ public class LukeRequestHandler extends RequestHandlerBase
   private static String getFieldFlags( IndexableField f )
   {
     StringBuilder flags = new StringBuilder();
-    flags.append( (f != null && f.indexed()) ? FieldFlag.INDEXED.getAbbreviation() : '-' );
-    flags.append( (f != null && f.tokenized()) ? FieldFlag.TOKENIZED.getAbbreviation() : '-' );
-    flags.append( (f != null && f.stored()) ? FieldFlag.STORED.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().indexed()) ? FieldFlag.INDEXED.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().tokenized()) ? FieldFlag.TOKENIZED.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().stored()) ? FieldFlag.STORED.getAbbreviation() : '-' );
     flags.append( (false) ? FieldFlag.MULTI_VALUED.getAbbreviation() : '-' ); // SchemaField Specific
-    flags.append( (f != null && f.storeTermVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-' );
-    flags.append( (f != null && f.storeTermVectorOffsets()) ? FieldFlag.TERM_VECTOR_OFFSET.getAbbreviation() : '-' );
-    flags.append( (f != null && f.storeTermVectorPositions()) ? FieldFlag.TERM_VECTOR_POSITION.getAbbreviation() : '-' );
-    flags.append( (f != null && f.omitNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().storeTermVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().storeTermVectorOffsets()) ? FieldFlag.TERM_VECTOR_OFFSET.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().storeTermVectorPositions()) ? FieldFlag.TERM_VECTOR_POSITION.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().omitNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-' );
     flags.append( (f != null && f.getClass().getSimpleName().equals("LazyField")) ? FieldFlag.LAZY.getAbbreviation() : '-' );
     flags.append( (f != null && f.binaryValue()!=null) ? FieldFlag.BINARY.getAbbreviation() : '-' );
     flags.append( (false) ? FieldFlag.SORT_MISSING_FIRST.getAbbreviation() : '-' ); // SchemaField Specific
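getFieldFlags() above repeats the f != null guard on every append. A condensed sketch of the same idea, checking for null once and then reading every index-time property through fieldType(); the method name and single-letter flags are placeholders, not Solr's FieldFlag abbreviations:

    import org.apache.lucene.index.IndexableField;

    // Illustrative only: summarize a field's index-time flags as a short string.
    static String describeFlags(IndexableField f) {
      if (f == null) {
        return "------";
      }
      StringBuilder flags = new StringBuilder();
      flags.append(f.fieldType().indexed()          ? 'I' : '-');
      flags.append(f.fieldType().tokenized()        ? 'T' : '-');
      flags.append(f.fieldType().stored()           ? 'S' : '-');
      flags.append(f.fieldType().storeTermVectors() ? 'V' : '-');
      flags.append(f.fieldType().omitNorms()        ? 'O' : '-');
      flags.append(f.binaryValue() != null          ? 'B' : '-');
      return flags.toString();
    }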
@@ -264,7 +264,7 @@ public class LukeRequestHandler extends RequestHandlerBase
       f.add( "docFreq", t.text()==null ? 0 : reader.docFreq( t ) ); // this can be 0 for non-indexed fields
 
       // If we have a term vector, return that
-      if( field.storeTermVectors() ) {
+      if( field.fieldType().storeTermVectors() ) {
         try {
           TermFreqVector v = reader.getTermFreqVector( docId, field.name() );
           if( v != null ) {
@@ -1,4 +1,6 @@
-/**
+package org.apache.solr.handler.component;
+
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -15,8 +17,6 @@
  * limitations under the License.
  */
 
-package org.apache.solr.handler.component;
-
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
@@ -189,7 +189,7 @@ public class RealTimeGetComponent extends SearchComponent
       // copy the stored fields only
       Document out = new Document();
       for (IndexableField f : doc.getFields()) {
-        if (f.stored()) {
+        if (f.fieldType().stored()) {
           out.add(f);
         }
       }
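The RealTimeGetComponent hunk filters a document down to its stored fields before returning it. The same idiom in isolation, using only calls that appear in this commit; the helper name is made up for the example:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexableField;

    // Illustrative helper: copy only the stored fields of a document.
    static Document storedFieldsOnly(Document in) {
      Document out = new Document();
      for (IndexableField f : in.getFields()) {
        if (f.fieldType().stored()) {   // was f.stored() before this commit
          out.add(f);
        }
      }
      return out;
    }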
@@ -492,7 +492,7 @@ public abstract class FieldType extends FieldProperties {
   public void setQueryAnalyzer(Analyzer analyzer) {
     SolrException e = new SolrException
       (ErrorCode.SERVER_ERROR,
        "FieldType: " + this.getClass().getSimpleName() +
        " (" + typeName + ") does not support specifying an analyzer");
     SolrException.logOnce(log,null,e);
     throw e;
@@ -17,6 +17,7 @@
 
 package org.apache.solr.schema;
 
+import org.apache.lucene.document.FieldType;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.VectorValueSource;
 import org.apache.lucene.index.IndexableField;
@@ -87,7 +88,7 @@ public class PointType extends CoordinateFieldType implements SpatialQueryable {
 
     if (field.stored()) {
       String storedVal = externalVal; // normalize or not?
-      org.apache.lucene.document.FieldType customType = new org.apache.lucene.document.FieldType();
+      FieldType customType = new FieldType();
       customType.setStored(true);
       f[f.length - 1] = createField(field.getName(), storedVal, customType, boost);
     }
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.schema;
 
+import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.NumericField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.*;
@@ -63,7 +64,7 @@ import java.util.Date;
  * @see org.apache.lucene.search.NumericRangeQuery
  * @since solr 1.4
  */
-public class TrieField extends FieldType {
+public class TrieField extends org.apache.solr.schema.FieldType {
   public static final int DEFAULT_PRECISION_STEP = 8;
 
   protected int precisionStepArg = TrieField.DEFAULT_PRECISION_STEP; // the one passed in or defaulted
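Because Solr already has org.apache.solr.schema.FieldType, importing Lucene's org.apache.lucene.document.FieldType (as the two import hunks above do) makes the short name ambiguous inside TrieField and PointType. The commit resolves this by letting the unqualified name mean the Lucene class and spelling the Solr base class out in full. A compile-level sketch of that arrangement, with a made-up subclass name and declared abstract so the Solr base class's abstract methods need not be implemented here:

    package org.apache.solr.schema;

    import org.apache.lucene.document.FieldType;   // unqualified "FieldType" now means the Lucene class

    // Hypothetical field type following the naming pattern TrieField uses in this commit.
    abstract class ExampleNumericLikeField extends org.apache.solr.schema.FieldType {

      // Build the Lucene-side indexing options for one field instance.
      FieldType luceneType(boolean stored, boolean indexed) {
        FieldType ft = new FieldType();
        ft.setStored(stored);
        ft.setTokenized(true);
        ft.setIndexed(indexed);
        return ft;
      }
    }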
@@ -482,7 +483,7 @@ public class TrieField extends FieldType {
       return null;
     }
 
-    org.apache.lucene.document.FieldType ft = new org.apache.lucene.document.FieldType();
+    FieldType ft = new FieldType();
     ft.setStored(stored);
     ft.setTokenized(true);
     ft.setIndexed(indexed);
@@ -545,7 +546,7 @@ public class TrieField extends FieldType {
    * Returns null if no prefix or prefix not needed, or the prefix of the main value of a trie field
    * that indexes multiple precisions per value.
    */
-  public static String getMainValuePrefix(FieldType ft) {
+  public static String getMainValuePrefix(org.apache.solr.schema.FieldType ft) {
     if (ft instanceof TrieDateField)
       ft = ((TrieDateField) ft).wrappedField;
     if (ft instanceof TrieField) {
@@ -336,7 +336,7 @@ public class DocumentBuilder {
   public SolrDocument loadStoredFields( SolrDocument doc, Document luceneDoc )
   {
     for( IndexableField field : luceneDoc) {
-      if( field.stored() ) {
+      if( field.fieldType().stored() ) {
         SchemaField sf = schema.getField( field.name() );
         if( !schema.isCopyFieldTarget( sf ) ) {
           doc.addField( field.name(), sf.getType().toObject( field ) );
@@ -363,27 +363,27 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {
     f = ischema.getField("test_basictv");
     luf = f.createField("test", 0f);
     assertTrue(f.storeTermVector());
-    assertTrue(luf.storeTermVectors());
+    assertTrue(luf.fieldType().storeTermVectors());
 
     f = ischema.getField("test_notv");
     luf = f.createField("test", 0f);
     assertTrue(!f.storeTermVector());
-    assertTrue(!luf.storeTermVectors());
+    assertTrue(!luf.fieldType().storeTermVectors());
 
     f = ischema.getField("test_postv");
     luf = f.createField("test", 0f);
     assertTrue(f.storeTermVector() && f.storeTermPositions());
-    assertTrue(luf.storeTermVectorPositions());
+    assertTrue(luf.fieldType().storeTermVectorPositions());
 
     f = ischema.getField("test_offtv");
     luf = f.createField("test", 0f);
     assertTrue(f.storeTermVector() && f.storeTermOffsets());
-    assertTrue(luf.storeTermVectorOffsets());
+    assertTrue(luf.fieldType().storeTermVectorOffsets());
 
     f = ischema.getField("test_posofftv");
     luf = f.createField("test", 0f);
     assertTrue(f.storeTermVector() && f.storeTermPositions() && f.storeTermOffsets());
-    assertTrue(luf.storeTermVectorOffsets() && luf.storeTermVectorPositions());
+    assertTrue(luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions());
 
   }
 
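The assertions above pair each Solr schema flag (f.storeTermVector(), f.storeTermPositions(), f.storeTermOffsets()) with the corresponding flag on the generated Lucene field, now read through fieldType(). On the Lucene side those flags come from the FieldType the field was built with; a small sketch using only setters that appear elsewhere in this diff (the values are illustrative, and the Field construction itself is omitted since its signature is outside this hunk):

    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.TextField;

    FieldType ft = new FieldType(TextField.TYPE_STORED);
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorOffsets(true);

    // Any Field created with ft reports the same flags through IndexableField.fieldType():
    // field.fieldType().storeTermVectors()       -> true
    // field.fieldType().storeTermVectorOffsets() -> true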