Commit lucene-3312-patch-06.patch (merged to current trunk and formatting changes in IR removed)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene3312@1357938 13f79535-47bb-0310-9956-ffa450edef68
Uwe Schindler 2012-07-05 21:37:19 +00:00
parent 1b8048415a
commit 1db6d571eb
52 changed files with 611 additions and 169 deletions

View File

@ -28,21 +28,23 @@ import org.apache.lucene.document.LongDocValuesField;
import org.apache.lucene.document.PackedLongDocValuesField;
import org.apache.lucene.document.ShortDocValuesField;
import org.apache.lucene.document.SortedBytesDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StraightBytesDocValuesField;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
/**
* Abstract API that consumes {@link IndexableField}s.
* Abstract API that consumes {@link StorableField}s.
* {@link DocValuesConsumer} are always associated with a specific field and
* segments. Concrete implementations of this API write the given
* {@link IndexableField} into an implementation-specific format depending on
* {@link StorableField} into an implementation-specific format depending on
* the fields meta-data.
*
* @lucene.experimental
@ -53,7 +55,7 @@ public abstract class DocValuesConsumer {
protected abstract Type getType();
/**
* Adds the given {@link IndexableField} instance to this
* Adds the given {@link StorableField} instance to this
* {@link DocValuesConsumer}
*
* @param docID
@ -64,7 +66,7 @@ public abstract class DocValuesConsumer {
* @throws IOException
* if an {@link IOException} occurs
*/
public abstract void add(int docID, IndexableField value)
public abstract void add(int docID, StorableField value)
throws IOException;
/**
@ -73,7 +75,7 @@ public abstract class DocValuesConsumer {
* @param docCount
* the total number of documents in this {@link DocValuesConsumer}.
* Must be greater than or equal the last given docID to
* {@link #add(int, IndexableField)}.
* {@link #add(int, StorableField)}.
* @throws IOException
*/
public abstract void finish(int docCount) throws IOException;
@ -136,7 +138,7 @@ public abstract class DocValuesConsumer {
assert source != null;
int docID = docBase;
final Type type = getType();
final Field scratchField;
final StoredField scratchField;
switch(type) {
case VAR_INTS:
scratchField = new PackedLongDocValuesField("", (long) 0);
@ -202,7 +204,7 @@ public abstract class DocValuesConsumer {
* ID must always be greater than the previous ID or <tt>0</tt> if called the
* first time.
*/
protected void mergeDoc(Field scratchField, Source source, int docID, int sourceDoc)
protected void mergeDoc(StoredField scratchField, Source source, int docID, int sourceDoc)
throws IOException {
switch(getType()) {
case BYTES_FIXED_DEREF:
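
Illustration (not part of the patch): after this change the doc-values field classes all extend StoredField, so any of them can be passed to add(int, StorableField). A minimal sketch using only classes that appear in this commit; field names and values are invented.

import org.apache.lucene.document.PackedLongDocValuesField;
import org.apache.lucene.document.StraightBytesDocValuesField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.util.BytesRef;

class DocValuesFieldSketch {
  static StorableField[] exampleDocValuesFields() {
    // Both field classes extend StoredField after this patch, so they are StorableFields.
    StorableField packed = new PackedLongDocValuesField("popularity", 42L);
    StorableField bytes = new StraightBytesDocValuesField("payload", new BytesRef("abc"));
    return new StorableField[] { packed, bytes };
  }
}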

View File

@ -1,5 +1,17 @@
package org.apache.lucene.codecs;
import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.util.Bits;
/**
* Copyright 2004 The Apache Software Foundation
*
@ -33,7 +45,7 @@ import org.apache.lucene.util.Bits;
* <ol>
* <li>For every document, {@link #startDocument(int)} is called,
* informing the Codec how many fields will be written.
* <li>{@link #writeField(FieldInfo, IndexableField)} is called for
* <li>{@link #writeField(FieldInfo, StorableField)} is called for
* each field in the document.
* <li>After all documents have been written, {@link #finish(FieldInfos, int)}
* is called for verification/sanity-checks.
@ -45,14 +57,14 @@ import org.apache.lucene.util.Bits;
public abstract class StoredFieldsWriter implements Closeable {
/** Called before writing the stored fields of the document.
* {@link #writeField(FieldInfo, IndexableField)} will be called
* {@link #writeField(FieldInfo, StorableField)} will be called
* <code>numStoredFields</code> times. Note that this is
* called even if the document has no stored fields, in
* this case <code>numStoredFields</code> will be zero. */
public abstract void startDocument(int numStoredFields) throws IOException;
/** Writes a single stored field. */
public abstract void writeField(FieldInfo info, IndexableField field) throws IOException;
public abstract void writeField(FieldInfo info, StorableField field) throws IOException;
/** Aborts writing entirely, implementation should remove
* any partially-written files, etc. */
@ -69,7 +81,7 @@ public abstract class StoredFieldsWriter implements Closeable {
/** Merges in the stored fields from the readers in
* <code>mergeState</code>. The default implementation skips
* over deleted documents, and uses {@link #startDocument(int)},
* {@link #writeField(FieldInfo, IndexableField)}, and {@link #finish(FieldInfos, int)},
* {@link #writeField(FieldInfo, StorableField)}, and {@link #finish(int)},
* returning the number of documents that were written.
* Implementations can override this method for more sophisticated
* merging (bulk-byte copying, etc). */
@ -89,7 +101,7 @@ public abstract class StoredFieldsWriter implements Closeable {
// on the fly?
// NOTE: it's very important to first assign to doc then pass it to
// fieldsWriter.addDocument; see LUCENE-1282
Document doc = reader.document(i);
StoredDocument doc = reader.document(i);
addDocument(doc, mergeState.fieldInfos);
docCount++;
mergeState.checkAbort.work(300);
@ -100,20 +112,16 @@ public abstract class StoredFieldsWriter implements Closeable {
}
/** sugar method for startDocument() + writeField() for every stored field in the document */
protected final void addDocument(Iterable<? extends IndexableField> doc, FieldInfos fieldInfos) throws IOException {
protected final void addDocument(Iterable<? extends StorableField> doc, FieldInfos fieldInfos) throws IOException {
int storedCount = 0;
for (IndexableField field : doc) {
if (field.fieldType().stored()) {
for (StorableField field : doc) {
storedCount++;
}
}
startDocument(storedCount);
for (IndexableField field : doc) {
if (field.fieldType().stored()) {
for (StorableField field : doc) {
writeField(fieldInfos.fieldInfo(field.name()), field);
}
}
}
}

View File

@ -22,6 +22,7 @@ import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
@ -29,6 +30,7 @@ import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -131,7 +133,7 @@ public final class Lucene40StoredFieldsWriter extends StoredFieldsWriter {
IndexFileNames.segmentFileName(segment, "", FIELDS_INDEX_EXTENSION));
}
public void writeField(FieldInfo info, IndexableField field) throws IOException {
public void writeField(FieldInfo info, StorableField field) throws IOException {
fieldsStream.writeVInt(info.number);
int bits = 0;
final BytesRef bytes;
@ -297,7 +299,7 @@ public final class Lucene40StoredFieldsWriter extends StoredFieldsWriter {
// on the fly?
// NOTE: it's very important to first assign to doc then pass it to
// fieldsWriter.addDocument; see LUCENE-1282
Document doc = reader.document(j);
StoredDocument doc = reader.document(j);
addDocument(doc, mergeState.fieldInfos);
docCount++;
mergeState.checkAbort.work(300);
@ -324,7 +326,7 @@ public final class Lucene40StoredFieldsWriter extends StoredFieldsWriter {
for (; docCount < maxDoc; docCount++) {
// NOTE: it's very important to first assign to doc then pass it to
// fieldsWriter.addDocument; see LUCENE-1282
Document doc = reader.document(docCount);
StoredDocument doc = reader.document(docCount);
addDocument(doc, mergeState.fieldInfos);
mergeState.checkAbort.work(300);
}

View File

@ -30,6 +30,7 @@ import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
@ -420,7 +421,7 @@ public final class Bytes {
}
@Override
public void add(int docID, IndexableField value) throws IOException {
public void add(int docID, StorableField value) throws IOException {
BytesRef bytes = value.binaryValue();
assert bytes != null;
if (bytes.length == 0) { // default value - skip it

View File

@ -22,12 +22,12 @@ import java.io.IOException;
import org.apache.lucene.codecs.lucene40.values.Bytes.BytesReaderBase;
import org.apache.lucene.codecs.lucene40.values.Bytes.BytesSourceBase;
import org.apache.lucene.codecs.lucene40.values.Bytes.BytesWriterBase;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StraightBytesDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -74,7 +74,7 @@ class FixedStraightBytesImpl {
}
@Override
public void add(int docID, IndexableField value) throws IOException {
public void add(int docID, StorableField value) throws IOException {
final BytesRef bytes = value.binaryValue();
assert bytes != null;
assert lastDocID < docID;
@ -201,7 +201,7 @@ class FixedStraightBytesImpl {
}
@Override
protected void mergeDoc(Field scratchField, Source source, int docID, int sourceDoc) throws IOException {
protected void mergeDoc(StoredField scratchField, Source source, int docID, int sourceDoc) throws IOException {
assert lastDocID < docID;
setMergeBytes(source, sourceDoc);
if (size == -1) {

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -88,7 +89,7 @@ public class Floats {
}
@Override
public void add(int docID, IndexableField value) throws IOException {
public void add(int docID, StorableField value) throws IOException {
template.toBytes(value.numericValue().doubleValue(), bytesRef);
bytesSpareField.setBytesValue(bytesRef);
super.add(docID, bytesSpareField);

View File

@ -25,6 +25,7 @@ import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -112,7 +113,7 @@ public final class Ints {
}
@Override
public void add(int docID, IndexableField value) throws IOException {
public void add(int docID, StorableField value) throws IOException {
template.toBytes(value.numericValue().longValue(), bytesRef);
bytesSpareField.setBytesValue(bytesRef);
super.add(docID, bytesSpareField);

View File

@ -26,6 +26,7 @@ import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -126,7 +127,7 @@ class PackedIntValues {
}
@Override
public void add(int docID, IndexableField docValue) throws IOException {
public void add(int docID, StorableField docValue) throws IOException {
final long v = docValue.numericValue().longValue();
assert lastDocId < docID;
if (!started) {

View File

@ -22,11 +22,11 @@ import java.io.IOException;
import org.apache.lucene.codecs.lucene40.values.Bytes.BytesReaderBase;
import org.apache.lucene.codecs.lucene40.values.Bytes.BytesSourceBase;
import org.apache.lucene.codecs.lucene40.values.Bytes.BytesWriterBase;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@ -86,7 +86,7 @@ class VarStraightBytesImpl {
}
@Override
public void add(int docID, IndexableField value) throws IOException {
public void add(int docID, StorableField value) throws IOException {
final BytesRef bytes = value.binaryValue();
assert bytes != null;
assert !merge;
@ -156,7 +156,7 @@ class VarStraightBytesImpl {
}
@Override
protected void mergeDoc(Field scratchField, Source source, int docID, int sourceDoc) throws IOException {
protected void mergeDoc(StoredField scratchField, Source source, int docID, int sourceDoc) throws IOException {
assert merge;
assert lastDocID < docID;
source.getBytes(sourceDoc, bytesRef);

View File

@ -21,7 +21,7 @@ import org.apache.lucene.codecs.DocValuesArraySource;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
@ -72,7 +72,7 @@ public class SimpleTextDocValuesConsumer extends DocValuesConsumer {
}
@Override
public void add(int docID, IndexableField value) throws IOException {
public void add(int docID, StorableField value) throws IOException {
assert docID >= 0;
final int ord, vSize;
switch (type) {

View File

@ -23,7 +23,7 @@ import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
@ -89,7 +89,7 @@ public class SimpleTextStoredFieldsWriter extends StoredFieldsWriter {
}
@Override
public void writeField(FieldInfo info, IndexableField field) throws IOException {
public void writeField(FieldInfo info, StorableField field) throws IOException {
write(FIELD);
write(Integer.toString(info.number));
newLine();

View File

@ -36,7 +36,7 @@ import org.apache.lucene.index.DocValues;
* @see DocValues for further information
* */
public class ByteDocValuesField extends Field {
public class ByteDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -41,7 +41,7 @@ import org.apache.lucene.util.BytesRef;
* @see DocValues for further information
* */
public class DerefBytesDocValuesField extends Field {
public class DerefBytesDocValuesField extends StoredField {
// TODO: ideally indexer figures out var vs fixed on its own!?
public static final FieldType TYPE_FIXED_LEN = new FieldType();

View File

@ -19,11 +19,16 @@ package org.apache.lucene.document;
import java.util.*;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexReader; // for javadoc
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.search.IndexSearcher; // for javadoc
import org.apache.lucene.search.ScoreDoc; // for javadoc
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FilterIterator;
import com.google.common.collect.AbstractIterator;
/** Documents are the unit of indexing and search.
*
@ -38,18 +43,15 @@ import org.apache.lucene.util.BytesRef;
* ScoreDoc#doc} or {@link IndexReader#document(int)}.
*/
public final class Document implements Iterable<IndexableField> {
public final class Document implements IndexDocument{
private final List<IndexableField> fields = new ArrayList<IndexableField>();
private final List<Field> fields = new ArrayList<Field>();
//private final List<Field> fields
/** Constructs a new document with no fields. */
public Document() {}
@Override
public Iterator<IndexableField> iterator() {
return fields.iterator();
}
/**
* <p>Adds a field to a document. Several fields may be added with
* the same name. In this case, if the fields are indexed, their text is
@ -60,7 +62,7 @@ public final class Document implements Iterable<IndexableField> {
* a document has to be deleted from an index and a new changed version of that
* document has to be added.</p>
*/
public final void add(IndexableField field) {
public final void add(Field field) {
fields.add(field);
}
@ -75,9 +77,9 @@ public final class Document implements Iterable<IndexableField> {
* document has to be added.</p>
*/
public final void removeField(String name) {
Iterator<IndexableField> it = fields.iterator();
Iterator<Field> it = fields.iterator();
while (it.hasNext()) {
IndexableField field = it.next();
Field field = it.next();
if (field.name().equals(name)) {
it.remove();
return;
@ -95,9 +97,9 @@ public final class Document implements Iterable<IndexableField> {
* document has to be added.</p>
*/
public final void removeFields(String name) {
Iterator<IndexableField> it = fields.iterator();
Iterator<Field> it = fields.iterator();
while (it.hasNext()) {
IndexableField field = it.next();
Field field = it.next();
if (field.name().equals(name)) {
it.remove();
}
@ -116,7 +118,10 @@ public final class Document implements Iterable<IndexableField> {
*/
public final BytesRef[] getBinaryValues(String name) {
final List<BytesRef> result = new ArrayList<BytesRef>();
for (IndexableField field : fields) {
Iterator<StorableField> it = storedFieldsIterator();
while (it.hasNext()) {
StorableField field = it.next();
if (field.name().equals(name)) {
final BytesRef bytes = field.binaryValue();
if (bytes != null) {
@ -138,7 +143,10 @@ public final class Document implements Iterable<IndexableField> {
* @return a <code>byte[]</code> containing the binary field value or <code>null</code>
*/
public final BytesRef getBinaryValue(String name) {
for (IndexableField field : fields) {
Iterator<StorableField> it = storedFieldsIterator();
while (it.hasNext()) {
StorableField field = it.next();
if (field.name().equals(name)) {
final BytesRef bytes = field.binaryValue();
if (bytes != null) {
@ -188,7 +196,12 @@ public final class Document implements Iterable<IndexableField> {
* IndexReader#document(int)}.
*/
public final List<IndexableField> getFields() {
return fields;
List<IndexableField> result = new ArrayList<IndexableField>();
for (IndexableField field : fields) {
result.add(field);
}
return result;
}
private final static String[] NO_STRINGS = new String[0];
@ -205,7 +218,10 @@ public final class Document implements Iterable<IndexableField> {
*/
public final String[] getValues(String name) {
List<String> result = new ArrayList<String>();
for (IndexableField field : fields) {
Iterator<StorableField> it = storedFieldsIterator();
while (it.hasNext()) {
StorableField field = it.next();
if (field.name().equals(name) && field.stringValue() != null) {
result.add(field.stringValue());
}
@ -227,7 +243,10 @@ public final class Document implements Iterable<IndexableField> {
* the actual numeric field instance back, use {@link #getField}.
*/
public final String get(String name) {
for (IndexableField field : fields) {
Iterator<StorableField> it = storedFieldsIterator();
while (it.hasNext()) {
StorableField field = it.next();
if (field.name().equals(name) && field.stringValue() != null) {
return field.stringValue();
}
@ -249,4 +268,46 @@ public final class Document implements Iterable<IndexableField> {
buffer.append(">");
return buffer.toString();
}
@Override
public Iterable<? extends IndexableField> indexableFields() {
Iterator<IndexableField> it = indexedFieldsIterator();
List<IndexableField> result = new ArrayList<IndexableField>();
while(it.hasNext()) {
result.add(it.next());
}
return result;
}
@Override
public Iterable<? extends StorableField> storableFields() {
Iterator<StorableField> it = storedFieldsIterator();
List<StorableField> result = new ArrayList<StorableField>();
while(it.hasNext()) {
result.add(it.next());
}
return result;
}
public Iterator<StorableField> storedFieldsIterator() {
return new FilterIterator<StorableField, Field>(fields.iterator()) {
@Override
protected boolean predicateFunction(Field field) {
return field.type.stored();
}
};
}
public Iterator<IndexableField> indexedFieldsIterator() {
return new FilterIterator<IndexableField, Field>(fields.iterator()) {
@Override
protected boolean predicateFunction(Field field) {
return field.type.indexed();
}
};
}
}
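
Illustration (not part of the patch): a minimal sketch of the indexed/stored split that the reworked Document exposes, assuming the branch API shown above; the field names and values are invented.

import java.util.Iterator;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;

class DocumentSplitSketch {
  public static void main(String[] args) {
    FieldType indexedOnly = new FieldType();
    indexedOnly.setIndexed(true);
    indexedOnly.setTokenized(true);

    Document doc = new Document();
    doc.add(new Field("body", "some indexed text", indexedOnly)); // indexed, not stored
    doc.add(new StoredField("id", "42"));                         // stored, not indexed

    // Only the indexed field is visible through the indexed-side iterator...
    Iterator<IndexableField> indexed = doc.indexedFieldsIterator();
    while (indexed.hasNext()) {
      System.out.println("indexed: " + indexed.next().name());
    }
    // ...and only the stored field through the stored-side view.
    for (StorableField f : doc.storableFields()) {
      System.out.println("stored: " + f.name() + " = " + f.stringValue());
    }
  }
}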

View File

@ -34,7 +34,7 @@ import org.apache.lucene.index.StoredFieldVisitor;
* @lucene.experimental */
public class DocumentStoredFieldVisitor extends StoredFieldVisitor {
private final Document doc = new Document();
private final StoredDocument doc = new StoredDocument();
private final Set<String> fieldsToAdd;
/** Load only fields named in the provided <code>Set&lt;String&gt;</code>. */
@ -62,12 +62,15 @@ public class DocumentStoredFieldVisitor extends StoredFieldVisitor {
@Override
public void stringField(FieldInfo fieldInfo, String value) throws IOException {
/*
final FieldType ft = new FieldType(TextField.TYPE_STORED);
ft.setStoreTermVectors(fieldInfo.hasVectors());
ft.setIndexed(fieldInfo.isIndexed());
ft.setOmitNorms(fieldInfo.omitsNorms());
ft.setIndexOptions(fieldInfo.getIndexOptions());
doc.add(new Field(fieldInfo.name, value, ft));
*/
doc.add(new StoredField(fieldInfo.name, value));
//doc.add(new Field(fieldInfo.name, value, ft));
}
@Override
@ -95,7 +98,7 @@ public class DocumentStoredFieldVisitor extends StoredFieldVisitor {
return fieldsToAdd == null || fieldsToAdd.contains(fieldInfo.name) ? Status.YES : Status.NO;
}
public Document getDocument() {
public StoredDocument getDocument() {
return doc;
}
}
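
Illustration (not part of the patch): a sketch of loading a single stored field through the visitor, which now produces a StoredDocument; the reader variable and the "title" field name are assumptions.

import java.io.IOException;
import java.util.Collections;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.IndexReader;

class VisitorSketch {
  static StoredDocument loadTitleOnly(IndexReader reader, int docID) throws IOException {
    DocumentStoredFieldVisitor visitor =
        new DocumentStoredFieldVisitor(Collections.singleton("title"));
    reader.document(docID, visitor);  // the visitor collects the accepted stored fields
    return visitor.getDocument();     // a StoredDocument after this patch
  }
}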

View File

@ -36,7 +36,7 @@ import org.apache.lucene.index.DocValues;
* @see DocValues for further information
* */
public class DoubleDocValuesField extends Field {
public class DoubleDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -116,7 +116,6 @@ public final class DoubleField extends Field {
public static final FieldType TYPE_NOT_STORED = new FieldType();
static {
TYPE_NOT_STORED.setIndexed(true);
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY);

View File

@ -31,6 +31,7 @@ import org.apache.lucene.index.IndexWriter; // javadocs
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.Norm; // javadocs
import org.apache.lucene.index.StorableField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.index.FieldInvertState; // javadocs
@ -59,7 +60,7 @@ import org.apache.lucene.index.FieldInvertState; // javadocs
* Field it is used in. It is strongly recommended that no
* changes be made after Field instantiation.
*/
public class Field implements IndexableField {
public class Field implements IndexableField, StorableField {
protected final FieldType type;
protected final String name;

View File

@ -20,13 +20,14 @@ package org.apache.lucene.document;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.StorableFieldType;
import org.apache.lucene.search.NumericRangeQuery; // javadocs
import org.apache.lucene.util.NumericUtils;
/**
* Describes the properties of a field.
*/
public class FieldType implements IndexableFieldType {
public class FieldType implements IndexableFieldType, StorableFieldType {
/** Data type of the numeric value
* @since 3.2
@ -41,10 +42,10 @@ public class FieldType implements IndexableFieldType {
private boolean storeTermVectorPositions;
private boolean omitNorms;
private IndexOptions indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
private DocValues.Type docValueType;
private NumericType numericType;
private boolean frozen;
private int numericPrecisionStep = NumericUtils.PRECISION_STEP_DEFAULT;
private DocValues.Type docValueType;
public FieldType(FieldType ref) {
this.indexed = ref.indexed();
@ -150,16 +151,6 @@ public class FieldType implements IndexableFieldType {
this.indexOptions = value;
}
public void setDocValueType(DocValues.Type type) {
checkIfFrozen();
docValueType = type;
}
@Override
public DocValues.Type docValueType() {
return docValueType;
}
public void setNumericType(NumericType type) {
checkIfFrozen();
numericType = type;
@ -238,4 +229,16 @@ public class FieldType implements IndexableFieldType {
return result.toString();
}
/* from StorableFieldType */
@Override
public DocValues.Type docValueType() {
return docValueType;
}
public void setDocValueType(DocValues.Type type) {
checkIfFrozen();
docValueType = type;
}
}
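
Illustration (not part of the patch): docValueType() is now declared by StorableFieldType, while the setter stays on FieldType; a small sketch of the round trip.

import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.StorableFieldType;

class DocValueTypeSketch {
  static DocValues.Type roundTrip() {
    FieldType ft = new FieldType();
    ft.setDocValueType(DocValues.Type.VAR_INTS); // setter kept on FieldType
    ft.freeze();
    StorableFieldType storable = ft;             // FieldType now also implements StorableFieldType
    return storable.docValueType();              // getter declared by StorableFieldType
  }
}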

View File

@ -35,7 +35,7 @@ import org.apache.lucene.index.DocValues;
* @see DocValues for further information
* */
public class FloatDocValuesField extends Field {
public class FloatDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -116,7 +116,6 @@ public final class FloatField extends Field {
public static final FieldType TYPE_NOT_STORED = new FieldType();
static {
TYPE_NOT_STORED.setIndexed(true);
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY);

View File

@ -35,7 +35,7 @@ import org.apache.lucene.index.DocValues;
* @see DocValues for further information
* */
public class IntDocValuesField extends Field {
public class IntDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -116,7 +116,6 @@ public final class IntField extends Field {
public static final FieldType TYPE_NOT_STORED = new FieldType();
static {
TYPE_NOT_STORED.setIndexed(true);
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS_ONLY);

View File

@ -35,7 +35,7 @@ import org.apache.lucene.index.DocValues;
* @see DocValues for further information
* */
public class LongDocValuesField extends Field {
public class LongDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -39,7 +39,7 @@ import org.apache.lucene.index.AtomicReader; // javadocs
* @see DocValues for further information
* */
public class PackedLongDocValuesField extends Field {
public class PackedLongDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -36,7 +36,7 @@ import org.apache.lucene.index.DocValues;
* @see DocValues for further information
* */
public class ShortDocValuesField extends Field {
public class ShortDocValuesField extends StoredField {
public static final FieldType TYPE = new FieldType();
static {

View File

@ -37,7 +37,7 @@ import org.apache.lucene.util.BytesRef;
* @see DocValues for further information
* */
public class SortedBytesDocValuesField extends Field {
public class SortedBytesDocValuesField extends StoredField {
// TODO: ideally indexer figures out var vs fixed on its own!?
public static final FieldType TYPE_FIXED_LEN = new FieldType();

View File

@ -0,0 +1,201 @@
package org.apache.lucene.document;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.util.BytesRef;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class StoredDocument implements Iterable<StorableField>{
private final List<StorableField> fields = new ArrayList<StorableField>();
public final void add(StorableField field) {
fields.add(field);
}
public StorableField[] getFields(String name) {
List<StorableField> result = new ArrayList<StorableField>();
for (StorableField field : fields) {
if (field.name().equals(name)) {
result.add(field);
}
}
return result.toArray(new StorableField[result.size()]);
}
public final StorableField getField(String name) {
for (StorableField field : fields) {
if (field.name().equals(name)) {
return field;
}
}
return null;
}
public final void removeField(String name) {
Iterator<StorableField> it = fields.iterator();
while (it.hasNext()) {
StorableField field = it.next();
if (field.name().equals(name)) {
it.remove();
return;
}
}
}
/**
* <p>Removes all fields with the given name from the document.
* If there is no field with the specified name, the document remains unchanged.</p>
* <p> Note that the removeField(s) methods like the add method only make sense
* prior to adding a document to an index. These methods cannot
* be used to change the content of an existing index! In order to achieve this,
* a document has to be deleted from an index and a new changed version of that
* document has to be added.</p>
*/
public final void removeFields(String name) {
Iterator<StorableField> it = fields.iterator();
while (it.hasNext()) {
StorableField field = it.next();
if (field.name().equals(name)) {
it.remove();
}
}
}
public final List<StorableField> getFields() {
return fields;
}
@Override
public Iterator<StorableField> iterator() {
return this.fields.iterator();
}
/**
* Returns an array of byte arrays for the fields that have the name specified
* as the method parameter. This method returns an empty
* array when there are no matching fields. It never
* returns null.
*
* @param name the name of the field
* @return a <code>byte[][]</code> of binary field values
*/
public final BytesRef[] getBinaryValues(String name) {
final List<BytesRef> result = new ArrayList<BytesRef>();
for (StorableField field : fields) {
if (field.name().equals(name)) {
final BytesRef bytes = field.binaryValue();
if (bytes != null) {
result.add(bytes);
}
}
}
return result.toArray(new BytesRef[result.size()]);
}
/**
* Returns an array of bytes for the first (or only) field that has the name
* specified as the method parameter. This method will return <code>null</code>
* if no binary fields with the specified name are available.
* There may be non-binary fields with the same name.
*
* @param name the name of the field.
* @return a <code>byte[]</code> containing the binary field value or <code>null</code>
*/
public final BytesRef getBinaryValue(String name) {
for (StorableField field : fields) {
if (field.name().equals(name)) {
final BytesRef bytes = field.binaryValue();
if (bytes != null) {
return bytes;
}
}
}
return null;
}
private final static String[] NO_STRINGS = new String[0];
/**
* Returns an array of values of the field specified as the method parameter.
* This method returns an empty array when there are no
* matching fields. It never returns null.
* For {@link IntField}, {@link LongField}, {@link
* FloatField} and {@link DoubleField} it returns the string value of the number. If you want
* the actual numeric field instances back, use {@link #getFields}.
* @param name the name of the field
* @return a <code>String[]</code> of field values
*/
public final String[] getValues(String name) {
List<String> result = new ArrayList<String>();
for (StorableField field : fields) {
if (field.name().equals(name) && field.stringValue() != null) {
result.add(field.stringValue());
}
}
if (result.size() == 0) {
return NO_STRINGS;
}
return result.toArray(new String[result.size()]);
}
/** Returns the string value of the field with the given name if any exist in
* this document, or null. If multiple fields exist with this name, this
* method returns the first value added. If only binary fields with this name
* exist, returns null.
* For {@link IntField}, {@link LongField}, {@link
* FloatField} and {@link DoubleField} it returns the string value of the number. If you want
* the actual numeric field instance back, use {@link #getField}.
*/
public final String get(String name) {
for (StorableField field : fields) {
if (field.name().equals(name) && field.stringValue() != null) {
return field.stringValue();
}
}
return null;
}
public Document asIndexable() {
Document doc = new Document();
for (StorableField field : fields) {
Field newField = new Field(field.name(), field.fieldType());
newField.fieldsData = field.stringValue();
if (newField.fieldsData == null)
newField.fieldsData = field.numericValue();
if (newField.fieldsData == null)
newField.fieldsData = field.binaryValue();
if (newField.fieldsData == null)
newField.fieldsData = field.readerValue();
doc.add(newField);
}
return doc;
}
}
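
Illustration (not part of the patch): a sketch of consuming the new StoredDocument as returned by a reader; the "id" and "payload" field names are assumptions.

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.util.BytesRef;

class StoredDocumentSketch {
  static void dump(IndexReader reader, int docID) throws IOException {
    StoredDocument doc = reader.document(docID);      // returns StoredDocument after this patch
    for (StorableField field : doc) {                 // StoredDocument is Iterable<StorableField>
      System.out.println(field.name() + " = " + field.stringValue());
    }
    String id = doc.get("id");                        // first string value, or null
    BytesRef payload = doc.getBinaryValue("payload"); // first binary value, or null
    Document editable = doc.asIndexable();            // convert back for re-indexing
    System.out.println(id + ": " + (payload == null ? 0 : payload.length)
        + " payload bytes, " + editable.getFields().size() + " fields");
  }
}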

View File

@ -24,7 +24,7 @@ import org.apache.lucene.util.BytesRef;
/** A field whose value is stored so that {@link
* IndexSearcher#doc} and {@link IndexReader#document} will
* return the field and its value. */
public final class StoredField extends Field {
public class StoredField extends Field {
public final static FieldType TYPE;
static {
@ -33,6 +33,16 @@ public final class StoredField extends Field {
TYPE.freeze();
}
protected StoredField(String name, FieldType type) {
super(name, type);
this.type.setStored(true);
}
public StoredField(String name, BytesRef bytes, FieldType type) {
super(name, bytes, type);
this.type.setStored(true);
}
public StoredField(String name, byte[] value) {
super(name, value, TYPE);
}

View File

@ -40,7 +40,7 @@ import org.apache.lucene.util.BytesRef;
* @see DocValues for further information
* */
public class StraightBytesDocValuesField extends Field {
public class StraightBytesDocValuesField extends StoredField {
// TODO: ideally indexer figures out var vs fixed on its own!?
public static final FieldType TYPE_FIXED_LEN = new FieldType();

View File

@ -31,6 +31,7 @@ import org.apache.lucene.codecs.BlockTreeTermsReader;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldType; // for javadocs
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.DocValues.SortedSource;
import org.apache.lucene.index.DocValues.Source;
import org.apache.lucene.search.DocIdSetIterator;
@ -1227,7 +1228,7 @@ public class CheckIndex {
for (int j = 0; j < info.info.getDocCount(); ++j) {
// Intentionally pull even deleted documents to
// make sure they too are not corrupt:
Document doc = reader.document(j);
StoredDocument doc = reader.document(j);
if (liveDocs == null || liveDocs.get(j)) {
status.docCount++;
status.totFields += doc.getFields().size();

View File

@ -218,7 +218,7 @@ final class DocFieldProcessor extends DocConsumer {
// seen before (eg suddenly turning on norms or
// vectors, etc.):
for(IndexableField field : docState.doc) {
for(IndexableField field : docState.doc.indexableFields()) {
final String fieldName = field.name();
// Make sure we have a PerField allocated
@ -266,17 +266,24 @@ final class DocFieldProcessor extends DocConsumer {
}
fp.addField(field);
if (field.fieldType().stored()) {
fieldsWriter.addField(field, fp.fieldInfo);
}
for (StorableField field: docState.doc.storableFields()) {
final String fieldName = field.name();
// Make sure we have a PerField allocated
final int hashPos = fieldName.hashCode() & hashMask;
DocFieldProcessorPerField fp = fieldHash[hashPos];
while(fp != null && !fp.fieldInfo.name.equals(fieldName)) {
fp = fp.next;
}
final DocValues.Type dvType = field.fieldType().docValueType();
if (dvType != null) {
DocValuesConsumerHolder docValuesConsumer = docValuesConsumer(dvType,
docState, fp.fieldInfo);
DocValuesConsumer consumer = docValuesConsumer.docValuesConsumer;
if (docValuesConsumer.compatibility == null) {
consumer.add(docState.docID, field);
consumer.add(docState.docID, (StorableField) field);
docValuesConsumer.compatibility = new TypeCompatibility(dvType,
consumer.getValueSize());
} else if (docValuesConsumer.compatibility.isCompatible(dvType,

View File

@ -75,7 +75,7 @@ final class DocInverterPerField extends DocFieldConsumerPerField {
// TODO FI: this should be "genericized" to querying
// consumer if it wants to see this particular field
// tokenized.
if (fieldType.indexed() && doInvert) {
if (doInvert) {
// if the field omits norms, the boost cannot be indexed.
if (fieldType.omitNorms() && field.boost() != 1.0f) {

View File

@ -322,7 +322,7 @@ final class DocumentsWriter {
return maybeMerge;
}
boolean updateDocuments(final Iterable<? extends Iterable<? extends IndexableField>> docs, final Analyzer analyzer,
boolean updateDocuments(final Iterable<? extends IndexDocument> docs, final Analyzer analyzer,
final Term delTerm) throws IOException {
boolean maybeMerge = preUpdate();
@ -353,7 +353,7 @@ final class DocumentsWriter {
return postUpdate(flushingDWPT, maybeMerge);
}
boolean updateDocument(final Iterable<? extends IndexableField> doc, final Analyzer analyzer,
boolean updateDocument(final IndexDocument doc, final Analyzer analyzer,
final Term delTerm) throws IOException {
boolean maybeMerge = preUpdate();

View File

@ -93,7 +93,7 @@ class DocumentsWriterPerThread {
InfoStream infoStream;
Similarity similarity;
int docID;
Iterable<? extends IndexableField> doc;
IndexDocument doc;
String maxTermPrefix;
DocState(DocumentsWriterPerThread docWriter, InfoStream infoStream) {
@ -224,7 +224,7 @@ class DocumentsWriterPerThread {
return retval;
}
public void updateDocument(Iterable<? extends IndexableField> doc, Analyzer analyzer, Term delTerm) throws IOException {
public void updateDocument(IndexDocument doc, Analyzer analyzer, Term delTerm) throws IOException {
assert writer.testPoint("DocumentsWriterPerThread addDocument start");
assert deleteQueue != null;
docState.doc = doc;
@ -277,7 +277,7 @@ class DocumentsWriterPerThread {
}
}
public int updateDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer analyzer, Term delTerm) throws IOException {
public int updateDocuments(Iterable<? extends IndexDocument> docs, Analyzer analyzer, Term delTerm) throws IOException {
assert writer.testPoint("DocumentsWriterPerThread addDocuments start");
assert deleteQueue != null;
docState.analyzer = analyzer;
@ -289,7 +289,7 @@ class DocumentsWriterPerThread {
}
int docCount = 0;
try {
for(Iterable<? extends IndexableField> doc : docs) {
for(IndexDocument doc : docs) {
docState.doc = doc;
docState.docID = numDocsInRAM;
docCount++;

View File

@ -260,7 +260,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
// rather, each component in the chain should update
// what it "owns". EG fieldType.indexOptions() should
// be updated by maybe FreqProxTermsWriterPerField:
return addOrUpdateInternal(name, -1, fieldType.indexed(), false,
return addOrUpdateInternal(name, -1, true, false,
fieldType.omitNorms(), false,
fieldType.indexOptions(), null, null);
}

View File

@ -103,13 +103,8 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
@Override
boolean start(IndexableField[] fields, int count) {
for(int i=0;i<count;i++) {
if (fields[i].fieldType().indexed()) {
return true;
}
}
return false;
}
@Override
void start(IndexableField f) {

View File

@ -0,0 +1,26 @@
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @lucene.internal
*/
public interface IndexDocument {
public Iterable<? extends IndexableField> indexableFields();
public Iterable<? extends StorableField> storableFields();
}
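
Illustration (not part of the patch): a hypothetical IndexDocument implementation backed by two plain lists; the reworked Document achieves the same with FilterIterator. TwoListDocument is an invented name.

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;

class TwoListDocument implements IndexDocument {
  private final List<IndexableField> indexed = new ArrayList<IndexableField>();
  private final List<StorableField> stored = new ArrayList<StorableField>();

  void addIndexed(IndexableField field) { indexed.add(field); }
  void addStored(StorableField field)   { stored.add(field); }

  @Override
  public Iterable<? extends IndexableField> indexableFields() { return indexed; }

  @Override
  public Iterable<? extends StorableField> storableFields() { return stored; }
}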

View File

@ -27,6 +27,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.search.SearcherManager; // javadocs
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Bits;
@ -340,7 +341,7 @@ public abstract class IndexReader implements Closeable {
// TODO: we need a separate StoredField, so that the
// Document returned here contains that class not
// IndexableField
public final Document document(int docID) throws IOException {
public final StoredDocument document(int docID) throws IOException {
final DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor();
document(docID, visitor);
return visitor.getDocument();
@ -351,8 +352,10 @@ public abstract class IndexReader implements Closeable {
* fields. Note that this is simply sugar for {@link
* DocumentStoredFieldVisitor#DocumentStoredFieldVisitor(Set)}.
*/
public final Document document(int docID, Set<String> fieldsToLoad) throws IOException {
final DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(fieldsToLoad);
public final StoredDocument document(int docID, Set<String> fieldsToLoad)
throws IOException {
final DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(
fieldsToLoad);
document(docID, visitor);
return visitor.getDocument();
}
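
Illustration (not part of the patch): the field-subset overload is sugar for the visitor shown earlier; a one-method sketch, with the "title" field name assumed.

import java.io.IOException;
import java.util.Collections;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.IndexReader;

class PartialLoadSketch {
  static String loadTitle(IndexReader reader, int docID) throws IOException {
    StoredDocument doc = reader.document(docID, Collections.singleton("title"));
    return doc.get("title");
  }
}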

View File

@ -1050,7 +1050,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public void addDocument(Iterable<? extends IndexableField> doc) throws IOException {
public void addDocument(IndexDocument doc) throws IOException {
addDocument(doc, analyzer);
}
@ -1069,7 +1069,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public void addDocument(Iterable<? extends IndexableField> doc, Analyzer analyzer) throws IOException {
public void addDocument(IndexDocument doc, Analyzer analyzer) throws IOException {
updateDocument(null, doc, analyzer);
}
@ -1114,7 +1114,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
*
* @lucene.experimental
*/
public void addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException {
public void addDocuments(Iterable<? extends IndexDocument> docs) throws IOException {
addDocuments(docs, analyzer);
}
@ -1129,7 +1129,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
*
* @lucene.experimental
*/
public void addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer analyzer) throws IOException {
public void addDocuments(Iterable<? extends IndexDocument> docs, Analyzer analyzer) throws IOException {
updateDocuments(null, docs, analyzer);
}
@ -1146,7 +1146,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
*
* @lucene.experimental
*/
public void updateDocuments(Term delTerm, Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException {
public void updateDocuments(Term delTerm, Iterable<? extends IndexDocument> docs) throws IOException {
updateDocuments(delTerm, docs, analyzer);
}
@ -1164,7 +1164,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
*
* @lucene.experimental
*/
public void updateDocuments(Term delTerm, Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer analyzer) throws IOException {
public void updateDocuments(Term delTerm, Iterable<? extends IndexDocument> docs, Analyzer analyzer) throws IOException {
ensureOpen();
try {
boolean success = false;
@ -1289,7 +1289,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public void updateDocument(Term term, Iterable<? extends IndexableField> doc) throws IOException {
public void updateDocument(Term term, IndexDocument doc) throws IOException {
ensureOpen();
updateDocument(term, doc, getAnalyzer());
}
@ -1312,7 +1312,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public void updateDocument(Term term, Iterable<? extends IndexableField> doc, Analyzer analyzer)
public void updateDocument(Term term, IndexDocument doc, Analyzer analyzer)
throws IOException {
ensureOpen();
try {

View File

@ -42,21 +42,6 @@ public interface IndexableField {
* of this field. */
public IndexableFieldType fieldType();
/** Field boost (you must pre-multiply in any doc boost). */
public float boost();
/** Non-null if this field has a binary value */
public BytesRef binaryValue();
/** Non-null if this field has a string value */
public String stringValue();
/** Non-null if this field has a Reader value */
public Reader readerValue();
/** Non-null if this field has a numeric value */
public Number numericValue();
/**
* Creates the TokenStream used for indexing this field. If appropriate,
* implementations should use the given Analyzer to create the TokenStreams.
@ -67,4 +52,7 @@ public interface IndexableField {
* @throws IOException Can be thrown while creating the TokenStream
*/
public TokenStream tokenStream(Analyzer analyzer) throws IOException;
/** Field boost (you must pre-multiply in any doc boost). */
public float boost();
}

View File

@ -25,12 +25,6 @@ import org.apache.lucene.index.FieldInfo.IndexOptions;
*/
public interface IndexableFieldType {
/** True if this field should be indexed (inverted) */
public boolean indexed();
/** True if the field's value should be stored */
public boolean stored();
/** True if this field's value should be analyzed */
public boolean tokenized();
@ -49,8 +43,4 @@ public interface IndexableFieldType {
/** {@link IndexOptions}, describing what should be
* recorded into the inverted index */
public IndexOptions indexOptions();
/** DocValues type; if non-null then the field's value
* will be indexed into docValues */
public DocValues.Type docValueType();
}

View File

@ -26,6 +26,7 @@ import org.apache.lucene.document.LongDocValuesField;
import org.apache.lucene.document.PackedLongDocValuesField;
import org.apache.lucene.document.ShortDocValuesField;
import org.apache.lucene.document.SortedBytesDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StraightBytesDocValuesField;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.search.similarities.Similarity;
@ -43,13 +44,13 @@ import org.apache.lucene.util.BytesRef;
* @lucene.internal
*/
public final class Norm {
private Field field;
private StoredField field;
private BytesRef spare;
/**
* Returns the {@link IndexableField} representation for this norm
*/
public IndexableField field() {
public StoredField field() {
return field;
}

View File

@ -51,10 +51,10 @@ final class NormsConsumerPerField extends InvertedDocEndConsumerPerField impleme
similarity.computeNorm(fieldState, norm);
if (norm.type() != null) {
IndexableField field = norm.field();
StorableField field = norm.field();
// some similarity might not compute any norms
DocValuesConsumer consumer = getConsumer(norm.type());
consumer.add(docState.docID, field);
consumer.add(docState.docID, (StorableField) field);
}
}
}

View File

@ -26,6 +26,8 @@ import java.util.Map.Entry;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;
@ -67,12 +69,12 @@ public class PersistentSnapshotDeletionPolicy extends SnapshotDeletionPolicy {
int numDocs = r.numDocs();
// index is allowed to have exactly one document or 0.
if (numDocs == 1) {
Document doc = r.document(r.maxDoc() - 1);
StoredDocument doc = r.document(r.maxDoc() - 1);
if (doc.getField(SNAPSHOTS_ID) == null) {
throw new IllegalStateException("directory is not a valid snapshots store!");
}
doc.removeField(SNAPSHOTS_ID);
for (IndexableField f : doc) {
for (StorableField f : doc) {
snapshots.put(f.name(), f.stringValue());
}
} else if (numDocs != 0) {
@ -184,14 +186,12 @@ public class PersistentSnapshotDeletionPolicy extends SnapshotDeletionPolicy {
private void persistSnapshotInfos(String id, String segment) throws IOException {
writer.deleteAll();
Document d = new Document();
FieldType ft = new FieldType();
ft.setStored(true);
d.add(new Field(SNAPSHOTS_ID, "", ft));
d.add(new StoredField(SNAPSHOTS_ID, ""));
for (Entry<String, String> e : super.getSnapshots().entrySet()) {
d.add(new Field(e.getKey(), e.getValue(), ft));
d.add(new StoredField(e.getKey(), e.getValue()));
}
if (id != null) {
d.add(new Field(id, segment, ft));
d.add(new StoredField(id, segment));
}
writer.addDocument(d);
writer.commit();

View File

@ -0,0 +1,44 @@
package org.apache.lucene.index;
import java.io.Reader;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.util.BytesRef;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public interface StorableField {
/** Field name */
public String name();
/** Field type */
public FieldType fieldType();
/** Non-null if this field has a binary value */
public BytesRef binaryValue();
/** Non-null if this field has a string value */
public String stringValue();
/** Non-null if this field has a Reader value */
public Reader readerValue();
/** Non-null if this field has a numeric value */
public Number numericValue();
}
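
Illustration (not part of the patch): a small helper that dispatches over the StorableField value accessors, mirroring what StoredDocument.asIndexable() does above; the helper itself is invented.

import org.apache.lucene.index.StorableField;

class StorableValueSketch {
  /** Returns the first non-null value carried by the field. */
  static Object valueOf(StorableField field) {
    if (field.binaryValue() != null) return field.binaryValue();
    if (field.stringValue() != null) return field.stringValue();
    if (field.numericValue() != null) return field.numericValue();
    return field.readerValue();
  }
}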

View File

@ -0,0 +1,25 @@
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public interface StorableFieldType {
/** DocValues type; if non-null then the field's value
* will be indexed into docValues */
public DocValues.Type docValueType();
}

View File

@ -44,12 +44,12 @@ final class StoredFieldsConsumer {
}
private int numStoredFields;
private IndexableField[] storedFields;
private StorableField[] storedFields;
private FieldInfo[] fieldInfos;
public void reset() {
numStoredFields = 0;
storedFields = new IndexableField[1];
storedFields = new StorableField[1];
fieldInfos = new FieldInfo[1];
}
@ -126,10 +126,10 @@ final class StoredFieldsConsumer {
assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument end");
}
public void addField(IndexableField field, FieldInfo fieldInfo) {
public void addField(StorableField field, FieldInfo fieldInfo) {
if (numStoredFields == storedFields.length) {
int newSize = ArrayUtil.oversize(numStoredFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
IndexableField[] newArray = new IndexableField[newSize];
StorableField[] newArray = new StorableField[newSize];
System.arraycopy(storedFields, 0, newArray, 0, numStoredFields);
storedFields = newArray;

View File

@ -61,7 +61,7 @@ final class TermVectorsConsumerPerField extends TermsHashConsumerPerField {
for(int i=0;i<count;i++) {
IndexableField field = fields[i];
if (field.fieldType().indexed() && field.fieldType().storeTermVectors()) {
if (field.fieldType().storeTermVectors()) {
doVectors = true;
doVectorPositions |= field.fieldType().storeTermVectorPositions();
doVectorOffsets |= field.fieldType().storeTermVectorOffsets();

View File

@ -33,6 +33,7 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader; // javadocs
import org.apache.lucene.index.IndexReader;
@ -181,7 +182,7 @@ public class IndexSearcher {
}
/** Sugar for <code>.getIndexReader().document(docID)</code> */
public Document doc(int docID) throws IOException {
public StoredDocument doc(int docID) throws IOException {
return reader.document(docID);
}
@ -191,7 +192,7 @@ public class IndexSearcher {
}
/** Sugar for <code>.getIndexReader().document(docID, fieldsToLoad)</code> */
public final Document document(int docID, Set<String> fieldsToLoad) throws IOException {
public final StoredDocument document(int docID, Set<String> fieldsToLoad) throws IOException {
return reader.document(docID, fieldsToLoad);
}
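
Illustration (not part of the patch): a sketch of a search whose hits are now retrieved as StoredDocuments; the searcher, query and "title" field are assumptions.

import java.io.IOException;
import org.apache.lucene.document.StoredDocument;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;

class SearchSketch {
  static void printTopTitles(IndexSearcher searcher, Query query) throws IOException {
    TopDocs top = searcher.search(query, 10);
    for (ScoreDoc hit : top.scoreDocs) {
      StoredDocument doc = searcher.doc(hit.doc); // now typed as StoredDocument
      System.out.println(doc.get("title"));
    }
  }
}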

View File

@ -27,6 +27,7 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexReader; // javadocs
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexableField;
@ -153,25 +154,25 @@ public class NRTManager extends ReferenceManager<IndexSearcher> {
this.writer = writer;
}
public long updateDocument(Term t, Iterable<? extends IndexableField> d, Analyzer a) throws IOException {
public long updateDocument(Term t, IndexDocument d, Analyzer a) throws IOException {
writer.updateDocument(t, d, a);
// Return gen as of when indexing finished:
return indexingGen.get();
}
public long updateDocument(Term t, Iterable<? extends IndexableField> d) throws IOException {
public long updateDocument(Term t, IndexDocument d) throws IOException {
writer.updateDocument(t, d);
// Return gen as of when indexing finished:
return indexingGen.get();
}
public long updateDocuments(Term t, Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer a) throws IOException {
public long updateDocuments(Term t, Iterable<? extends IndexDocument> docs, Analyzer a) throws IOException {
writer.updateDocuments(t, docs, a);
// Return gen as of when indexing finished:
return indexingGen.get();
}
public long updateDocuments(Term t, Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException {
public long updateDocuments(Term t, Iterable<? extends IndexDocument> docs) throws IOException {
writer.updateDocuments(t, docs);
// Return gen as of when indexing finished:
return indexingGen.get();
@ -207,25 +208,25 @@ public class NRTManager extends ReferenceManager<IndexSearcher> {
return indexingGen.get();
}
public long addDocument(Iterable<? extends IndexableField> d, Analyzer a) throws IOException {
public long addDocument(IndexDocument d, Analyzer a) throws IOException {
writer.addDocument(d, a);
// Return gen as of when indexing finished:
return indexingGen.get();
}
public long addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer a) throws IOException {
public long addDocuments(Iterable<? extends IndexDocument> docs, Analyzer a) throws IOException {
writer.addDocuments(docs, a);
// Return gen as of when indexing finished:
return indexingGen.get();
}
public long addDocument(Iterable<? extends IndexableField> d) throws IOException {
public long addDocument(IndexDocument d) throws IOException {
writer.addDocument(d);
// Return gen as of when indexing finished:
return indexingGen.get();
}
public long addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException {
public long addDocuments(Iterable<? extends IndexDocument> docs) throws IOException {
writer.addDocuments(docs);
// Return gen as of when indexing finished:
return indexingGen.get();

View File

@ -0,0 +1,68 @@
package org.apache.lucene.util;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
public abstract class FilterIterator<T, U extends T> implements Iterator<T> {
private Iterator<U> iterator;
private T next = null;
private boolean nextIsSet = false;
protected abstract boolean predicateFunction(U field);
public FilterIterator(Iterator<U> baseIterator) {
this.iterator = baseIterator;
}
public boolean hasNext() {
if (nextIsSet) {
return true;
} else {
return setNext();
}
}
public T next() {
if (!nextIsSet) {
if (!setNext()) {
throw new NoSuchElementException();
}
}
nextIsSet = false;
return next;
}
public void remove() {
throw new UnsupportedOperationException();
}
private boolean setNext() {
while (iterator.hasNext()) {
U object = iterator.next();
if (predicateFunction(object)) {
next = object;
nextIsSet = true;
return true;
}
}
return false;
}
}
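
Illustration (not part of the patch): FilterIterator used outside the Document class to filter empty strings from a plain iterator; everything here is invented apart from the FilterIterator contract shown above.

import java.util.Arrays;
import java.util.Iterator;
import org.apache.lucene.util.FilterIterator;

class FilterIteratorSketch {
  public static void main(String[] args) {
    Iterator<String> all = Arrays.asList("a", "", "b", "").iterator();
    Iterator<CharSequence> nonEmpty = new FilterIterator<CharSequence, String>(all) {
      @Override
      protected boolean predicateFunction(String value) {
        return value.length() > 0; // keep only non-empty strings
      }
    };
    while (nonEmpty.hasNext()) {
      System.out.println(nonEmpty.next()); // prints "a" then "b"
    }
  }
}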