LUCENE-3108: removed legacy code, fixed minor javadoc issues and renamed Type to ValueType

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/docvalues@1124144 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Simon Willnauer 2011-05-18 09:38:17 +00:00
parent 8603fac79a
commit 153475ad31
28 changed files with 185 additions and 273 deletions

View File

@ -20,7 +20,7 @@ import org.apache.lucene.search.spans.SpanQuery; // for javadocs
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.values.PerDocFieldValues;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.util.StringHelper; // for javadocs
@ -306,7 +306,7 @@ public abstract class AbstractField implements Fieldable {
return docValues != null;
}
public Type docValuesType() {
public ValueType docValuesType() {
return docValues == null? null : docValues.type();
}
}

View File

@ -24,7 +24,7 @@ import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.values.PerDocFieldValues;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.util.BytesRef;
/**
@ -47,7 +47,7 @@ import org.apache.lucene.util.BytesRef;
*
* for(all documents) {
* ...
* field.setIntValue(value)
* field.setInt(value)
* writer.addDocument(document);
* ...
* }
@ -66,7 +66,7 @@ import org.apache.lucene.util.BytesRef;
* field.set(indexedField);
* for(all documents) {
* ...
* field.setIntValue(value)
* field.setInt(value)
* writer.addDocument(document);
* ...
* }
@ -78,7 +78,7 @@ public class DocValuesField extends AbstractField implements PerDocFieldValues {
protected BytesRef bytes;
protected double doubleValue;
protected long longValue;
protected Type type;
protected ValueType type;
protected Comparator<BytesRef> bytesComparator;
/**
@ -97,60 +97,60 @@ public class DocValuesField extends AbstractField implements PerDocFieldValues {
}
/**
* Sets the given <code>long</code> value and sets the field's {@link Type} to
* {@link Type#INTS} unless already set. If you want to change the
* default type use {@link #setType(Type)}.
* Sets the given <code>long</code> value and sets the field's {@link ValueType} to
* {@link ValueType#INTS} unless already set. If you want to change the
* default type use {@link #setType(ValueType)}.
*/
public void setInt(long value) {
if (type == null) {
type = Type.INTS;
type = ValueType.INTS;
}
longValue = value;
}
/**
* Sets the given <code>float</code> value and sets the field's {@link Type}
* to {@link Type#FLOAT_32} unless already set. If you want to
* change the type use {@link #setType(Type)}.
* Sets the given <code>float</code> value and sets the field's {@link ValueType}
* to {@link ValueType#FLOAT_32} unless already set. If you want to
* change the type use {@link #setType(ValueType)}.
*/
public void setFloat(float value) {
if (type == null) {
type = Type.FLOAT_32;
type = ValueType.FLOAT_32;
}
doubleValue = value;
}
/**
* Sets the given <code>double</code> value and sets the field's {@link Type}
* to {@link Type#FLOAT_64} unless already set. If you want to
* change the default type use {@link #setType(Type)}.
* Sets the given <code>double</code> value and sets the field's {@link ValueType}
* to {@link ValueType#FLOAT_64} unless already set. If you want to
* change the default type use {@link #setType(ValueType)}.
*/
public void setFloat(double value) {
if (type == null) {
type = Type.FLOAT_64;
type = ValueType.FLOAT_64;
}
doubleValue = value;
}
/**
* Sets the given {@link BytesRef} value and the field's {@link Type}. The
* Sets the given {@link BytesRef} value and the field's {@link ValueType}. The
* comparator for this field is set to <code>null</code>. If a
* <code>null</code> comparator is set the default comparator for the given
* {@link Type} is used.
* {@link ValueType} is used.
*/
public void setBytes(BytesRef value, Type type) {
public void setBytes(BytesRef value, ValueType type) {
setBytes(value, type, null);
}
/**
* Sets the given {@link BytesRef} value, the field's {@link Type} and the
* Sets the given {@link BytesRef} value, the field's {@link ValueType} and the
* field's comparator. If the {@link Comparator} is set to <code>null</code>
* the default for the given {@link Type} is used instead.
* the default for the given {@link ValueType} is used instead.
*
* @throws IllegalArgumentException
* if the value or the type are null
*/
public void setBytes(BytesRef value, Type type, Comparator<BytesRef> comp) {
public void setBytes(BytesRef value, ValueType type, Comparator<BytesRef> comp) {
if (value == null) {
throw new IllegalArgumentException("value must not be null");
}
@ -193,16 +193,16 @@ public class DocValuesField extends AbstractField implements PerDocFieldValues {
/**
* Sets the {@link BytesRef} comparator for this field. If the field has a
* numeric {@link Type} the comparator will be ignored.
* numeric {@link ValueType} the comparator will be ignored.
*/
public void setBytesComparator(Comparator<BytesRef> comp) {
this.bytesComparator = comp;
}
/**
* Sets the {@link Type} for this field.
* Sets the {@link ValueType} for this field.
*/
public void setType(Type type) {
public void setType(ValueType type) {
if (type == null) {
throw new IllegalArgumentException("Type must not be null");
}
@ -210,9 +210,9 @@ public class DocValuesField extends AbstractField implements PerDocFieldValues {
}
/**
* Returns the field's {@link Type}
* Returns the field's {@link ValueType}
*/
public Type type() {
public ValueType type() {
return type;
}
@ -252,7 +252,7 @@ public class DocValuesField extends AbstractField implements PerDocFieldValues {
* given type and returns it.
*
*/
public static <T extends AbstractField> T set(T field, Type type) {
public static <T extends AbstractField> T set(T field, ValueType type) {
if (field instanceof DocValuesField)
return field;
final DocValuesField valField = new DocValuesField();

View File

@ -20,7 +20,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.FieldInvertState; // for javadocs
import org.apache.lucene.index.values.DocValues;
import org.apache.lucene.index.values.PerDocFieldValues;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.search.PhraseQuery; // for javadocs
import org.apache.lucene.search.spans.SpanQuery; // for javadocs
@ -230,8 +230,8 @@ public interface Fieldable {
public boolean hasDocValues();
/**
* Returns the {@link Type} of the set {@link PerDocFieldValues} or
* Returns the {@link ValueType} of the set {@link PerDocFieldValues} or
* <code>null</code> if not set.
*/
public Type docValuesType();
public ValueType docValuesType();
}

View File

@ -1,6 +1,6 @@
package org.apache.lucene.index;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -26,7 +26,7 @@ public final class FieldInfo {
public final int number;
public boolean isIndexed;
Type docValues;
ValueType docValues;
// true if term vector for this field should be stored
@ -42,7 +42,7 @@ public final class FieldInfo {
FieldInfo(String na, boolean tk, int nu, boolean storeTermVector,
boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, Type docValues) {
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, ValueType docValues) {
name = na;
isIndexed = tk;
number = nu;
@ -112,7 +112,7 @@ public final class FieldInfo {
}
assert !this.omitTermFreqAndPositions || !this.storePayloads;
}
void setDocValues(Type v) {
void setDocValues(ValueType v) {
if (docValues == null) {
docValues = v;
}
@ -122,7 +122,7 @@ public final class FieldInfo {
return docValues != null;
}
public Type getDocValues() {
public ValueType getDocValues() {
return docValues;
}

View File

@ -31,7 +31,7 @@ import java.util.Map.Entry;
import org.apache.lucene.index.SegmentCodecs; // Required for Java 1.5 javadocs
import org.apache.lucene.index.SegmentCodecs.SegmentCodecsBuilder;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
@ -433,14 +433,14 @@ public final class FieldInfos implements Iterable<FieldInfo> {
*/
synchronized public FieldInfo addOrUpdate(String name, boolean isIndexed, boolean storeTermVector,
boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, Type docValues) {
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, ValueType docValues) {
return addOrUpdateInternal(name, -1, isIndexed, storeTermVector, storePositionWithTermVector,
storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions, docValues);
}
synchronized private FieldInfo addOrUpdateInternal(String name, int preferredFieldNumber, boolean isIndexed,
boolean storeTermVector, boolean storePositionWithTermVector, boolean storeOffsetWithTermVector,
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, Type docValues) {
boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, ValueType docValues) {
if (globalFieldNumbers == null) {
throw new IllegalStateException("FieldInfos are read-only, create a new instance with a global field map to make modifications to FieldInfos");
}
@ -473,7 +473,7 @@ public final class FieldInfos implements Iterable<FieldInfo> {
*/
private FieldInfo addInternal(String name, int fieldNumber, boolean isIndexed,
boolean storeTermVector, boolean storePositionWithTermVector,
boolean storeOffsetWithTermVector, boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, Type docValuesType) {
boolean storeOffsetWithTermVector, boolean omitNorms, boolean storePayloads, boolean omitTermFreqAndPositions, ValueType docValuesType) {
// don't check modifiable here since we use that to initially build up FIs
name = StringHelper.intern(name);
if (globalFieldNumbers != null) {
@ -680,7 +680,7 @@ public final class FieldInfos implements Iterable<FieldInfo> {
}
hasVectors |= storeTermVector;
hasProx |= isIndexed && !omitTermFreqAndPositions;
Type docValuesType = null;
ValueType docValuesType = null;
if (format <= FORMAT_INDEX_VALUES) {
final byte b = input.readByte();
switch(b) {
@ -688,31 +688,31 @@ public final class FieldInfos implements Iterable<FieldInfo> {
docValuesType = null;
break;
case 1:
docValuesType = Type.INTS;
docValuesType = ValueType.INTS;
break;
case 2:
docValuesType = Type.FLOAT_32;
docValuesType = ValueType.FLOAT_32;
break;
case 3:
docValuesType = Type.FLOAT_64;
docValuesType = ValueType.FLOAT_64;
break;
case 4:
docValuesType = Type.BYTES_FIXED_STRAIGHT;
docValuesType = ValueType.BYTES_FIXED_STRAIGHT;
break;
case 5:
docValuesType = Type.BYTES_FIXED_DEREF;
docValuesType = ValueType.BYTES_FIXED_DEREF;
break;
case 6:
docValuesType = Type.BYTES_FIXED_SORTED;
docValuesType = ValueType.BYTES_FIXED_SORTED;
break;
case 7:
docValuesType = Type.BYTES_VAR_STRAIGHT;
docValuesType = ValueType.BYTES_VAR_STRAIGHT;
break;
case 8:
docValuesType = Type.BYTES_VAR_DEREF;
docValuesType = ValueType.BYTES_VAR_DEREF;
break;
case 9:
docValuesType = Type.BYTES_VAR_SORTED;
docValuesType = ValueType.BYTES_VAR_SORTED;
break;
default:
throw new IllegalStateException("unhandled indexValues type " + b);

View File

@ -24,7 +24,7 @@ import java.util.ArrayList;
import org.apache.lucene.index.values.DocValues;
import org.apache.lucene.index.values.MultiDocValues;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.index.values.MultiDocValues.DocValuesIndex;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.util.ReaderUtil;

View File

@ -26,7 +26,7 @@ import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.index.values.DocValues;
import org.apache.lucene.index.values.MultiDocValues;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.index.values.MultiDocValues.DocValuesIndex;
import org.apache.lucene.util.ReaderUtil;
import org.apache.lucene.util.ReaderUtil.Gather;
@ -117,7 +117,7 @@ public class MultiPerDocValues extends PerDocValues {
// create & add to docValues:
final List<MultiDocValues.DocValuesIndex> docValuesIndex = new ArrayList<MultiDocValues.DocValuesIndex>();
int docsUpto = 0;
Type type = null;
ValueType type = null;
// Gather all sub-readers that share this field
for (int i = 0; i < subs.length; i++) {
DocValues values = subs[i].docValues(field);

View File

@ -28,7 +28,7 @@ import org.apache.lucene.index.values.Bytes;
import org.apache.lucene.index.values.DocValues;
import org.apache.lucene.index.values.Floats;
import org.apache.lucene.index.values.Ints;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.store.Directory;
/**
@ -86,8 +86,8 @@ public class DefaultDocValuesProducer extends PerDocValues {
/**
* Loads a {@link DocValues} instance depending on the given {@link Type}.
* Codecs that use different implementations for a certain {@link Type} can
* Loads a {@link DocValues} instance depending on the given {@link ValueType}.
* Codecs that use different implementations for a certain {@link ValueType} can
* simply override this method and return their custom implementations.
*
* @param docCount
@ -102,10 +102,10 @@ public class DefaultDocValuesProducer extends PerDocValues {
* @throws IOException
* if an {@link IOException} occurs
* @throws IllegalArgumentException
* if the given {@link Type} is not supported
* if the given {@link ValueType} is not supported
*/
protected DocValues loadDocValues(int docCount, Directory dir, String id,
Type type) throws IOException {
ValueType type) throws IOException {
switch (type) {
case INTS:
return Ints.getValues(dir, id, false);

View File

@ -126,9 +126,9 @@ public abstract class DocValues implements Closeable {
}
/**
* Returns the {@link Type} of this {@link DocValues} instance
* Returns the {@link ValueType} of this {@link DocValues} instance
*/
public abstract Type type();
public abstract ValueType type();
/**
* Closes this {@link DocValues} instance. This method should only be called
@ -164,11 +164,11 @@ public abstract class DocValues implements Closeable {
/**
* Source of per document values like long, double or {@link BytesRef}
* depending on the {@link DocValues} fields {@link Type}. Source
* depending on the {@link DocValues} fields {@link ValueType}. Source
* implementations provide random access semantics similar to array lookups
* and typically are entirely memory resident.
* <p>
* {@link Source} defines 3 {@link Type} //TODO finish this
* {@link Source} defines 3 {@link ValueType} //TODO finish this
*/
public static abstract class Source {
// TODO we might need a close method here to null out the internal used arrays?!
@ -243,11 +243,11 @@ public abstract class DocValues implements Closeable {
}
/**
* Returns the {@link Type} of this source.
* Returns the {@link ValueType} of this source.
*
* @return the {@link Type} of this source.
* @return the {@link ValueType} of this source.
*/
public abstract Type type();
public abstract ValueType type();
/**
* Returns a {@link DocValuesEnum} for this source which uses the given
@ -272,13 +272,13 @@ public abstract class DocValues implements Closeable {
* @param attrs
* the {@link AttributeSource} for this enum
* @param type
* the enums {@link Type}
* the enums {@link ValueType}
* @param source
* the source this enum operates on
* @param numDocs
* the number of documents within the source
*/
protected SourceEnum(AttributeSource attrs, Type type, Source source,
protected SourceEnum(AttributeSource attrs, ValueType type, Source source,
int numDocs) {
super(attrs, type);
this.source = source;
@ -361,7 +361,7 @@ public abstract class DocValues implements Closeable {
* {@link MissingValue} is used by {@link Source} implementations to define an
* Implementation dependent value for documents that had no value assigned
* during indexing. Its purpose is similar to a default value but since a
* missing value across {@link Type} and its implementations can be highly
* missing value across {@link ValueType} and its implementations can be highly
* dynamic the actual values are not constant but defined per {@link Source}
* through the {@link MissingValue} struct. The actual value used to indicate
a missing value can even change within the same field from one segment to

View File

@ -27,23 +27,23 @@ import org.apache.lucene.util.LongsRef;
/**
* {@link DocValuesEnum} is a {@link DocIdSetIterator} iterating <tt>byte[]</tt>
* , <tt>long</tt> and <tt>double</tt> stored per document. Depending on the
* enum's {@link Type} ({@link #type()}) the enum might skip over documents that
* have no value stored. Types like {@link Type#BYTES_VAR_STRAIGHT} might not
* enum's {@link ValueType} ({@link #type()}) the enum might skip over documents that
* have no value stored. Types like {@link ValueType#BYTES_VAR_STRAIGHT} might not
* skip over documents even if there is no value associated with a document. The
* value for document without values again depends on the types implementation
* although a reference for a {@link Type} returned from a accessor method
* although a reference for a {@link ValueType} returned from an accessor method
* {@link #getFloat()}, {@link #getInt()} or {@link #bytes()} will never be
* <code>null</code> even if a document has no value.
* <p>
* Note: Only the reference for the enum's type is initialized to non
* <code>null</code> ie. {@link #getInt()} will always return <code>null</code>
* if the enum's Type is {@link Type#FLOAT_32}.
* if the enum's Type is {@link ValueType#FLOAT_32}.
*
* @lucene.experimental
*/
public abstract class DocValuesEnum extends DocIdSetIterator {
private AttributeSource source;
private final Type enumType;
private final ValueType enumType;
protected BytesRef bytesRef;
protected FloatsRef floatsRef;
protected LongsRef intsRef;
@ -52,14 +52,14 @@ public abstract class DocValuesEnum extends DocIdSetIterator {
* Creates a new {@link DocValuesEnum} for the given type. The
* {@link AttributeSource} for this enum is set to <code>null</code>
*/
protected DocValuesEnum(Type enumType) {
protected DocValuesEnum(ValueType enumType) {
this(null, enumType);
}
/**
* Creates a new {@link DocValuesEnum} for the given type.
*/
protected DocValuesEnum(AttributeSource source, Type enumType) {
protected DocValuesEnum(AttributeSource source, ValueType enumType) {
this.source = source;
this.enumType = enumType;
switch (enumType) {
@ -84,7 +84,7 @@ public abstract class DocValuesEnum extends DocIdSetIterator {
/**
* Returns the type of this enum
*/
public Type type() {
public ValueType type() {
return enumType;
}
@ -144,9 +144,9 @@ public abstract class DocValuesEnum extends DocIdSetIterator {
public abstract void close() throws IOException;
/**
* Returns an empty {@link DocValuesEnum} for the given {@link Type}.
* Returns an empty {@link DocValuesEnum} for the given {@link ValueType}.
*/
public static DocValuesEnum emptyEnum(Type type) {
public static DocValuesEnum emptyEnum(ValueType type) {
return new DocValuesEnum(type) {
@Override
public int nextDoc() throws IOException {

View File

@ -173,8 +173,8 @@ class FixedDerefBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_FIXED_DEREF;
public ValueType type() {
return ValueType.BYTES_FIXED_DEREF;
}
@Override
@ -198,11 +198,11 @@ class FixedDerefBytesImpl {
public DerefBytesEnum(AttributeSource source, IndexInput datIn,
IndexInput idxIn, int size) throws IOException {
this(source, datIn, idxIn, size, Type.BYTES_FIXED_DEREF);
this(source, datIn, idxIn, size, ValueType.BYTES_FIXED_DEREF);
}
protected DerefBytesEnum(AttributeSource source, IndexInput datIn,
IndexInput idxIn, int size, Type enumType) throws IOException {
IndexInput idxIn, int size, ValueType enumType) throws IOException {
super(source, enumType);
this.datIn = datIn;
this.size = size;
@ -268,8 +268,8 @@ class FixedDerefBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_FIXED_DEREF;
public ValueType type() {
return ValueType.BYTES_FIXED_DEREF;
}
}

View File

@ -218,8 +218,8 @@ class FixedSortedBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_FIXED_SORTED;
public ValueType type() {
return ValueType.BYTES_FIXED_SORTED;
}
@Override
@ -235,8 +235,8 @@ class FixedSortedBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_FIXED_SORTED;
public ValueType type() {
return ValueType.BYTES_FIXED_SORTED;
}
}
}

View File

@ -164,8 +164,8 @@ class FixedStraightBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_FIXED_STRAIGHT;
public ValueType type() {
return ValueType.BYTES_FIXED_STRAIGHT;
}
@Override
@ -188,7 +188,7 @@ class FixedStraightBytesImpl {
public FixedStraightBytesEnum(AttributeSource source, IndexInput datIn,
int size, int maxDoc) throws IOException {
super(source, Type.BYTES_FIXED_STRAIGHT);
super(source, ValueType.BYTES_FIXED_STRAIGHT);
this.datIn = datIn;
this.size = size;
this.maxDoc = maxDoc;
@ -237,8 +237,8 @@ class FixedStraightBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_FIXED_STRAIGHT;
public ValueType type() {
return ValueType.BYTES_FIXED_STRAIGHT;
}
}
}

View File

@ -269,7 +269,12 @@ public class Floats {
*/
@Override
public Source load() throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(precisionBytes * maxDoc);
/*
* the allocated byteBuffer always uses BIG_ENDIAN here
* and since the writer uses DataOutput#writeInt() / writeLong()
* we can always assume BIG_ENDIAN
*/
final ByteBuffer buffer = ByteBuffer.allocate(precisionBytes * maxDoc);
IndexInput indexInput = (IndexInput) datIn.clone();
indexInput.seek(CodecUtil.headerLength(CODEC_NAME));
// skip precision:
@ -297,7 +302,7 @@ public class Floats {
public DocValuesEnum getEnum(AttributeSource attrSource)
throws IOException {
final MissingValue missing = getMissing();
return new SourceEnum(attrSource, Type.FLOAT_32, this, maxDoc) {
return new SourceEnum(attrSource, ValueType.FLOAT_32, this, maxDoc) {
@Override
public int advance(int target) throws IOException {
if (target >= numDocs)
@ -314,8 +319,8 @@ public class Floats {
}
@Override
public Type type() {
return Type.FLOAT_32;
public ValueType type() {
return ValueType.FLOAT_32;
}
}
@ -354,8 +359,8 @@ public class Floats {
}
@Override
public Type type() {
return Type.FLOAT_64;
public ValueType type() {
return ValueType.FLOAT_64;
}
}
@ -376,9 +381,9 @@ public class Floats {
}
@Override
public Type type() {
return precisionBytes == 4 ? Type.FLOAT_32
: Type.FLOAT_64;
public ValueType type() {
return precisionBytes == 4 ? ValueType.FLOAT_32
: ValueType.FLOAT_64;
}
}
@ -386,7 +391,7 @@ public class Floats {
Floats4Enum(AttributeSource source, IndexInput dataIn, int maxDoc)
throws IOException {
super(source, dataIn, 4, maxDoc, Type.FLOAT_32);
super(source, dataIn, 4, maxDoc, ValueType.FLOAT_32);
}
@Override
@ -422,7 +427,7 @@ public class Floats {
Floats8EnumImpl(AttributeSource source, IndexInput dataIn, int maxDoc)
throws IOException {
super(source, dataIn, 8, maxDoc, Type.FLOAT_64);
super(source, dataIn, 8, maxDoc, ValueType.FLOAT_64);
}
@Override
@ -463,9 +468,9 @@ public class Floats {
protected final long fp;
FloatsEnumImpl(AttributeSource source, IndexInput dataIn, int precision,
int maxDoc, Type type) throws IOException {
super(source, precision == 4 ? Type.FLOAT_32
: Type.FLOAT_64);
int maxDoc, ValueType type) throws IOException {
super(source, precision == 4 ? ValueType.FLOAT_32
: ValueType.FLOAT_64);
this.dataIn = dataIn;
this.precision = precision;
this.maxDoc = maxDoc;

View File

@ -81,7 +81,7 @@ public class MultiDocValues extends DocValues {
final int maxDoc;
final Source emptySoruce;
public DummyDocValues(int maxDoc, Type type) {
public DummyDocValues(int maxDoc, ValueType type) {
this.maxDoc = maxDoc;
this.emptySoruce = new EmptySource(type);
}
@ -97,7 +97,7 @@ public class MultiDocValues extends DocValues {
}
@Override
public Type type() {
public ValueType type() {
return emptySoruce.type();
}
@ -228,16 +228,16 @@ public class MultiDocValues extends DocValues {
}
@Override
public Type type() {
public ValueType type() {
return docValuesIdx[0].docValues.type();
}
}
private static class EmptySource extends Source {
private final Type type;
private final ValueType type;
public EmptySource(Type type) {
public EmptySource(ValueType type) {
this.type = type;
}
@ -263,13 +263,13 @@ public class MultiDocValues extends DocValues {
}
@Override
public Type type() {
public ValueType type() {
return type;
}
}
@Override
public Type type() {
public ValueType type() {
return this.docValuesIdx[0].docValues.type();
}
}

View File

@ -223,8 +223,8 @@ class PackedIntsImpl {
}
@Override
public Type type() {
return Type.INTS;
public ValueType type() {
return ValueType.INTS;
}
}
@ -240,8 +240,8 @@ class PackedIntsImpl {
}
@Override
public Type type() {
return Type.INTS;
public ValueType type() {
return ValueType.INTS;
}
}
@ -256,7 +256,7 @@ class PackedIntsImpl {
private IntsEnumImpl(AttributeSource source, IndexInput dataIn)
throws IOException {
super(source, Type.INTS);
super(source, ValueType.INTS);
intsRef.offset = 0;
this.dataIn = dataIn;
dataIn.seek(CodecUtil.headerLength(CODEC_NAME));

View File

@ -48,19 +48,19 @@ public interface PerDocFieldValues {
public void setFloat(double value);
/**
* Sets the given {@link BytesRef} value and the field's {@link Type}. The
* Sets the given {@link BytesRef} value and the field's {@link ValueType}. The
* comparator for this field is set to <code>null</code>. If a
* <code>null</code> comparator is set the default comparator for the given
* {@link Type} is used.
* {@link ValueType} is used.
*/
public void setBytes(BytesRef value, Type type);
public void setBytes(BytesRef value, ValueType type);
/**
* Sets the given {@link BytesRef} value, the field's {@link Type} and the
* Sets the given {@link BytesRef} value, the field's {@link ValueType} and the
* field's comparator. If the {@link Comparator} is set to <code>null</code>
* the default for the given {@link Type} is used instead.
* the default for the given {@link ValueType} is used instead.
*/
public void setBytes(BytesRef value, Type type, Comparator<BytesRef> comp);
public void setBytes(BytesRef value, ValueType type, Comparator<BytesRef> comp);
/**
* Returns the set {@link BytesRef} or <code>null</code> if not set.
@ -84,18 +84,18 @@ public interface PerDocFieldValues {
/**
* Sets the {@link BytesRef} comparator for this field. If the field has a
* numeric {@link Type} the comparator will be ignored.
* numeric {@link ValueType} the comparator will be ignored.
*/
public void setBytesComparator(Comparator<BytesRef> comp);
/**
* Sets the {@link Type}
* Sets the {@link ValueType}
*/
public void setType(Type type);
public void setType(ValueType type);
/**
* Returns the {@link Type}
* Returns the {@link ValueType}
*/
public Type type();
public ValueType type();
}

View File

@ -22,8 +22,8 @@ import org.apache.lucene.index.codecs.FieldsConsumer;
import org.apache.lucene.index.values.DocValues.SortedSource;
/**
* {@link Type} specifies the type of the {@link DocValues} for a certain field.
* A {@link Type} only defines the data type for a field while the actual
* {@link ValueType} specifies the type of the {@link DocValues} for a certain field.
* A {@link ValueType} only defines the data type for a field while the actual
* implementation used to encode and decode the values depends on the field's
* {@link Codec}. It is up to the {@link Codec} implementing
* {@link FieldsConsumer#addValuesField(org.apache.lucene.index.FieldInfo)} and
@ -32,7 +32,7 @@ import org.apache.lucene.index.values.DocValues.SortedSource;
*
* @lucene.experimental
*/
public enum Type {
public enum ValueType {
/*
* TODO: Add INT_32 INT_64 INT_16 & INT_8?!
*/

View File

@ -237,8 +237,8 @@ class VarDerefBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_VAR_DEREF;
public ValueType type() {
return ValueType.BYTES_VAR_DEREF;
}
@Override
@ -256,7 +256,7 @@ class VarDerefBytesImpl {
public VarDerefBytesEnum(AttributeSource source, IndexInput datIn,
IndexInput idxIn) throws IOException {
super(source, datIn, idxIn, -1, Type.BYTES_VAR_DEREF);
super(source, datIn, idxIn, -1, ValueType.BYTES_VAR_DEREF);
}
@Override
@ -279,8 +279,8 @@ class VarDerefBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_VAR_DEREF;
public ValueType type() {
return ValueType.BYTES_VAR_DEREF;
}
}
}

View File

@ -219,8 +219,8 @@ class VarSortedBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_VAR_SORTED;
public ValueType type() {
return ValueType.BYTES_VAR_SORTED;
}
@Override
@ -247,7 +247,7 @@ class VarSortedBytesImpl {
protected VarSortedBytesEnum(AttributeSource source, IndexInput datIn,
IndexInput idxIn) throws IOException {
super(source, Type.BYTES_VAR_SORTED);
super(source, ValueType.BYTES_VAR_SORTED);
totBytes = idxIn.readLong();
// keep that in memory to prevent lots of disk seeks
docToOrdIndex = PackedInts.getReader(idxIn);
@ -308,8 +308,8 @@ class VarSortedBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_VAR_SORTED;
public ValueType type() {
return ValueType.BYTES_VAR_SORTED;
}
}
}

View File

@ -154,8 +154,8 @@ class VarStraightBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_VAR_STRAIGHT;
public ValueType type() {
return ValueType.BYTES_VAR_STRAIGHT;
}
@Override
@ -179,7 +179,7 @@ class VarStraightBytesImpl {
protected VarStraightBytesEnum(AttributeSource source, IndexInput datIn,
IndexInput idxIn) throws IOException {
super(source, Type.BYTES_VAR_STRAIGHT);
super(source, ValueType.BYTES_VAR_STRAIGHT);
totBytes = idxIn.readVLong();
fp = datIn.getFilePointer();
addresses = PackedInts.getReader(idxIn);
@ -227,8 +227,8 @@ class VarStraightBytesImpl {
}
@Override
public Type type() {
return Type.BYTES_VAR_STRAIGHT;
public ValueType type() {
return ValueType.BYTES_VAR_STRAIGHT;
}
}
}

View File

@ -173,10 +173,10 @@ public abstract class Writer extends DocValuesConsumer {
/**
* Factory method to create a {@link Writer} instance for a given type. This
* method returns default implementations for each of the different types
* defined in the {@link Type} enumeration.
* defined in the {@link ValueType} enumeration.
*
* @param type
* the {@link Type} to create the {@link Writer} for
* the {@link ValueType} to create the {@link Writer} for
* @param id
* the file name id used to create files within the writer.
* @param directory
@ -188,10 +188,10 @@ public abstract class Writer extends DocValuesConsumer {
* default.
* @param bytesUsed
* a byte-usage tracking reference
* @return a new {@link Writer} instance for the given {@link Type}
* @return a new {@link Writer} instance for the given {@link ValueType}
* @throws IOException
*/
public static Writer create(Type type, String id, Directory directory,
public static Writer create(ValueType type, String id, Directory directory,
Comparator<BytesRef> comp, AtomicLong bytesUsed) throws IOException {
if (comp == null) {
comp = BytesRef.getUTF8SortedAsUnicodeComparator();

View File

@ -18,7 +18,7 @@ package org.apache.lucene.util;
*/
/**
* Represents float[], as a slice (offset + length) into an existing float[].
* Represents double[], as a slice (offset + length) into an existing double[].
*
* @lucene.internal
*/

View File

@ -1,36 +0,0 @@
package org.apache.lucene.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * A minimal immutable pair of two values.
 *
 * @lucene.internal
 */
public class Pair<A, B> {
  /** First element of the pair. */
  public final A cur;
  /** Second element of the pair. */
  public final B cud;

  /**
   * Creates a new pair holding the two given values.
   *
   * @param first the value exposed as {@link #cur}
   * @param second the value exposed as {@link #cud}
   */
  public Pair(A first, B second) {
    cur = first;
    cud = second;
  }
}

View File

@ -1,57 +0,0 @@
package org.apache.lucene.util;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.concurrent.atomic.AtomicLong;
/**
 * Base class for fixed-size "parallel array" structures whose memory
 * consumption is tracked in a shared {@link AtomicLong}. Subclasses declare
 * the actual arrays, report their per-entry footprint via
 * {@link #bytesPerEntry()}, and implement copying for {@link #grow()}.
 *
 * @lucene.internal
 */
public abstract class ParallelArray<T extends ParallelArray<?>> {
  /** Number of entries this array can hold. */
  public final int size;
  /** Shared counter tracking bytes consumed; updated on create/deref/grow. */
  protected final AtomicLong bytesUsed;

  /**
   * Creates a new array of the given size and charges its footprint to the
   * shared counter.
   *
   * @param size number of entries
   * @param bytesUsed shared byte-usage counter to charge
   */
  protected ParallelArray(final int size, AtomicLong bytesUsed) {
    this.size = size;
    this.bytesUsed = bytesUsed;
    // Widen to long BEFORE multiplying: size * bytesPerEntry() overflows int
    // for large arrays, which would corrupt the long-based accounting.
    bytesUsed.addAndGet((long) size * bytesPerEntry());
  }

  /** Returns the number of bytes consumed per entry across all parallel arrays. */
  protected abstract int bytesPerEntry();

  /** Returns the shared byte-usage counter this array charges against. */
  public AtomicLong bytesUsed() {
    return bytesUsed;
  }

  /** Releases this array's contribution from the shared byte-usage counter. */
  public void deref() {
    // -(long) size avoids int overflow both in the negation and the multiply.
    bytesUsed.addAndGet(-(long) size * bytesPerEntry());
  }

  /** Creates a new instance with the given size; used by {@link #grow()}. */
  public abstract T newInstance(int size);

  /**
   * Returns a grown copy of this array sized via
   * {@code ArrayUtil.oversize(size + 1, bytesPerEntry())}, with the first
   * {@code size} entries copied over.
   *
   * @return the grown copy; this instance is left unchanged
   */
  public final T grow() {
    int newSize = ArrayUtil.oversize(size + 1, bytesPerEntry());
    T newArray = newInstance(newSize);
    copyTo(newArray, size);
    // NOTE(review): newInstance() already charged newSize entries via the
    // constructor; this delta-add may double count unless callers deref the
    // old array — confirm against call sites.
    bytesUsed.addAndGet((long) (newSize - size) * bytesPerEntry());
    return newArray;
  }

  /** Copies the first {@code numToCopy} entries of this array into {@code toArray}. */
  protected abstract void copyTo(T toArray, int numToCopy);
}

View File

@ -27,7 +27,7 @@ import org.apache.lucene.document.DocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter; // javadoc
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
@ -131,8 +131,8 @@ public class RandomIndexWriter implements Closeable {
private void randomPerDocFieldValues(Random random, Document doc) {
Type[] values = Type.values();
Type type = values[random.nextInt(values.length)];
ValueType[] values = ValueType.values();
ValueType type = values[random.nextInt(values.length)];
String name = "random_" + type.name() + "" + docValuesFieldPrefix;
if ("PreFlex".equals(codecProvider.getFieldCodec(name)) || doc.getFieldable(name) != null)
return;

View File

@ -119,7 +119,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
}
/**
* Tests complete indexing of {@link Type} including deletions, merging and
* Tests complete indexing of {@link ValueType} including deletions, merging and
* sparse value fields on Compound-File
*/
public void testIndexBytesNoDeletesCFS() throws IOException {
@ -139,7 +139,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
}
/**
* Tests complete indexing of {@link Type} including deletions, merging and
* Tests complete indexing of {@link ValueType} including deletions, merging and
* sparse value fields on None-Compound-File
*/
public void testIndexBytesNoDeletes() throws IOException {
@ -160,10 +160,10 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testAddIndexes() throws IOException {
int valuesPerIndex = 10;
List<Type> values = Arrays.asList(Type.values());
List<ValueType> values = Arrays.asList(ValueType.values());
Collections.shuffle(values, random);
Type first = values.get(0);
Type second = values.get(1);
ValueType first = values.get(0);
ValueType second = values.get(1);
String msg = "[first=" + first.name() + ", second=" + second.name() + "]";
// index first index
Directory d_1 = newDirectory();
@ -204,7 +204,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
DocValuesEnum vE_1_merged = getValuesEnum(getDocValues(merged, first.name()));
DocValuesEnum vE_2_merged = getValuesEnum(getDocValues(merged, second
.name()));
if (second == Type.BYTES_VAR_STRAIGHT || second == Type.BYTES_FIXED_STRAIGHT) {
if (second == ValueType.BYTES_VAR_STRAIGHT || second == ValueType.BYTES_FIXED_STRAIGHT) {
assertEquals(msg, valuesPerIndex-1, vE_2_merged.advance(valuesPerIndex-1));
}
for (int i = 0; i < valuesPerIndex; i++) {
@ -246,11 +246,11 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, cfg);
final int numValues = 179 + random.nextInt(151);
final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
final List<ValueType> numVariantList = new ArrayList<ValueType>(NUMERICS);
// run in random order to test if fill works correctly during merges
Collections.shuffle(numVariantList, random);
for (Type val : numVariantList) {
for (ValueType val : numVariantList) {
OpenBitSet deleted = indexValues(w, numValues, val, numVariantList,
withDeletions, 7);
List<Closeable> closeables = new ArrayList<Closeable>();
@ -337,11 +337,11 @@ public class TestDocValuesIndexing extends LuceneTestCase {
throws CorruptIndexException, LockObtainFailedException, IOException {
final Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, cfg);
final List<Type> byteVariantList = new ArrayList<Type>(BYTES);
final List<ValueType> byteVariantList = new ArrayList<ValueType>(BYTES);
// run in random order to test if fill works correctly during merges
Collections.shuffle(byteVariantList, random);
final int numValues = 179 + random.nextInt(151);
for (Type byteIndexValue : byteVariantList) {
for (ValueType byteIndexValue : byteVariantList) {
List<Closeable> closeables = new ArrayList<Closeable>();
int bytesSize = 7 + random.nextInt(128);
@ -488,19 +488,19 @@ public class TestDocValuesIndexing extends LuceneTestCase {
return valuesEnum;
}
private static EnumSet<Type> BYTES = EnumSet.of(Type.BYTES_FIXED_DEREF,
Type.BYTES_FIXED_SORTED, Type.BYTES_FIXED_STRAIGHT, Type.BYTES_VAR_DEREF,
Type.BYTES_VAR_SORTED, Type.BYTES_VAR_STRAIGHT);
private static EnumSet<ValueType> BYTES = EnumSet.of(ValueType.BYTES_FIXED_DEREF,
ValueType.BYTES_FIXED_SORTED, ValueType.BYTES_FIXED_STRAIGHT, ValueType.BYTES_VAR_DEREF,
ValueType.BYTES_VAR_SORTED, ValueType.BYTES_VAR_STRAIGHT);
private static EnumSet<Type> NUMERICS = EnumSet.of(Type.INTS,
Type.FLOAT_32, Type.FLOAT_64);
private static EnumSet<ValueType> NUMERICS = EnumSet.of(ValueType.INTS,
ValueType.FLOAT_32, ValueType.FLOAT_64);
private static Index[] IDX_VALUES = new Index[] { Index.ANALYZED,
Index.ANALYZED_NO_NORMS, Index.NOT_ANALYZED, Index.NOT_ANALYZED_NO_NORMS,
Index.NO };
private OpenBitSet indexValues(IndexWriter w, int numValues, Type value,
List<Type> valueVarList, boolean withDeletions, int multOfSeven)
private OpenBitSet indexValues(IndexWriter w, int numValues, ValueType value,
List<ValueType> valueVarList, boolean withDeletions, int multOfSeven)
throws CorruptIndexException, IOException {
final boolean isNumeric = NUMERICS.contains(value);
OpenBitSet deleted = new OpenBitSet(numValues);
@ -550,7 +550,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
if (i % 7 == 0) {
if (withDeletions && random.nextBoolean()) {
Type val = valueVarList.get(random.nextInt(1 + valueVarList
ValueType val = valueVarList.get(random.nextInt(1 + valueVarList
.indexOf(value)));
final int randInt = val == value ? random.nextInt(1 + i) : random
.nextInt(numValues);

View File

@ -37,7 +37,7 @@ import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.FieldValueHitQueue.Entry;
@ -124,13 +124,13 @@ public class TestSort extends LuceneTestCase {
doc.add (new Field ("contents", data[i][1], Field.Store.NO, Field.Index.ANALYZED));
if (data[i][2] != null) {
Field f = supportsDocValues ?
DocValuesField.set(new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.INTS)
DocValuesField.set(new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED), ValueType.INTS)
: new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED);
doc.add(f);
}
if (data[i][3] != null) {
Field f = supportsDocValues ?
DocValuesField.set(new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.FLOAT_32)
DocValuesField.set(new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED), ValueType.FLOAT_32)
: new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED);
doc.add(f);
}
@ -140,7 +140,7 @@ public class TestSort extends LuceneTestCase {
if (data[i][7] != null) doc.add (new Field ("long", data[i][7], Field.Store.NO, Field.Index.NOT_ANALYZED));
if (data[i][8] != null) {
Field f = supportsDocValues ?
DocValuesField.set(new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.FLOAT_64)
DocValuesField.set(new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED), ValueType.FLOAT_64)
: new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED);
doc.add(f);
}