LUCENE-5063: Compress integer and long field caches and remove FieldCache.get(Byte|Short)s, default parsers and related class/methods (merged from r1494753 and r1495146).

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1495156 13f79535-47bb-0310-9956-ffa450edef68
Author: Adrien Grand
Date:   2013-06-20 20:07:57 +00:00
Parent: 09cc436a74
Commit: 543d33f941
47 changed files with 611 additions and 2127 deletions

CHANGES.txt

@@ -103,6 +103,12 @@ Changes in backwards compatibility policy
   on segment size and noCFSRatio. The default implementation was pulled up from
   TieredMergePolicy. (Simon Willnauer)
 
+* LUCENE-5063: FieldCache.get(Bytes|Shorts), SortField.Type.(BYTE|SHORT) and
+  FieldCache.DEFAULT_(BYTE|SHORT|INT|LONG|FLOAT|DOUBLE)_PARSER are now
+  deprecated. These methods/types assume that data is stored as strings although
+  Lucene has much better support for numeric data through (Int|Long)Field,
+  NumericRangeQuery and FieldCache.get(Int|Long)s. (Adrien Grand)
+
 Bug Fixes
 
 * LUCENE-4997: Internal test framework's tests are sensitive to previous
@@ -233,6 +239,9 @@ New Features
 * LUCENE-5025: FST's Builder can now handle more than 2.1 billion
   "tail nodes" while building a minimal FST. (Aaron Binns, Adrien
   Grand, Mike McCandless)
 
+* LUCENE-5063: FieldCache.DEFAULT.get(Ints|Longs) now uses bit-packing to save
+  memory. (Adrien Grand)
+
 Build
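
The bit-packing entry above is the core of this change: rather than one full int/long slot per document, values are stored as deltas from the minimum value, in just enough bits each. A self-contained sketch of the idea (plain JDK; the class and variable names are illustrative, not code from this patch):

    // Illustrative only: why packed deltas beat a plain long[] for field caches.
    public class PackedDeltaSketch {
      public static void main(String[] args) {
        long[] values = {1000000L, 1000003L, 1000001L, 1000007L};
        long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
        for (long v : values) {
          min = Math.min(min, v);
          max = Math.max(max, v);
        }
        // bits needed per delta: enough for (max - min); here 7 needs 3 bits
        int bitsPerValue = 64 - Long.numberOfLeadingZeros(Math.max(1L, max - min));
        System.out.println("plain long[]: " + values.length * 64 + " bits");
        System.out.println("packed deltas: " + values.length * bitsPerValue
            + " bits, plus one long for min=" + min);
        // decoding mirrors the new IntsFromArray/LongsFromArray: minValue + delta
        System.out.println("decoded: " + (min + (values[2] - min)));
      }
    }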

FieldCache.java

@@ -49,34 +49,6 @@ import org.apache.lucene.util.RamUsageEstimator;
  */
 public interface FieldCache {
 
-  /** Field values as 8-bit signed bytes */
-  public static abstract class Bytes {
-    /** Return a single Byte representation of this field's value. */
-    public abstract byte get(int docID);
-
-    /** Zero value for every document */
-    public static final Bytes EMPTY = new Bytes() {
-      @Override
-      public byte get(int docID) {
-        return 0;
-      }
-    };
-  }
-
-  /** Field values as 16-bit signed shorts */
-  public static abstract class Shorts {
-    /** Return a short representation of this field's value. */
-    public abstract short get(int docID);
-
-    /** Zero value for every document */
-    public static final Shorts EMPTY = new Shorts() {
-      @Override
-      public short get(int docID) {
-        return 0;
-      }
-    };
-  }
-
   /** Field values as 32-bit signed integers */
   public static abstract class Ints {
     /** Return an integer representation of this field's value. */
@@ -178,22 +150,6 @@
     public TermsEnum termsEnum(Terms terms) throws IOException;
   }
 
-  /** Interface to parse bytes from document fields.
-   * @see FieldCache#getBytes(AtomicReader, String, FieldCache.ByteParser, boolean)
-   */
-  public interface ByteParser extends Parser {
-    /** Return a single Byte representation of this field's value. */
-    public byte parseByte(BytesRef term);
-  }
-
-  /** Interface to parse shorts from document fields.
-   * @see FieldCache#getShorts(AtomicReader, String, FieldCache.ShortParser, boolean)
-   */
-  public interface ShortParser extends Parser {
-    /** Return a short representation of this field's value. */
-    public short parseShort(BytesRef term);
-  }
-
   /** Interface to parse ints from document fields.
    * @see FieldCache#getInts(AtomicReader, String, FieldCache.IntParser, boolean)
    */
@@ -229,135 +185,6 @@
 
   /** Expert: The cache used internally by sorting and range query classes. */
   public static FieldCache DEFAULT = new FieldCacheImpl();
 
-  /** The default parser for byte values, which are encoded by {@link Byte#toString(byte)} */
-  public static final ByteParser DEFAULT_BYTE_PARSER = new ByteParser() {
-    @Override
-    public byte parseByte(BytesRef term) {
-      // TODO: would be far better to directly parse from
-      // UTF8 bytes... but really users should use
-      // IntField, instead, which already decodes
-      // directly from byte[]
-      return Byte.parseByte(term.utf8ToString());
-    }
-    @Override
-    public String toString() {
-      return FieldCache.class.getName()+".DEFAULT_BYTE_PARSER";
-    }
-    @Override
-    public TermsEnum termsEnum(Terms terms) throws IOException {
-      return terms.iterator(null);
-    }
-  };
-
-  /** The default parser for short values, which are encoded by {@link Short#toString(short)} */
-  public static final ShortParser DEFAULT_SHORT_PARSER = new ShortParser() {
-    @Override
-    public short parseShort(BytesRef term) {
-      // TODO: would be far better to directly parse from
-      // UTF8 bytes... but really users should use
-      // IntField, instead, which already decodes
-      // directly from byte[]
-      return Short.parseShort(term.utf8ToString());
-    }
-    @Override
-    public String toString() {
-      return FieldCache.class.getName()+".DEFAULT_SHORT_PARSER";
-    }
-    @Override
-    public TermsEnum termsEnum(Terms terms) throws IOException {
-      return terms.iterator(null);
-    }
-  };
-
-  /** The default parser for int values, which are encoded by {@link Integer#toString(int)} */
-  public static final IntParser DEFAULT_INT_PARSER = new IntParser() {
-    @Override
-    public int parseInt(BytesRef term) {
-      // TODO: would be far better to directly parse from
-      // UTF8 bytes... but really users should use
-      // IntField, instead, which already decodes
-      // directly from byte[]
-      return Integer.parseInt(term.utf8ToString());
-    }
-    @Override
-    public TermsEnum termsEnum(Terms terms) throws IOException {
-      return terms.iterator(null);
-    }
-    @Override
-    public String toString() {
-      return FieldCache.class.getName()+".DEFAULT_INT_PARSER";
-    }
-  };
-
-  /** The default parser for float values, which are encoded by {@link Float#toString(float)} */
-  public static final FloatParser DEFAULT_FLOAT_PARSER = new FloatParser() {
-    @Override
-    public float parseFloat(BytesRef term) {
-      // TODO: would be far better to directly parse from
-      // UTF8 bytes... but really users should use
-      // FloatField, instead, which already decodes
-      // directly from byte[]
-      return Float.parseFloat(term.utf8ToString());
-    }
-    @Override
-    public TermsEnum termsEnum(Terms terms) throws IOException {
-      return terms.iterator(null);
-    }
-    @Override
-    public String toString() {
-      return FieldCache.class.getName()+".DEFAULT_FLOAT_PARSER";
-    }
-  };
-
-  /** The default parser for long values, which are encoded by {@link Long#toString(long)} */
-  public static final LongParser DEFAULT_LONG_PARSER = new LongParser() {
-    @Override
-    public long parseLong(BytesRef term) {
-      // TODO: would be far better to directly parse from
-      // UTF8 bytes... but really users should use
-      // LongField, instead, which already decodes
-      // directly from byte[]
-      return Long.parseLong(term.utf8ToString());
-    }
-    @Override
-    public TermsEnum termsEnum(Terms terms) throws IOException {
-      return terms.iterator(null);
-    }
-    @Override
-    public String toString() {
-      return FieldCache.class.getName()+".DEFAULT_LONG_PARSER";
-    }
-  };
-
-  /** The default parser for double values, which are encoded by {@link Double#toString(double)} */
-  public static final DoubleParser DEFAULT_DOUBLE_PARSER = new DoubleParser() {
-    @Override
-    public double parseDouble(BytesRef term) {
-      // TODO: would be far better to directly parse from
-      // UTF8 bytes... but really users should use
-      // DoubleField, instead, which already decodes
-      // directly from byte[]
-      return Double.parseDouble(term.utf8ToString());
-    }
-    @Override
-    public TermsEnum termsEnum(Terms terms) throws IOException {
-      return terms.iterator(null);
-    }
-    @Override
-    public String toString() {
-      return FieldCache.class.getName()+".DEFAULT_DOUBLE_PARSER";
-    }
-  };
-
   /**
    * A parser instance for int values encoded by {@link NumericUtils}, e.g. when indexed
    * via {@link IntField}/{@link NumericTokenStream}.
@@ -449,60 +276,6 @@
    */
   public Bits getDocsWithField(AtomicReader reader, String field) throws IOException;
 
-  /** Checks the internal cache for an appropriate entry, and if none is
-   * found, reads the terms in <code>field</code> as a single byte and returns an array
-   * of size <code>reader.maxDoc()</code> of the value each document
-   * has in the given field.
-   * @param reader  Used to get field values.
-   * @param field   Which field contains the single byte values.
-   * @param setDocsWithField  If true then {@link #getDocsWithField} will
-   *        also be computed and stored in the FieldCache.
-   * @return The values in the given field for each document.
-   * @throws IOException  If any error occurs.
-   */
-  public Bytes getBytes(AtomicReader reader, String field, boolean setDocsWithField) throws IOException;
-
-  /** Checks the internal cache for an appropriate entry, and if none is found,
-   * reads the terms in <code>field</code> as bytes and returns an array of
-   * size <code>reader.maxDoc()</code> of the value each document has in the
-   * given field.
-   * @param reader  Used to get field values.
-   * @param field   Which field contains the bytes.
-   * @param parser  Computes byte for string values.
-   * @param setDocsWithField  If true then {@link #getDocsWithField} will
-   *        also be computed and stored in the FieldCache.
-   * @return The values in the given field for each document.
-   * @throws IOException  If any error occurs.
-   */
-  public Bytes getBytes(AtomicReader reader, String field, ByteParser parser, boolean setDocsWithField) throws IOException;
-
-  /** Checks the internal cache for an appropriate entry, and if none is
-   * found, reads the terms in <code>field</code> as shorts and returns an array
-   * of size <code>reader.maxDoc()</code> of the value each document
-   * has in the given field.
-   * @param reader  Used to get field values.
-   * @param field   Which field contains the shorts.
-   * @param setDocsWithField  If true then {@link #getDocsWithField} will
-   *        also be computed and stored in the FieldCache.
-   * @return The values in the given field for each document.
-   * @throws IOException  If any error occurs.
-   */
-  public Shorts getShorts (AtomicReader reader, String field, boolean setDocsWithField) throws IOException;
-
-  /** Checks the internal cache for an appropriate entry, and if none is found,
-   * reads the terms in <code>field</code> as shorts and returns an array of
-   * size <code>reader.maxDoc()</code> of the value each document has in the
-   * given field.
-   * @param reader  Used to get field values.
-   * @param field   Which field contains the shorts.
-   * @param parser  Computes short for string values.
-   * @param setDocsWithField  If true then {@link #getDocsWithField} will
-   *        also be computed and stored in the FieldCache.
-   * @return The values in the given field for each document.
-   * @throws IOException  If any error occurs.
-   */
-  public Shorts getShorts (AtomicReader reader, String field, ShortParser parser, boolean setDocsWithField) throws IOException;
-
   /** Checks the internal cache for an appropriate entry, and if none is
    * found, reads the terms in <code>field</code> as integers and returns an array
    * of size <code>reader.maxDoc()</code> of the value each document
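
The deprecation entry in CHANGES.txt points migrating code at IntField and FieldCache.get(Int|Long)s instead of the string-based byte/short paths removed above. A minimal sketch of that migration, assuming a Lucene 4.x classpath (the field name and helper class are made up for illustration):

    import java.io.IOException;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.IntField;
    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.search.FieldCache;

    class NumericFieldMigration {
      static Document makeDoc(int weight) {
        Document doc = new Document();
        // Was: doc.add(new StringField("weight", Byte.toString((byte) weight), Field.Store.NO));
        doc.add(new IntField("weight", weight, Field.Store.NO));
        return doc;
      }

      static int readWeight(AtomicReader reader, int docID) throws IOException {
        // Falls back to NUMERIC_UTILS_INT_PARSER; no byte/short variants needed
        FieldCache.Ints weights = FieldCache.DEFAULT.getInts(reader, "weight", false);
        return weights.get(docID);
      }
    }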

FieldCacheImpl.java

@@ -38,7 +38,6 @@ import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FieldCacheSanityChecker;
@@ -63,8 +62,6 @@ class FieldCacheImpl implements FieldCache {
   private synchronized void init() {
     caches = new HashMap<Class<?>,Cache>(9);
-    caches.put(Byte.TYPE, new ByteCache(this));
-    caches.put(Short.TYPE, new ShortCache(this));
     caches.put(Integer.TYPE, new IntCache(this));
     caches.put(Float.TYPE, new FloatCache(this));
     caches.put(Long.TYPE, new LongCache(this));
@@ -355,192 +352,6 @@ class FieldCacheImpl implements FieldCache {
     }
     caches.get(DocsWithFieldCache.class).put(reader, new CacheKey(field, null), bits);
   }
 
-  // inherit javadocs
-  public Bytes getBytes (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
-    return getBytes(reader, field, null, setDocsWithField);
-  }
-
-  // inherit javadocs
-  public Bytes getBytes(AtomicReader reader, String field, ByteParser parser, boolean setDocsWithField)
-      throws IOException {
-    final NumericDocValues valuesIn = reader.getNumericDocValues(field);
-    if (valuesIn != null) {
-      // Not cached here by FieldCacheImpl (cached instead
-      // per-thread by SegmentReader):
-      return new Bytes() {
-        @Override
-        public byte get(int docID) {
-          return (byte) valuesIn.get(docID);
-        }
-      };
-    } else {
-      final FieldInfo info = reader.getFieldInfos().fieldInfo(field);
-      if (info == null) {
-        return Bytes.EMPTY;
-      } else if (info.hasDocValues()) {
-        throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
-      } else if (!info.isIndexed()) {
-        return Bytes.EMPTY;
-      }
-      return (Bytes) caches.get(Byte.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
-    }
-  }
-
-  static class BytesFromArray extends Bytes {
-    private final byte[] values;
-
-    public BytesFromArray(byte[] values) {
-      this.values = values;
-    }
-
-    @Override
-    public byte get(int docID) {
-      return values[docID];
-    }
-  }
-
-  static final class ByteCache extends Cache {
-    ByteCache(FieldCacheImpl wrapper) {
-      super(wrapper);
-    }
-
-    @Override
-    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
-        throws IOException {
-      int maxDoc = reader.maxDoc();
-      final byte[] values;
-      final ByteParser parser = (ByteParser) key.custom;
-      if (parser == null) {
-        // Confusing: must delegate to wrapper (vs simply
-        // setting parser = DEFAULT_SHORT_PARSER) so cache
-        // key includes DEFAULT_SHORT_PARSER:
-        return wrapper.getBytes(reader, key.field, DEFAULT_BYTE_PARSER, setDocsWithField);
-      }
-
-      values = new byte[maxDoc];
-      Uninvert u = new Uninvert() {
-          private byte currentValue;
-
-          @Override
-          public void visitTerm(BytesRef term) {
-            currentValue = parser.parseByte(term);
-          }
-
-          @Override
-          public void visitDoc(int docID) {
-            values[docID] = currentValue;
-          }
-
-          @Override
-          protected TermsEnum termsEnum(Terms terms) throws IOException {
-            return parser.termsEnum(terms);
-          }
-        };
-
-      u.uninvert(reader, key.field, setDocsWithField);
-
-      if (setDocsWithField) {
-        wrapper.setDocsWithField(reader, key.field, u.docsWithField);
-      }
-      return new BytesFromArray(values);
-    }
-  }
-
-  // inherit javadocs
-  public Shorts getShorts (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
-    return getShorts(reader, field, null, setDocsWithField);
-  }
-
-  // inherit javadocs
-  public Shorts getShorts(AtomicReader reader, String field, ShortParser parser, boolean setDocsWithField)
-      throws IOException {
-    final NumericDocValues valuesIn = reader.getNumericDocValues(field);
-    if (valuesIn != null) {
-      // Not cached here by FieldCacheImpl (cached instead
-      // per-thread by SegmentReader):
-      return new Shorts() {
-        @Override
-        public short get(int docID) {
-          return (short) valuesIn.get(docID);
-        }
-      };
-    } else {
-      final FieldInfo info = reader.getFieldInfos().fieldInfo(field);
-      if (info == null) {
-        return Shorts.EMPTY;
-      } else if (info.hasDocValues()) {
-        throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
-      } else if (!info.isIndexed()) {
-        return Shorts.EMPTY;
-      }
-      return (Shorts) caches.get(Short.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
-    }
-  }
-
-  static class ShortsFromArray extends Shorts {
-    private final short[] values;
-
-    public ShortsFromArray(short[] values) {
-      this.values = values;
-    }
-
-    @Override
-    public short get(int docID) {
-      return values[docID];
-    }
-  }
-
-  static final class ShortCache extends Cache {
-    ShortCache(FieldCacheImpl wrapper) {
-      super(wrapper);
-    }
-
-    @Override
-    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
-        throws IOException {
-      int maxDoc = reader.maxDoc();
-      final short[] values;
-      final ShortParser parser = (ShortParser) key.custom;
-      if (parser == null) {
-        // Confusing: must delegate to wrapper (vs simply
-        // setting parser = DEFAULT_SHORT_PARSER) so cache
-        // key includes DEFAULT_SHORT_PARSER:
-        return wrapper.getShorts(reader, key.field, DEFAULT_SHORT_PARSER, setDocsWithField);
-      }
-
-      values = new short[maxDoc];
-      Uninvert u = new Uninvert() {
-          private short currentValue;
-
-          @Override
-          public void visitTerm(BytesRef term) {
-            currentValue = parser.parseShort(term);
-          }
-
-          @Override
-          public void visitDoc(int docID) {
-            values[docID] = currentValue;
-          }
-
-          @Override
-          protected TermsEnum termsEnum(Terms terms) throws IOException {
-            return parser.termsEnum(terms);
-          }
-        };
-
-      u.uninvert(reader, key.field, setDocsWithField);
-
-      if (setDocsWithField) {
-        wrapper.setDocsWithField(reader, key.field, u.docsWithField);
-      }
-      return new ShortsFromArray(values);
-    }
-  }
-
   // inherit javadocs
   public Ints getInts (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
@@ -574,15 +385,19 @@ class FieldCacheImpl implements FieldCache {
   }
 
   static class IntsFromArray extends Ints {
-    private final int[] values;
+    private final PackedInts.Reader values;
+    private final int minValue;
 
-    public IntsFromArray(int[] values) {
+    public IntsFromArray(PackedInts.Reader values, int minValue) {
+      assert values.getBitsPerValue() <= 32;
       this.values = values;
+      this.minValue = minValue;
     }
 
     @Override
     public int get(int docID) {
-      return values[docID];
+      final long delta = values.get(docID);
+      return minValue + (int) delta;
     }
   }
@@ -598,6 +413,15 @@ class FieldCacheImpl implements FieldCache {
     }
   }
 
+  private static class GrowableWriterAndMinValue {
+    GrowableWriterAndMinValue(GrowableWriter array, long minValue) {
+      this.writer = array;
+      this.minValue = minValue;
+    }
+    public GrowableWriter writer;
+    public long minValue;
+  }
+
   static final class IntCache extends Cache {
     IntCache(FieldCacheImpl wrapper) {
       super(wrapper);
@@ -610,22 +434,17 @@ class FieldCacheImpl implements FieldCache {
       final IntParser parser = (IntParser) key.custom;
       if (parser == null) {
         // Confusing: must delegate to wrapper (vs simply
-        // setting parser =
-        // DEFAULT_INT_PARSER/NUMERIC_UTILS_INT_PARSER) so
-        // cache key includes
-        // DEFAULT_INT_PARSER/NUMERIC_UTILS_INT_PARSER:
-        try {
-          return wrapper.getInts(reader, key.field, DEFAULT_INT_PARSER, setDocsWithField);
-        } catch (NumberFormatException ne) {
-          return wrapper.getInts(reader, key.field, NUMERIC_UTILS_INT_PARSER, setDocsWithField);
-        }
+        // setting parser = NUMERIC_UTILS_INT_PARSER) so
+        // cache key includes NUMERIC_UTILS_INT_PARSER:
+        return wrapper.getInts(reader, key.field, NUMERIC_UTILS_INT_PARSER, setDocsWithField);
       }
 
-      final HoldsOneThing<int[]> valuesRef = new HoldsOneThing<int[]>();
+      final HoldsOneThing<GrowableWriterAndMinValue> valuesRef = new HoldsOneThing<GrowableWriterAndMinValue>();
 
       Uninvert u = new Uninvert() {
+          private int minValue;
           private int currentValue;
-          private int[] values;
+          private GrowableWriter values;
 
           @Override
           public void visitTerm(BytesRef term) {
@@ -635,16 +454,28 @@ class FieldCacheImpl implements FieldCache {
             // (which will hit a NumberFormatException
             // when we first try the DEFAULT_INT_PARSER),
             // we don't double-alloc:
-            values = new int[reader.maxDoc()];
-            valuesRef.set(values);
+            int startBitsPerValue;
+            // Make sure that missing values (0) can be stored without resizing
+            if (currentValue < 0) {
+              minValue = currentValue;
+              startBitsPerValue = PackedInts.bitsRequired((-minValue) & 0xFFFFFFFFL);
+            } else {
+              minValue = 0;
+              startBitsPerValue = PackedInts.bitsRequired(currentValue);
+            }
+            values = new GrowableWriter(startBitsPerValue, reader.maxDoc(), PackedInts.FAST);
+            if (minValue != 0) {
+              values.fill(0, values.size(), (-minValue) & 0xFFFFFFFFL); // default value must be 0
+            }
+            valuesRef.set(new GrowableWriterAndMinValue(values, minValue));
           }
         }
 
         @Override
         public void visitDoc(int docID) {
-          values[docID] = currentValue;
+          values.set(docID, (currentValue - minValue) & 0xFFFFFFFFL);
         }
 
         @Override
         protected TermsEnum termsEnum(Terms terms) throws IOException {
           return parser.termsEnum(terms);
@@ -656,11 +487,11 @@ class FieldCacheImpl implements FieldCache {
       if (setDocsWithField) {
         wrapper.setDocsWithField(reader, key.field, u.docsWithField);
       }
-      int[] values = valuesRef.get();
+      GrowableWriterAndMinValue values = valuesRef.get();
       if (values == null) {
-        values = new int[reader.maxDoc()];
+        return new IntsFromArray(new PackedInts.NullReader(reader.maxDoc()), 0);
       }
-      return new IntsFromArray(values);
+      return new IntsFromArray(values.writer.getMutable(), (int) values.minValue);
     }
   }
@@ -792,15 +623,9 @@ class FieldCacheImpl implements FieldCache {
       final FloatParser parser = (FloatParser) key.custom;
       if (parser == null) {
         // Confusing: must delegate to wrapper (vs simply
-        // setting parser =
-        // DEFAULT_FLOAT_PARSER/NUMERIC_UTILS_FLOAT_PARSER) so
-        // cache key includes
-        // DEFAULT_FLOAT_PARSER/NUMERIC_UTILS_FLOAT_PARSER:
-        try {
-          return wrapper.getFloats(reader, key.field, DEFAULT_FLOAT_PARSER, setDocsWithField);
-        } catch (NumberFormatException ne) {
-          return wrapper.getFloats(reader, key.field, NUMERIC_UTILS_FLOAT_PARSER, setDocsWithField);
-        }
+        // setting parser = NUMERIC_UTILS_FLOAT_PARSER) so
+        // cache key includes NUMERIC_UTILS_FLOAT_PARSER:
+        return wrapper.getFloats(reader, key.field, NUMERIC_UTILS_FLOAT_PARSER, setDocsWithField);
       }
 
       final HoldsOneThing<float[]> valuesRef = new HoldsOneThing<float[]>();
@@ -879,15 +704,17 @@ class FieldCacheImpl implements FieldCache {
   }
 
   static class LongsFromArray extends Longs {
-    private final long[] values;
+    private final PackedInts.Reader values;
+    private final long minValue;
 
-    public LongsFromArray(long[] values) {
+    public LongsFromArray(PackedInts.Reader values, long minValue) {
       this.values = values;
+      this.minValue = minValue;
     }
 
     @Override
     public long get(int docID) {
-      return values[docID];
+      return minValue + values.get(docID);
     }
   }
@@ -903,22 +730,17 @@ class FieldCacheImpl implements FieldCache {
       final LongParser parser = (LongParser) key.custom;
       if (parser == null) {
         // Confusing: must delegate to wrapper (vs simply
-        // setting parser =
-        // DEFAULT_LONG_PARSER/NUMERIC_UTILS_LONG_PARSER) so
-        // cache key includes
-        // DEFAULT_LONG_PARSER/NUMERIC_UTILS_LONG_PARSER:
-        try {
-          return wrapper.getLongs(reader, key.field, DEFAULT_LONG_PARSER, setDocsWithField);
-        } catch (NumberFormatException ne) {
-          return wrapper.getLongs(reader, key.field, NUMERIC_UTILS_LONG_PARSER, setDocsWithField);
-        }
+        // setting parser = NUMERIC_UTILS_LONG_PARSER) so
+        // cache key includes NUMERIC_UTILS_LONG_PARSER:
+        return wrapper.getLongs(reader, key.field, NUMERIC_UTILS_LONG_PARSER, setDocsWithField);
       }
 
-      final HoldsOneThing<long[]> valuesRef = new HoldsOneThing<long[]>();
+      final HoldsOneThing<GrowableWriterAndMinValue> valuesRef = new HoldsOneThing<GrowableWriterAndMinValue>();
 
       Uninvert u = new Uninvert() {
+          private long minValue;
           private long currentValue;
-          private long[] values;
+          private GrowableWriter values;
 
           @Override
           public void visitTerm(BytesRef term) {
@@ -928,14 +750,26 @@ class FieldCacheImpl implements FieldCache {
             // (which will hit a NumberFormatException
             // when we first try the DEFAULT_INT_PARSER),
            // we don't double-alloc:
-            values = new long[reader.maxDoc()];
-            valuesRef.set(values);
+            int startBitsPerValue;
+            // Make sure that missing values (0) can be stored without resizing
+            if (currentValue < 0) {
+              minValue = currentValue;
+              startBitsPerValue = minValue == Long.MIN_VALUE ? 64 : PackedInts.bitsRequired(-minValue);
+            } else {
+              minValue = 0;
+              startBitsPerValue = PackedInts.bitsRequired(currentValue);
+            }
+            values = new GrowableWriter(startBitsPerValue, reader.maxDoc(), PackedInts.FAST);
+            if (minValue != 0) {
+              values.fill(0, values.size(), -minValue); // default value must be 0
+            }
+            valuesRef.set(new GrowableWriterAndMinValue(values, minValue));
           }
         }
 
         @Override
         public void visitDoc(int docID) {
-          values[docID] = currentValue;
+          values.set(docID, currentValue - minValue);
         }
 
         @Override
@@ -949,11 +783,11 @@ class FieldCacheImpl implements FieldCache {
       if (setDocsWithField) {
         wrapper.setDocsWithField(reader, key.field, u.docsWithField);
       }
-      long[] values = valuesRef.get();
+      GrowableWriterAndMinValue values = valuesRef.get();
       if (values == null) {
-        values = new long[reader.maxDoc()];
+        return new LongsFromArray(new PackedInts.NullReader(reader.maxDoc()), 0L);
       }
-      return new LongsFromArray(values);
+      return new LongsFromArray(values.writer.getMutable(), values.minValue);
     }
   }
@@ -1014,15 +848,9 @@ class FieldCacheImpl implements FieldCache {
       final DoubleParser parser = (DoubleParser) key.custom;
       if (parser == null) {
         // Confusing: must delegate to wrapper (vs simply
-        // setting parser =
-        // DEFAULT_DOUBLE_PARSER/NUMERIC_UTILS_DOUBLE_PARSER) so
-        // cache key includes
-        // DEFAULT_DOUBLE_PARSER/NUMERIC_UTILS_DOUBLE_PARSER:
-        try {
-          return wrapper.getDoubles(reader, key.field, DEFAULT_DOUBLE_PARSER, setDocsWithField);
-        } catch (NumberFormatException ne) {
-          return wrapper.getDoubles(reader, key.field, NUMERIC_UTILS_DOUBLE_PARSER, setDocsWithField);
-        }
+        // setting parser = NUMERIC_UTILS_DOUBLE_PARSER) so
+        // cache key includes NUMERIC_UTILS_DOUBLE_PARSER:
+        return wrapper.getDoubles(reader, key.field, NUMERIC_UTILS_DOUBLE_PARSER, setDocsWithField);
       }
 
       final HoldsOneThing<double[]> valuesRef = new HoldsOneThing<double[]>();
@@ -1146,7 +974,6 @@ class FieldCacheImpl implements FieldCache {
     final PagedBytes bytes = new PagedBytes(15);
 
     int startTermsBPV;
-    int startNumUniqueTerms;
 
     final int termCountHardLimit;
     if (maxDoc == Integer.MAX_VALUE) {
@@ -1170,15 +997,11 @@ class FieldCacheImpl implements FieldCache {
         }
 
         startTermsBPV = PackedInts.bitsRequired(numUniqueTerms);
-        startNumUniqueTerms = (int) numUniqueTerms;
       } else {
         startTermsBPV = 1;
-        startNumUniqueTerms = 1;
       }
     } else {
       startTermsBPV = 1;
-      startNumUniqueTerms = 1;
     }
 
     MonotonicAppendingLongBuffer termOrdToBytesOffset = new MonotonicAppendingLongBuffer();
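
One detail of the IntCache changes above deserves a worked example: deltas are masked with 0xFFFFFFFFL so they are treated as unsigned 32-bit quantities, and decoding relies on int overflow in minValue + (int) delta wrapping back to the original value. A standalone demonstration (plain JDK; the class name is illustrative):

    // For minValue = Integer.MIN_VALUE and a value of Integer.MAX_VALUE, the
    // int subtraction overflows to -1; masking yields the unsigned delta
    // 4294967295, which fits in 32 packed bits and decodes back exactly.
    public class DeltaMaskDemo {
      public static void main(String[] args) {
        int minValue = Integer.MIN_VALUE;
        int currentValue = Integer.MAX_VALUE;
        long delta = (currentValue - minValue) & 0xFFFFFFFFL;
        System.out.println(delta);                  // 4294967295
        System.out.println(minValue + (int) delta); // 2147483647, the original
      }
    }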

FieldCacheRangeFilter.java

@@ -193,109 +193,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
       }
     };
   }
 
-  /**
-   * Creates a numeric range filter using {@link FieldCache#getBytes(AtomicReader,String,boolean)}. This works with all
-   * byte fields containing exactly one numeric term in the field. The range can be half-open by setting one
-   * of the values to <code>null</code>.
-   */
-  public static FieldCacheRangeFilter<Byte> newByteRange(String field, Byte lowerVal, Byte upperVal, boolean includeLower, boolean includeUpper) {
-    return newByteRange(field, null, lowerVal, upperVal, includeLower, includeUpper);
-  }
-
-  /**
-   * Creates a numeric range filter using {@link FieldCache#getBytes(AtomicReader,String,FieldCache.ByteParser,boolean)}. This works with all
-   * byte fields containing exactly one numeric term in the field. The range can be half-open by setting one
-   * of the values to <code>null</code>.
-   */
-  public static FieldCacheRangeFilter<Byte> newByteRange(String field, FieldCache.ByteParser parser, Byte lowerVal, Byte upperVal, boolean includeLower, boolean includeUpper) {
-    return new FieldCacheRangeFilter<Byte>(field, parser, lowerVal, upperVal, includeLower, includeUpper) {
-      @Override
-      public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-        final byte inclusiveLowerPoint, inclusiveUpperPoint;
-        if (lowerVal != null) {
-          final byte i = lowerVal.byteValue();
-          if (!includeLower && i == Byte.MAX_VALUE)
-            return null;
-          inclusiveLowerPoint = (byte) (includeLower ? i : (i + 1));
-        } else {
-          inclusiveLowerPoint = Byte.MIN_VALUE;
-        }
-        if (upperVal != null) {
-          final byte i = upperVal.byteValue();
-          if (!includeUpper && i == Byte.MIN_VALUE)
-            return null;
-          inclusiveUpperPoint = (byte) (includeUpper ? i : (i - 1));
-        } else {
-          inclusiveUpperPoint = Byte.MAX_VALUE;
-        }
-
-        if (inclusiveLowerPoint > inclusiveUpperPoint)
-          return null;
-
-        final FieldCache.Bytes values = FieldCache.DEFAULT.getBytes(context.reader(), field, (FieldCache.ByteParser) parser, false);
-        return new FieldCacheDocIdSet(context.reader().maxDoc(), acceptDocs) {
-          @Override
-          protected boolean matchDoc(int doc) {
-            final byte value = values.get(doc);
-            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-          }
-        };
-      }
-    };
-  }
-
-  /**
-   * Creates a numeric range filter using {@link FieldCache#getShorts(AtomicReader,String,boolean)}. This works with all
-   * short fields containing exactly one numeric term in the field. The range can be half-open by setting one
-   * of the values to <code>null</code>.
-   */
-  public static FieldCacheRangeFilter<Short> newShortRange(String field, Short lowerVal, Short upperVal, boolean includeLower, boolean includeUpper) {
-    return newShortRange(field, null, lowerVal, upperVal, includeLower, includeUpper);
-  }
-
-  /**
-   * Creates a numeric range filter using {@link FieldCache#getShorts(AtomicReader,String,FieldCache.ShortParser,boolean)}. This works with all
-   * short fields containing exactly one numeric term in the field. The range can be half-open by setting one
-   * of the values to <code>null</code>.
-   */
-  public static FieldCacheRangeFilter<Short> newShortRange(String field, FieldCache.ShortParser parser, Short lowerVal, Short upperVal, boolean includeLower, boolean includeUpper) {
-    return new FieldCacheRangeFilter<Short>(field, parser, lowerVal, upperVal, includeLower, includeUpper) {
-      @Override
-      public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-        final short inclusiveLowerPoint, inclusiveUpperPoint;
-        if (lowerVal != null) {
-          short i = lowerVal.shortValue();
-          if (!includeLower && i == Short.MAX_VALUE)
-            return null;
-          inclusiveLowerPoint = (short) (includeLower ? i : (i + 1));
-        } else {
-          inclusiveLowerPoint = Short.MIN_VALUE;
-        }
-        if (upperVal != null) {
-          short i = upperVal.shortValue();
-          if (!includeUpper && i == Short.MIN_VALUE)
-            return null;
-          inclusiveUpperPoint = (short) (includeUpper ? i : (i - 1));
-        } else {
-          inclusiveUpperPoint = Short.MAX_VALUE;
-        }
-
-        if (inclusiveLowerPoint > inclusiveUpperPoint)
-          return null;
-
-        final FieldCache.Shorts values = FieldCache.DEFAULT.getShorts(context.reader(), field, (FieldCache.ShortParser) parser, false);
-        return new FieldCacheDocIdSet(context.reader().maxDoc(), acceptDocs) {
-          @Override
-          protected boolean matchDoc(int doc) {
-            final short value = values.get(doc);
-            return value >= inclusiveLowerPoint && value <= inclusiveUpperPoint;
-          }
-        };
-      }
-    };
-  }
-
   /**
    * Creates a numeric range filter using {@link FieldCache#getInts(AtomicReader,String,boolean)}. This works with all
    * int fields containing exactly one numeric term in the field. The range can be half-open by setting one
@@ -521,7 +419,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
   }
 
   @Override
-  @SuppressWarnings({"unchecked","rawtypes"})
+  @SuppressWarnings({"rawtypes"})
   public final boolean equals(Object o) {
     if (this == o) return true;
     if (!(o instanceof FieldCacheRangeFilter)) return false;
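
With newByteRange and newShortRange removed above, an equivalent filter over a small-integer field goes through the remaining int variant, with the field indexed as an IntField. A sketch (the field name and wrapper class are illustrative):

    import org.apache.lucene.search.FieldCacheRangeFilter;
    import org.apache.lucene.search.Filter;

    class RangeFilterMigration {
      static Filter ratingBetween(int lo, int hi) {
        // Was: FieldCacheRangeFilter.newByteRange("rating", (byte) lo, (byte) hi, true, true)
        return FieldCacheRangeFilter.newIntRange("rating", lo, hi, true, true);
      }
    }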

FieldComparator.java

@@ -22,12 +22,10 @@ import java.io.IOException;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.FieldCache.ByteParser;
 import org.apache.lucene.search.FieldCache.DoubleParser;
 import org.apache.lucene.search.FieldCache.FloatParser;
 import org.apache.lucene.search.FieldCache.IntParser;
 import org.apache.lucene.search.FieldCache.LongParser;
-import org.apache.lucene.search.FieldCache.ShortParser;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -218,78 +216,6 @@ public abstract class FieldComparator<T> {
     }
   }
 
-  /** Parses field's values as byte (using {@link
-   *  FieldCache#getBytes} and sorts by ascending value */
-  public static final class ByteComparator extends NumericComparator<Byte> {
-    private final byte[] values;
-    private final ByteParser parser;
-    private FieldCache.Bytes currentReaderValues;
-    private byte bottom;
-
-    ByteComparator(int numHits, String field, FieldCache.Parser parser, Byte missingValue) {
-      super(field, missingValue);
-      values = new byte[numHits];
-      this.parser = (ByteParser) parser;
-    }
-
-    @Override
-    public int compare(int slot1, int slot2) {
-      return Byte.compare(values[slot1], values[slot2]);
-    }
-
-    @Override
-    public int compareBottom(int doc) {
-      byte v2 = currentReaderValues.get(doc);
-      // Test for v2 == 0 to save Bits.get method call for
-      // the common case (doc has value and value is non-zero):
-      if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
-        v2 = missingValue;
-      }
-      return Byte.compare(bottom, v2);
-    }
-
-    @Override
-    public void copy(int slot, int doc) {
-      byte v2 = currentReaderValues.get(doc);
-      // Test for v2 == 0 to save Bits.get method call for
-      // the common case (doc has value and value is non-zero):
-      if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
-        v2 = missingValue;
-      }
-      values[slot] = v2;
-    }
-
-    @Override
-    public FieldComparator<Byte> setNextReader(AtomicReaderContext context) throws IOException {
-      // NOTE: must do this before calling super otherwise
-      // we compute the docsWithField Bits twice!
-      currentReaderValues = FieldCache.DEFAULT.getBytes(context.reader(), field, parser, missingValue != null);
-      return super.setNextReader(context);
-    }
-
-    @Override
-    public void setBottom(final int bottom) {
-      this.bottom = values[bottom];
-    }
-
-    @Override
-    public Byte value(int slot) {
-      return Byte.valueOf(values[slot]);
-    }
-
-    @Override
-    public int compareDocToValue(int doc, Byte value) {
-      byte docValue = currentReaderValues.get(doc);
-      // Test for docValue == 0 to save Bits.get method call for
-      // the common case (doc has value and value is non-zero):
-      if (docsWithField != null && docValue == 0 && !docsWithField.get(doc)) {
-        docValue = missingValue;
-      }
-      return Byte.compare(docValue, value.byteValue());
-    }
-  }
-
   /** Parses field's values as double (using {@link
    *  FieldCache#getDoubles} and sorts by ascending value */
   public static final class DoubleComparator extends NumericComparator<Double> {
@@ -439,80 +365,6 @@ public abstract class FieldComparator<T> {
     }
   }
 
-  /** Parses field's values as short (using {@link
-   *  FieldCache#getShorts} and sorts by ascending value */
-  public static final class ShortComparator extends NumericComparator<Short> {
-    private final short[] values;
-    private final ShortParser parser;
-    private FieldCache.Shorts currentReaderValues;
-    private short bottom;
-
-    ShortComparator(int numHits, String field, FieldCache.Parser parser, Short missingValue) {
-      super(field, missingValue);
-      values = new short[numHits];
-      this.parser = (ShortParser) parser;
-    }
-
-    @Override
-    public int compare(int slot1, int slot2) {
-      return Short.compare(values[slot1], values[slot2]);
-    }
-
-    @Override
-    public int compareBottom(int doc) {
-      short v2 = currentReaderValues.get(doc);
-      // Test for v2 == 0 to save Bits.get method call for
-      // the common case (doc has value and value is non-zero):
-      if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
-        v2 = missingValue;
-      }
-      return Short.compare(bottom, v2);
-    }
-
-    @Override
-    public void copy(int slot, int doc) {
-      short v2 = currentReaderValues.get(doc);
-      // Test for v2 == 0 to save Bits.get method call for
-      // the common case (doc has value and value is non-zero):
-      if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
-        v2 = missingValue;
-      }
-      values[slot] = v2;
-    }
-
-    @Override
-    public FieldComparator<Short> setNextReader(AtomicReaderContext context) throws IOException {
-      // NOTE: must do this before calling super otherwise
-      // we compute the docsWithField Bits twice!
-      currentReaderValues = FieldCache.DEFAULT.getShorts(context.reader(), field, parser, missingValue != null);
-      return super.setNextReader(context);
-    }
-
-    @Override
-    public void setBottom(final int bottom) {
-      this.bottom = values[bottom];
-    }
-
-    @Override
-    public Short value(int slot) {
-      return Short.valueOf(values[slot]);
-    }
-
-    @Override
-    public int compareDocToValue(int doc, Short valueObj) {
-      final short value = valueObj.shortValue();
-      short docValue = currentReaderValues.get(doc);
-      // Test for docValue == 0 to save Bits.get method call for
-      // the common case (doc has value and value is non-zero):
-      if (docsWithField != null && docValue == 0 && !docsWithField.get(doc)) {
-        docValue = missingValue;
-      }
-      return Short.compare(docValue, value);
-    }
-  }
-
   /** Parses field's values as int (using {@link
    *  FieldCache#getInts} and sorts by ascending value */
   public static final class IntComparator extends NumericComparator<Integer> {

SortField.java

@@ -71,18 +71,10 @@ public class SortField {
    * lower values are at the front. */
   DOUBLE,
 
-  /** Sort using term values as encoded Shorts. Sort values are Short and
-   * lower values are at the front. */
-  SHORT,
-
   /** Sort using a custom Comparator. Sort values are any Comparable and
    * sorting is done according to natural order. */
   CUSTOM,
 
-  /** Sort using term values as encoded Bytes. Sort values are Byte and
-   * lower values are at the front. */
-  BYTE,
-
   /** Sort using term values as Strings, but comparing by
    * value (using String.compareTo) for all comparisons.
    * This is typically slower than {@link #STRING}, which
@@ -164,8 +156,6 @@ public class SortField {
   public SortField(String field, FieldCache.Parser parser, boolean reverse) {
     if (parser instanceof FieldCache.IntParser) initFieldType(field, Type.INT);
     else if (parser instanceof FieldCache.FloatParser) initFieldType(field, Type.FLOAT);
-    else if (parser instanceof FieldCache.ShortParser) initFieldType(field, Type.SHORT);
-    else if (parser instanceof FieldCache.ByteParser) initFieldType(field, Type.BYTE);
     else if (parser instanceof FieldCache.LongParser) initFieldType(field, Type.LONG);
     else if (parser instanceof FieldCache.DoubleParser) initFieldType(field, Type.DOUBLE);
     else {
@@ -177,7 +167,7 @@ public class SortField {
   }
 
   public SortField setMissingValue(Object missingValue) {
-    if (type != Type.BYTE && type != Type.SHORT && type != Type.INT && type != Type.FLOAT && type != Type.LONG && type != Type.DOUBLE) {
+    if (type != Type.INT && type != Type.FLOAT && type != Type.LONG && type != Type.DOUBLE) {
       throw new IllegalArgumentException( "Missing value only works for numeric types" );
     }
     this.missingValue = missingValue;
@@ -274,14 +264,6 @@ public class SortField {
         buffer.append("<string_val" + ": \"").append(field).append("\">");
         break;
 
-      case BYTE:
-        buffer.append("<byte: \"").append(field).append("\">");
-        break;
-
-      case SHORT:
-        buffer.append("<short: \"").append(field).append("\">");
-        break;
-
       case INT:
         buffer.append("<int" + ": \"").append(field).append("\">");
         break;
@@ -389,12 +371,6 @@ public class SortField {
     case DOUBLE:
       return new FieldComparator.DoubleComparator(numHits, field, parser, (Double) missingValue);
 
-    case BYTE:
-      return new FieldComparator.ByteComparator(numHits, field, parser, (Byte) missingValue);
-
-    case SHORT:
-      return new FieldComparator.ShortComparator(numHits, field, parser, (Short) missingValue);
-
     case CUSTOM:
       assert comparatorSource != null;
       return comparatorSource.newComparator(field, numHits, sortPos, reverse);
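
Callers that sorted with the removed Type.BYTE or Type.SHORT would now index the field as an IntField and sort with Type.INT. A sketch (the field name and wrapper class are illustrative):

    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;

    class SortMigration {
      static Sort byRating() {
        // Was: new Sort(new SortField("rating", SortField.Type.BYTE))
        return new Sort(new SortField("rating", SortField.Type.INT));
      }
    }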

GrowableWriter.java

@@ -25,20 +25,30 @@ import org.apache.lucene.util.RamUsageEstimator;
 /**
  * Implements {@link PackedInts.Mutable}, but grows the
  * bit count of the underlying packed ints on-demand.
+ * <p>Beware that this class will accept negative values, but in order
+ * to do so it will grow the number of bits per value to 64.
  *
  * <p>@lucene.internal</p>
  */
 public class GrowableWriter implements PackedInts.Mutable {
 
-  private long currentMaxValue;
+  private long currentMask;
   private PackedInts.Mutable current;
   private final float acceptableOverheadRatio;
 
+  /**
+   * @param startBitsPerValue       the initial number of bits per value, may grow depending on the data
+   * @param valueCount              the number of values
+   * @param acceptableOverheadRatio an acceptable overhead ratio
+   */
   public GrowableWriter(int startBitsPerValue, int valueCount, float acceptableOverheadRatio) {
     this.acceptableOverheadRatio = acceptableOverheadRatio;
     current = PackedInts.getMutable(valueCount, startBitsPerValue, this.acceptableOverheadRatio);
-    currentMaxValue = PackedInts.maxValue(current.getBitsPerValue());
+    currentMask = mask(current.getBitsPerValue());
+  }
+
+  private static long mask(int bitsPerValue) {
+    return bitsPerValue == 64 ? ~0L : PackedInts.maxValue(bitsPerValue);
   }
@Override @Override
@@ -71,16 +81,16 @@ public class GrowableWriter implements PackedInts.Mutable {
   }
 
   private void ensureCapacity(long value) {
-    assert value >= 0;
-    if (value <= currentMaxValue) {
+    if ((value & currentMask) == value) {
       return;
     }
-    final int bitsRequired = PackedInts.bitsRequired(value);
+    final int bitsRequired = value < 0 ? 64 : PackedInts.bitsRequired(value);
+    assert bitsRequired > current.getBitsPerValue();
     final int valueCount = size();
     PackedInts.Mutable next = PackedInts.getMutable(valueCount, bitsRequired, acceptableOverheadRatio);
     PackedInts.copy(current, 0, next, 0, valueCount, PackedInts.DEFAULT_BUFFER_SIZE);
     current = next;
-    currentMaxValue = PackedInts.maxValue(current.getBitsPerValue());
+    currentMask = mask(current.getBitsPerValue());
   }
@Override @Override
@@ -110,6 +120,10 @@ public class GrowableWriter implements PackedInts.Mutable {
   public int set(int index, long[] arr, int off, int len) {
     long max = 0;
     for (int i = off, end = off + len; i < end; ++i) {
+      // bitwise or is nice because either all values are positive and the
+      // or-ed result will require as many bits per value as the max of the
+      // values, or one of them is negative and the result will be negative,
+      // forcing GrowableWriter to use 64 bits per value
       max |= arr[i];
     }
     ensureCapacity(max);
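
A usage sketch of the negative-value behavior the new javadoc above warns about: setting a negative value no longer trips the old assertion; the writer instead grows to 64 bits per value so the two's-complement bit pattern fits. This assumes the patch is applied; the initial bit width may be rounded up by the overhead ratio.

    import org.apache.lucene.util.packed.GrowableWriter;
    import org.apache.lucene.util.packed.PackedInts;

    public class GrowableWriterDemo {
      public static void main(String[] args) {
        GrowableWriter w = new GrowableWriter(4, 16, PackedInts.FAST);
        w.set(0, 7);                             // fits in the initial width
        System.out.println(w.getBitsPerValue()); // 4, possibly rounded up
        w.set(1, -1);                            // negative: forces 64 bits per value
        System.out.println(w.getBitsPerValue()); // 64
        System.out.println(w.get(1));            // -1, round-tripped intact
      }
    }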

TestSearch.java

@@ -127,7 +127,7 @@ public class TestSearch extends LuceneTestCase {
       for (int j = 0; j < docs.length; j++) {
         Document d = new Document();
         d.add(newTextField("contents", docs[j], Field.Store.YES));
-        d.add(newStringField("id", ""+j, Field.Store.NO));
+        d.add(new IntField("id", j, Field.Store.NO));
         writer.addDocument(d);
       }
       writer.close();

TestSearchForDuplicates.java

@@ -81,7 +81,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
       for (int j = 0; j < MAX_DOCS; j++) {
         Document d = new Document();
         d.add(newTextField(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES));
-        d.add(newTextField(ID_FIELD, Integer.toString(j), Field.Store.YES));
+        d.add(new IntField(ID_FIELD, j, Field.Store.YES));
         writer.addDocument(d);
       }
       writer.close();

TestDirectoryReader.java

@@ -33,6 +33,7 @@ import org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
@@ -764,7 +765,7 @@ public void testFilesOpenClose() throws IOException {
         setMergePolicy(newLogMergePolicy(10))
     );
     Document doc = new Document();
-    doc.add(newStringField("number", "17", Field.Store.NO));
+    doc.add(new IntField("number", 17, Field.Store.NO));
     writer.addDocument(doc);
     writer.commit();

TestDocValuesWithThreads.java

@@ -87,23 +87,17 @@ public class TestDocValuesWithThreads extends LuceneTestCase {
           BytesRef scratch2 = new BytesRef();
           for(int iter=0;iter<iters;iter++) {
             int docID = threadRandom.nextInt(numDocs);
-            switch(threadRandom.nextInt(6)) {
+            switch(threadRandom.nextInt(4)) {
             case 0:
-              assertEquals((byte) numbers.get(docID).longValue(), FieldCache.DEFAULT.getBytes(ar, "number", false).get(docID));
-              break;
-            case 1:
-              assertEquals((short) numbers.get(docID).longValue(), FieldCache.DEFAULT.getShorts(ar, "number", false).get(docID));
-              break;
-            case 2:
               assertEquals((int) numbers.get(docID).longValue(), FieldCache.DEFAULT.getInts(ar, "number", false).get(docID));
               break;
-            case 3:
+            case 1:
               assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getLongs(ar, "number", false).get(docID));
               break;
-            case 4:
+            case 2:
               assertEquals(Float.intBitsToFloat((int) numbers.get(docID).longValue()), FieldCache.DEFAULT.getFloats(ar, "number", false).get(docID), 0.0f);
               break;
-            case 5:
+            case 3:
               assertEquals(Double.longBitsToDouble(numbers.get(docID).longValue()), FieldCache.DEFAULT.getDoubles(ar, "number", false).get(docID), 0.0);
               break;
             }

TestRollingUpdates.java

@@ -59,7 +59,7 @@ public class TestRollingUpdates extends LuceneTestCase {
     // TODO: sometimes update ids not in order...
     for(int docIter=0;docIter<numUpdates;docIter++) {
       final Document doc = docs.nextDoc();
-      final String myID = ""+id;
+      final String myID = Integer.toString(id);
       if (id == SIZE-1) {
         id = 0;
       } else {

BaseTestRangeFilter.java

@@ -22,7 +22,12 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FloatField;
+import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -115,9 +120,17 @@ public class BaseTestRangeFilter extends LuceneTestCase {
 
     Document doc = new Document();
     Field idField = newStringField(random, "id", "", Field.Store.YES);
+    Field intIdField = new IntField("id_int", 0, Store.YES);
+    Field floatIdField = new FloatField("id_float", 0, Store.YES);
+    Field longIdField = new LongField("id_long", 0, Store.YES);
+    Field doubleIdField = new DoubleField("id_double", 0, Store.YES);
     Field randField = newStringField(random, "rand", "", Field.Store.YES);
     Field bodyField = newStringField(random, "body", "", Field.Store.NO);
     doc.add(idField);
+    doc.add(intIdField);
+    doc.add(floatIdField);
+    doc.add(longIdField);
+    doc.add(doubleIdField);
     doc.add(randField);
     doc.add(bodyField);
@@ -133,6 +146,10 @@ public class BaseTestRangeFilter extends LuceneTestCase {
 
       for (int d = minId; d <= maxId; d++) {
         idField.setStringValue(pad(d));
+        intIdField.setIntValue(d);
+        floatIdField.setFloatValue(d);
+        longIdField.setLongValue(d);
+        doubleIdField.setDoubleValue(d);
         int r = index.allowNegativeRandomInts ? random.nextInt() : random
             .nextInt(Integer.MAX_VALUE);
         if (index.maxR < r) {

TestFieldCache.java

@@ -23,33 +23,45 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FloatField;
 import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StoredField;
-import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.FieldCache.Bytes;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.DocTermOrds;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.FieldCache.Doubles;
 import org.apache.lucene.search.FieldCache.Floats;
 import org.apache.lucene.search.FieldCache.Ints;
 import org.apache.lucene.search.FieldCache.Longs;
-import org.apache.lucene.search.FieldCache.Shorts;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util._TestUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@ -70,8 +82,6 @@ public class TestFieldCache extends LuceneTestCase {
RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
long theLong = Long.MAX_VALUE; long theLong = Long.MAX_VALUE;
double theDouble = Double.MAX_VALUE; double theDouble = Double.MAX_VALUE;
byte theByte = Byte.MAX_VALUE;
short theShort = Short.MAX_VALUE;
int theInt = Integer.MAX_VALUE; int theInt = Integer.MAX_VALUE;
float theFloat = Float.MAX_VALUE; float theFloat = Float.MAX_VALUE;
unicodeStrings = new String[NUM_DOCS]; unicodeStrings = new String[NUM_DOCS];
@ -81,14 +91,12 @@ public class TestFieldCache extends LuceneTestCase {
} }
for (int i = 0; i < NUM_DOCS; i++){ for (int i = 0; i < NUM_DOCS; i++){
Document doc = new Document(); Document doc = new Document();
doc.add(newStringField("theLong", String.valueOf(theLong--), Field.Store.NO)); doc.add(new LongField("theLong", theLong--, Field.Store.NO));
doc.add(newStringField("theDouble", String.valueOf(theDouble--), Field.Store.NO)); doc.add(new DoubleField("theDouble", theDouble--, Field.Store.NO));
doc.add(newStringField("theByte", String.valueOf(theByte--), Field.Store.NO)); doc.add(new IntField("theInt", theInt--, Field.Store.NO));
doc.add(newStringField("theShort", String.valueOf(theShort--), Field.Store.NO)); doc.add(new FloatField("theFloat", theFloat--, Field.Store.NO));
doc.add(newStringField("theInt", String.valueOf(theInt--), Field.Store.NO));
doc.add(newStringField("theFloat", String.valueOf(theFloat--), Field.Store.NO));
if (i%2 == 0) { if (i%2 == 0) {
doc.add(newStringField("sparse", String.valueOf(i), Field.Store.NO)); doc.add(new IntField("sparse", i, Field.Store.NO));
} }
if (i%2 == 0) { if (i%2 == 0) {
@@ -133,7 +141,16 @@ public class TestFieldCache extends LuceneTestCase {
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
cache.setInfoStream(new PrintStream(bos, false, "UTF-8"));
cache.getDoubles(reader, "theDouble", false);
cache.getFloats(reader, "theDouble", false);
cache.getFloats(reader, "theDouble", new FieldCache.FloatParser() {
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return NumericUtils.filterPrefixCodedLongs(terms.iterator(null));
}
@Override
public float parseFloat(BytesRef term) {
return NumericUtils.sortableIntToFloat((int) NumericUtils.prefixCodedToLong(term));
}
}, false);
assertTrue(bos.toString("UTF-8").indexOf("WARNING") != -1);
} finally {
FieldCache.DEFAULT.purgeAllCaches();
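The anonymous FieldCache.FloatParser above deliberately reads the prefix-coded terms of "theDouble" as floats; that type mismatch is what makes the sanity checker emit the WARNING the assert looks for. For comparison, a correctly matched read of a real float field would go through the built-in numeric parser. A sketch only, not part of the commit, assuming an AtomicReader named reader:

// Matched types: a FloatField read through the numeric float parser
// populates the cache without tripping the sanity checker.
FieldCache.Floats floats = FieldCache.DEFAULT.getFloats(reader, "theFloat", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, false);
float first = floats.get(0);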
@@ -144,42 +161,28 @@
FieldCache cache = FieldCache.DEFAULT;
FieldCache.Doubles doubles = cache.getDoubles(reader, "theDouble", random().nextBoolean());
assertSame("Second request to cache return same array", doubles, cache.getDoubles(reader, "theDouble", random().nextBoolean()));
assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, random().nextBoolean()));
assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, random().nextBoolean()));
for (int i = 0; i < NUM_DOCS; i++) {
assertTrue(doubles.get(i) + " does not equal: " + (Double.MAX_VALUE - i), doubles.get(i) == (Double.MAX_VALUE - i));
}
FieldCache.Longs longs = cache.getLongs(reader, "theLong", random().nextBoolean());
assertSame("Second request to cache return same array", longs, cache.getLongs(reader, "theLong", random().nextBoolean()));
assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.DEFAULT_LONG_PARSER, random().nextBoolean()));
assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.NUMERIC_UTILS_LONG_PARSER, random().nextBoolean()));
for (int i = 0; i < NUM_DOCS; i++) {
assertTrue(longs.get(i) + " does not equal: " + (Long.MAX_VALUE - i) + " i=" + i, longs.get(i) == (Long.MAX_VALUE - i));
}
FieldCache.Bytes bytes = cache.getBytes(reader, "theByte", random().nextBoolean());
assertSame("Second request to cache return same array", bytes, cache.getBytes(reader, "theByte", random().nextBoolean()));
assertSame("Second request with explicit parser return same array", bytes, cache.getBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER, random().nextBoolean()));
for (int i = 0; i < NUM_DOCS; i++) {
assertTrue(bytes.get(i) + " does not equal: " + (Byte.MAX_VALUE - i), bytes.get(i) == (byte) (Byte.MAX_VALUE - i));
}
FieldCache.Shorts shorts = cache.getShorts(reader, "theShort", random().nextBoolean());
assertSame("Second request to cache return same array", shorts, cache.getShorts(reader, "theShort", random().nextBoolean()));
assertSame("Second request with explicit parser return same array", shorts, cache.getShorts(reader, "theShort", FieldCache.DEFAULT_SHORT_PARSER, random().nextBoolean()));
for (int i = 0; i < NUM_DOCS; i++) {
assertTrue(shorts.get(i) + " does not equal: " + (Short.MAX_VALUE - i), shorts.get(i) == (short) (Short.MAX_VALUE - i));
}
FieldCache.Ints ints = cache.getInts(reader, "theInt", random().nextBoolean());
assertSame("Second request to cache return same array", ints, cache.getInts(reader, "theInt", random().nextBoolean()));
assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.DEFAULT_INT_PARSER, random().nextBoolean()));
assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, random().nextBoolean()));
for (int i = 0; i < NUM_DOCS; i++) {
assertTrue(ints.get(i) + " does not equal: " + (Integer.MAX_VALUE - i), ints.get(i) == (Integer.MAX_VALUE - i));
}
FieldCache.Floats floats = cache.getFloats(reader, "theFloat", random().nextBoolean());
assertSame("Second request to cache return same array", floats, cache.getFloats(reader, "theFloat", random().nextBoolean()));
assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.DEFAULT_FLOAT_PARSER, random().nextBoolean()));
assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.NUMERIC_UTILS_FLOAT_PARSER, random().nextBoolean()));
for (int i = 0; i < NUM_DOCS; i++) {
assertTrue(floats.get(i) + " does not equal: " + (Float.MAX_VALUE - i), floats.get(i) == (Float.MAX_VALUE - i));
}
@@ -587,12 +590,6 @@ public class TestFieldCache extends LuceneTestCase {
cache.purgeAllCaches();
assertEquals(0, cache.getCacheEntries().length);
Bytes bytes = cache.getBytes(ar, "bogusbytes", true);
assertEquals(0, bytes.get(0));
Shorts shorts = cache.getShorts(ar, "bogusshorts", true);
assertEquals(0, shorts.get(0));
Ints ints = cache.getInts(ar, "bogusints", true);
assertEquals(0, ints.get(0));
@@ -652,12 +649,6 @@ public class TestFieldCache extends LuceneTestCase {
cache.purgeAllCaches();
assertEquals(0, cache.getCacheEntries().length);
Bytes bytes = cache.getBytes(ar, "bogusbytes", true);
assertEquals(0, bytes.get(0));
Shorts shorts = cache.getShorts(ar, "bogusshorts", true);
assertEquals(0, shorts.get(0));
Ints ints = cache.getInts(ar, "bogusints", true);
assertEquals(0, ints.get(0));
@@ -692,4 +683,97 @@ public class TestFieldCache extends LuceneTestCase {
ir.close();
dir.close();
}
// Make sure that the use of GrowableWriter doesn't prevent from using the full long range
public void testLongFieldCache() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
cfg.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
Document doc = new Document();
LongField field = new LongField("f", 0L, Store.YES);
doc.add(field);
final long[] values = new long[_TestUtil.nextInt(random(), 1, 10)];
for (int i = 0; i < values.length; ++i) {
final long v;
switch (random().nextInt(10)) {
case 0:
v = Long.MIN_VALUE;
break;
case 1:
v = 0;
break;
case 2:
v = Long.MAX_VALUE;
break;
default:
v = _TestUtil.nextLong(random(), -10, 10);
break;
}
values[i] = v;
if (v == 0 && random().nextBoolean()) {
// missing
iw.addDocument(new Document());
} else {
field.setLongValue(v);
iw.addDocument(doc);
}
}
iw.forceMerge(1);
final DirectoryReader reader = iw.getReader();
final FieldCache.Longs longs = FieldCache.DEFAULT.getLongs(getOnlySegmentReader(reader), "f", false);
for (int i = 0; i < values.length; ++i) {
assertEquals(values[i], longs.get(i));
}
reader.close();
iw.close();
dir.close();
}
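testLongFieldCache targets the new compressed storage: since LUCENE-5063, FieldCache.DEFAULT.get(Ints|Longs) packs values with a GrowableWriter that starts at a small bits-per-value and widens as larger values arrive, and the test mixes tiny values with Long.MIN_VALUE/MAX_VALUE to prove the full range survives. A rough sketch of that growth behaviour, with illustrative sizes and values (uses org.apache.lucene.util.packed.GrowableWriter and PackedInts):

// Start at 1 bit per value for 128 slots, with a compact overhead budget.
GrowableWriter packed = new GrowableWriter(1, 128, PackedInts.COMPACT);
packed.set(0, 1L);             // fits in the initial width
packed.set(1, Long.MAX_VALUE); // forces a transparent resize to a wider packed array
assert packed.get(1) == Long.MAX_VALUE; // no precision is lost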
// Make sure that the use of GrowableWriter doesn't prevent from using the full int range
public void testIntFieldCache() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
cfg.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
Document doc = new Document();
IntField field = new IntField("f", 0, Store.YES);
doc.add(field);
final int[] values = new int[_TestUtil.nextInt(random(), 1, 10)];
for (int i = 0; i < values.length; ++i) {
final int v;
switch (random().nextInt(10)) {
case 0:
v = Integer.MIN_VALUE;
break;
case 1:
v = 0;
break;
case 2:
v = Integer.MAX_VALUE;
break;
default:
v = _TestUtil.nextInt(random(), -10, 10);
break;
}
values[i] = v;
if (v == 0 && random().nextBoolean()) {
// missing
iw.addDocument(new Document());
} else {
field.setIntValue(v);
iw.addDocument(doc);
}
}
iw.forceMerge(1);
final DirectoryReader reader = iw.getReader();
final FieldCache.Ints ints = FieldCache.DEFAULT.getInts(getOnlySegmentReader(reader), "f", false);
for (int i = 0; i < values.length; ++i) {
assertEquals(values[i], ints.get(i));
}
reader.close();
iw.close();
dir.close();
}
}


@@ -19,15 +19,17 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.junit.Test;
/**
@@ -187,98 +189,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
assertEquals("max,nul,T,T", 1, result.length);
}
// byte-ranges cannot be tested, because all ranges are too big for bytes, need an extra range for that
@Test
public void testFieldCacheRangeFilterShorts() throws IOException {
IndexReader reader = signedIndexReader;
IndexSearcher search = newSearcher(reader);
int numDocs = reader.numDocs();
int medId = ((maxId - minId) / 2);
Short minIdO = Short.valueOf((short) minId);
Short maxIdO = Short.valueOf((short) maxId);
Short medIdO = Short.valueOf((short) medId);
assertEquals("num of docs", numDocs, 1+ maxId - minId);
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
// test id, bounded on both ends
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,T,F), numDocs).scoreDocs;
assertEquals("all but last", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,F,T), numDocs).scoreDocs;
assertEquals("all but first", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs-2, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("med and up", 1+ maxId-medId, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("up to med", 1+ medId-minId, result.length);
// unbounded id
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,null,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,null,T,F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,maxIdO,F,T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,null,F,F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,maxIdO,T,F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId-medId, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,medIdO,F,T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId-minId, result.length);
// very small sets
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,minIdO,F,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,medIdO,F,F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",minIdO,minIdO,T,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,minIdO,F,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,null,T,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",medIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
// special cases
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",Short.valueOf(Short.MAX_VALUE),null,F,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",null,Short.valueOf(Short.MIN_VALUE),F,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
assertEquals("inverse range", 0, result.length);
}
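The removed testFieldCacheRangeFilterShorts gets no byte/short successor: with FieldCache.get(Bytes|Shorts) deprecated, small integral ids are covered by the int test that follows, which runs newIntRange against the typed id_int field. The caller-side migration is mechanical; a hedged sketch with illustrative bounds:

// Before (removed API usage):
//   FieldCacheRangeFilter.newShortRange("id", minShortO, maxShortO, true, true)
// After: index the value as an IntField named "id_int" and use the int variant.
Filter f = FieldCacheRangeFilter.newIntRange("id_int", -100, 100, true, true);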
@Test
public void testFieldCacheRangeFilterInts() throws IOException {
@@ -298,75 +208,75 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
// test id, bounded on both ends
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,T,F), numDocs).scoreDocs;
assertEquals("all but last", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,F,T), numDocs).scoreDocs;
assertEquals("all but first", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,maxIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs-2, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,maxIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("med and up", 1+ maxId-medId, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("up to med", 1+ medId-minId, result.length);
// unbounded id
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,null,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,null,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,null,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,null,T,F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,maxIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,maxIdO,F,T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,null,F,F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,maxIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,maxIdO,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,maxIdO,T,F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId-medId, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,medIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,medIdO,F,T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId-minId, result.length);
// very small sets
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,minIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,minIdO,F,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,medIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,medIdO,F,F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",minIdO,minIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",minIdO,minIdO,T,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,minIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,minIdO,F,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,null,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,null,T,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",medIdO,medIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",medIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
// special cases
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",Integer.valueOf(Integer.MAX_VALUE),null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",Integer.valueOf(Integer.MAX_VALUE),null,F,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",null,Integer.valueOf(Integer.MIN_VALUE),F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",null,Integer.valueOf(Integer.MIN_VALUE),F,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",maxIdO,minIdO,T,T), numDocs).scoreDocs;
assertEquals("inverse range", 0, result.length);
}
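Throughout these assertions the T and F constants are the includeLower/includeUpper flags, which is why the bounded variants differ from "find all" by exactly one or two hits. For example (sketch, reusing the test's boxed bounds):

// Inclusive lower bound, exclusive upper bound: matches minId..maxId-1,
// matching the "all but last" expectation of numDocs-1 hits above.
Filter allButLast = FieldCacheRangeFilter.newIntRange("id_int", minIdO, maxIdO, true, false);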
@@ -389,75 +299,75 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
// test id, bounded on both ends
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,T,F), numDocs).scoreDocs;
assertEquals("all but last", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,F,T), numDocs).scoreDocs;
assertEquals("all but first", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,maxIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs-2, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,maxIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("med and up", 1+ maxId-medId, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("up to med", 1+ medId-minId, result.length);
// unbounded id
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,null,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,null,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,null,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,null,T,F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,maxIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,maxIdO,F,T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,null,F,F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,maxIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs-1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,maxIdO,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,maxIdO,T,F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId-medId, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,medIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,medIdO,F,T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId-minId, result.length);
// very small sets
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,minIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,minIdO,F,F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,medIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,medIdO,F,F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,maxIdO,F,F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",minIdO,minIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",minIdO,minIdO,T,T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,minIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,minIdO,F,T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,maxIdO,T,T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,null,T,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,null,T,F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",medIdO,medIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",medIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
// special cases
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",Long.valueOf(Long.MAX_VALUE),null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",Long.valueOf(Long.MAX_VALUE),null,F,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",null,Long.valueOf(Long.MIN_VALUE),F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",null,Long.valueOf(Long.MIN_VALUE),F,F), numDocs).scoreDocs;
assertEquals("overflow special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newLongRange("id_long",maxIdO,minIdO,T,T), numDocs).scoreDocs;
assertEquals("inverse range", 0, result.length);
}
@@ -476,19 +386,19 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",minIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs/2, result.length);
int count = 0;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,medIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",null,medIdO,F,T), numDocs).scoreDocs;
count += result.length;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",medIdO,null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",medIdO,null,F,F), numDocs).scoreDocs;
count += result.length;
assertEquals("sum of two concenatted ranges", numDocs, count);
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,null,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",null,null,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",Float.valueOf(Float.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",Float.valueOf(Float.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,Float.valueOf(Float.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id_float",null,Float.valueOf(Float.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
}
@@ -505,19 +415,19 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",minIdO,medIdO,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",minIdO,medIdO,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs/2, result.length);
int count = 0;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null,medIdO,F,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",null,medIdO,F,T), numDocs).scoreDocs;
count += result.length;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",medIdO,null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",medIdO,null,F,F), numDocs).scoreDocs;
count += result.length;
assertEquals("sum of two concenatted ranges", numDocs, count);
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null,null,T,T), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",null,null,T,T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",Double.valueOf(Double.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",Double.valueOf(Double.POSITIVE_INFINITY),null,F,F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null, Double.valueOf(Double.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id_double",null, Double.valueOf(Double.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
assertEquals("infinity special case", 0, result.length);
}
@@ -529,13 +439,15 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
for (int d = -20; d <= 20; d++) {
Document doc = new Document();
doc.add(newStringField("id", Integer.toString(d), Field.Store.NO));
doc.add(new IntField("id_int", d, Field.Store.NO));
doc.add(newStringField("body", "body", Field.Store.NO));
writer.addDocument(doc);
}
writer.forceMerge(1);
writer.deleteDocuments(new Term("id","0"));
BytesRef term0 = new BytesRef();
NumericUtils.intToPrefixCoded(0, 0, term0);
writer.deleteDocuments(new Term("id_int", term0));
writer.close();
IndexReader reader = DirectoryReader.open(dir);
@@ -545,19 +457,19 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
ScoreDoc[] result;
Query q = new TermQuery(new Term("body","body"));
result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) 20),T,T), 100).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",-20,20,T,T), 100).scoreDocs;
assertEquals("find all", 40, result.length);
result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) 0),Byte.valueOf((byte) 20),T,T), 100).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",0,20,T,T), 100).scoreDocs;
assertEquals("find all", 20, result.length);
result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) 0),T,T), 100).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",-20,0,T,T), 100).scoreDocs;
assertEquals("find all", 20, result.length);
result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) 10),Byte.valueOf((byte) 20),T,T), 100).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",10,20,T,T), 100).scoreDocs;
assertEquals("find all", 11, result.length);
result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) -10),T,T), 100).scoreDocs;
result = search.search(q,FieldCacheRangeFilter.newIntRange("id_int",-20,-10,T,T), 100).scoreDocs;
assertEquals("find all", 11, result.length);
reader.close();
dir.close();
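The rewritten delete above is the subtle part of the conversion: IntField indexes prefix-coded terms, so a plain Term("id_int", "0") would match nothing; the value has to be encoded through NumericUtils first, exactly as the added lines do. The same pattern for an arbitrary value (sketch mirroring those lines):

// Encode v into its full-precision prefix-coded term (shift 0),
// then delete by that term.
int v = 7; // illustrative value
BytesRef bytes = new BytesRef();
NumericUtils.intToPrefixCoded(v, 0, bytes);
writer.deleteDocuments(new Term("id_int", bytes));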


@@ -111,8 +111,6 @@ public class TestSearchAfter extends LuceneTestCase {
assertQuery(query, filter, Sort.INDEXORDER);
for(int rev=0;rev<2;rev++) {
boolean reversed = rev == 1;
assertQuery(query, filter, new Sort(new SortField[] {new SortField("byte", SortField.Type.BYTE, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("short", SortField.Type.SHORT, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("int", SortField.Type.INT, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("long", SortField.Type.LONG, reversed)}));
assertQuery(query, filter, new Sort(new SortField[] {new SortField("float", SortField.Type.FLOAT, reversed)}));


@@ -219,7 +219,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
//sort = new Sort(SortField.FIELD_DOC);
sort = null;
} else if (what == 2) {
sort = new Sort(new SortField[] {new SortField("docid", SortField.Type.INT, random().nextBoolean())});
sort = new Sort(new SortField[] {new SortField("docid_int", SortField.Type.INT, random().nextBoolean())});
} else {
sort = new Sort(new SortField[] {new SortField("title", SortField.Type.STRING, random().nextBoolean())});
}


@@ -24,7 +24,11 @@ import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -329,259 +333,19 @@ public class TestSort extends LuceneTestCase {
ir.close();
dir.close();
}
/** Tests sorting on type byte */
public void testByte() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newStringField("value", "23", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Sort sort = new Sort(new SortField("value", SortField.Type.BYTE));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// numeric order
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
assertEquals("23", searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type byte with a missing value */
public void testByteMissing() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Sort sort = new Sort(new SortField("value", SortField.Type.BYTE));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// null value is treated as a 0
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type byte, specifying the missing value should be treated as Byte.MAX_VALUE */
public void testByteMissingLast() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
SortField sortField = new SortField("value", SortField.Type.BYTE);
sortField.setMissingValue(Byte.MAX_VALUE);
Sort sort = new Sort(sortField);
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// null value is treated Byte.MAX_VALUE
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type byte in reverse */
public void testByteReverse() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newStringField("value", "23", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Sort sort = new Sort(new SortField("value", SortField.Type.BYTE, true));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// reverse numeric order
assertEquals("23", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type short */
public void testShort() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newStringField("value", "300", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Sort sort = new Sort(new SortField("value", SortField.Type.SHORT));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// numeric order
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
assertEquals("300", searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type short with a missing value */
public void testShortMissing() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Sort sort = new Sort(new SortField("value", SortField.Type.SHORT));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// null is treated as a 0
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type short, specifying the missing value should be treated as Short.MAX_VALUE */
public void testShortMissingLast() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
SortField sortField = new SortField("value", SortField.Type.SHORT);
sortField.setMissingValue(Short.MAX_VALUE);
Sort sort = new Sort(sortField);
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// null is treated as Short.MAX_VALUE
assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
assertNull(searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
/** Tests sorting on type short in reverse */
public void testShortReverse() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newStringField("value", "300", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Sort sort = new Sort(new SortField("value", SortField.Type.SHORT, true));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
assertEquals(3, td.totalHits);
// reverse numeric order
assertEquals("300", searcher.doc(td.scoreDocs[0].doc).get("value"));
assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
ir.close();
dir.close();
}
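None of the removed byte/short sorts come back in another form: SortField.Type.BYTE and SHORT are deprecated by this change, and small integral values are expected to be indexed as IntField and sorted as Type.INT, as the retained tests below show. In migration terms (sketch):

// Before (removed): new SortField("value", SortField.Type.SHORT)
// After: index with new IntField("value", v, Field.Store.YES) and sort with:
Sort sort = new Sort(new SortField("value", SortField.Type.INT));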
/** Tests sorting on type int */
public void testInt() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newStringField("value", "300000", Field.Store.YES));
doc.add(new IntField("value", 300000, Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
doc.add(new IntField("value", -1, Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
doc.add(new IntField("value", 4, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
@@ -607,10 +371,10 @@ public class TestSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
doc.add(new IntField("value", -1, Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
doc.add(new IntField("value", 4, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
@@ -636,10 +400,10 @@ public class TestSort extends LuceneTestCase {
Document doc = new Document();
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
doc.add(new IntField("value", -1, Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
doc.add(new IntField("value", 4, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
@@ -665,13 +429,13 @@ public class TestSort extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newStringField("value", "300000", Field.Store.YES));
doc.add(new IntField("value", 300000, Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "-1", Field.Store.YES));
doc.add(new IntField("value", -1, Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(newStringField("value", "4", Field.Store.YES));
doc.add(new IntField("value", 4, Field.Store.YES));
writer.addDocument(doc);
IndexReader ir = writer.getReader();
writer.close();
@@ -695,13 +459,13 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "3000000000", Field.Store.YES));
+    doc.add(new LongField("value", 3000000000L, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1", Field.Store.YES));
+    doc.add(new LongField("value", -1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4", Field.Store.YES));
+    doc.add(new LongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -727,10 +491,10 @@ public class TestSort extends LuceneTestCase {
     Document doc = new Document();
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1", Field.Store.YES));
+    doc.add(new LongField("value", -1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4", Field.Store.YES));
+    doc.add(new LongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -756,10 +520,10 @@ public class TestSort extends LuceneTestCase {
     Document doc = new Document();
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1", Field.Store.YES));
+    doc.add(new LongField("value", -1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4", Field.Store.YES));
+    doc.add(new LongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -785,13 +549,13 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "3000000000", Field.Store.YES));
+    doc.add(new LongField("value", 3000000000L, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1", Field.Store.YES));
+    doc.add(new LongField("value", -1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4", Field.Store.YES));
+    doc.add(new LongField("value", 4, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -815,13 +579,13 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "30.1", Field.Store.YES));
+    doc.add(new FloatField("value", 30.1f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new FloatField("value", -1.3f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2", Field.Store.YES));
+    doc.add(new FloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -847,10 +611,10 @@ public class TestSort extends LuceneTestCase {
     Document doc = new Document();
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new FloatField("value", -1.3f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2", Field.Store.YES));
+    doc.add(new FloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -876,10 +640,10 @@ public class TestSort extends LuceneTestCase {
     Document doc = new Document();
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new FloatField("value", -1.3f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2", Field.Store.YES));
+    doc.add(new FloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -905,13 +669,13 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "30.1", Field.Store.YES));
+    doc.add(new FloatField("value", 30.1f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new FloatField("value", -1.3f, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2", Field.Store.YES));
+    doc.add(new FloatField("value", 4.2f, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -935,16 +699,16 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "30.1", Field.Store.YES));
+    doc.add(new DoubleField("value", 30.1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new DoubleField("value", -1.3, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333333", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333333, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333332", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -969,10 +733,10 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "+0", Field.Store.YES));
+    doc.add(new DoubleField("value", +0d, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-0", Field.Store.YES));
+    doc.add(new DoubleField("value", -0d, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
     IndexReader ir = writer.getReader();
@@ -984,8 +748,13 @@ public class TestSort extends LuceneTestCase {
     TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
     assertEquals(2, td.totalHits);
     // numeric order
-    assertEquals("-0", searcher.doc(td.scoreDocs[0].doc).get("value"));
-    assertEquals("+0", searcher.doc(td.scoreDocs[1].doc).get("value"));
+    double v0 = searcher.doc(td.scoreDocs[0].doc).getField("value").numericValue().doubleValue();
+    double v1 = searcher.doc(td.scoreDocs[1].doc).getField("value").numericValue().doubleValue();
+    assertEquals(0, v0, 0d);
+    assertEquals(0, v1, 0d);
+    // check sign bits
+    assertEquals(1, Double.doubleToLongBits(v0) >>> 63);
+    assertEquals(0, Double.doubleToLongBits(v1) >>> 63);
     ir.close();
     dir.close();
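The rewritten assertions above need the sign bit because -0.0 and +0.0 are equal under ordinary double comparison; only the raw IEEE 754 bit pattern distinguishes them. A standalone illustration of that Java semantics (not patch code):

    // -0.0 == +0.0 as doubles, so the sign must be read from the
    // raw bit pattern: bit 63 is the IEEE 754 sign bit.
    double neg = -0.0d, pos = +0.0d;
    assert neg == pos;                               // numerically indistinguishable
    assert Double.doubleToLongBits(neg) >>> 63 == 1; // sign bit set
    assert Double.doubleToLongBits(pos) >>> 63 == 0; // sign bit clear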
@@ -998,13 +767,13 @@ public class TestSort extends LuceneTestCase {
     Document doc = new Document();
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new DoubleField("value", -1.3, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333333", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333333, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333332", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -1031,13 +800,13 @@ public class TestSort extends LuceneTestCase {
     Document doc = new Document();
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new DoubleField("value", -1.3, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333333", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333333, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333332", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -1064,16 +833,16 @@ public class TestSort extends LuceneTestCase {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
-    doc.add(newStringField("value", "30.1", Field.Store.YES));
+    doc.add(new DoubleField("value", 30.1, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "-1.3", Field.Store.YES));
+    doc.add(new DoubleField("value", -1.3, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333333", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333333, Field.Store.YES));
     writer.addDocument(doc);
     doc = new Document();
-    doc.add(newStringField("value", "4.2333333333332", Field.Store.YES));
+    doc.add(new DoubleField("value", 4.2333333333332, Field.Store.YES));
     writer.addDocument(doc);
     IndexReader ir = writer.getReader();
     writer.close();
@@ -1150,7 +919,7 @@ public class TestSort extends LuceneTestCase {
     for(int seg=0;seg<2;seg++) {
       for(int docIDX=0;docIDX<10;docIDX++) {
         Document doc = new Document();
-        doc.add(newStringField("id", ""+docIDX, Field.Store.YES));
+        doc.add(new IntField("id", docIDX, Field.Store.YES));
         StringBuilder sb = new StringBuilder();
         for(int i=0;i<id;i++) {
           sb.append(' ');
@@ -1250,94 +1019,6 @@ public class TestSort extends LuceneTestCase {
     dir.close();
   }
-  /**
-   * test sorts for a custom byte parser that uses a simple char encoding
-   */
-  public void testCustomByteParser() throws Exception {
-    List<String> letters = Arrays.asList(new String[] { "A", "B", "C", "D", "E", "F", "G", "H", "I", "J" });
-    Collections.shuffle(letters, random());
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-    for (String letter : letters) {
-      Document doc = new Document();
-      doc.add(newStringField("parser", letter, Field.Store.YES));
-      iw.addDocument(doc);
-    }
-    IndexReader ir = iw.getReader();
-    iw.close();
-    IndexSearcher searcher = newSearcher(ir);
-    Sort sort = new Sort(new SortField("parser", new FieldCache.ByteParser() {
-      @Override
-      public byte parseByte(BytesRef term) {
-        return (byte) (term.bytes[term.offset]-'A');
-      }
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
-    // results should be in alphabetical order
-    assertEquals(10, td.totalHits);
-    Collections.sort(letters);
-    for (int i = 0; i < letters.size(); i++) {
-      assertEquals(letters.get(i), searcher.doc(td.scoreDocs[i].doc).get("parser"));
-    }
-    ir.close();
-    dir.close();
-  }
-  /**
-   * test sorts for a custom short parser that uses a simple char encoding
-   */
-  public void testCustomShortParser() throws Exception {
-    List<String> letters = Arrays.asList(new String[] { "A", "B", "C", "D", "E", "F", "G", "H", "I", "J" });
-    Collections.shuffle(letters, random());
-    Directory dir = newDirectory();
-    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
-    for (String letter : letters) {
-      Document doc = new Document();
-      doc.add(newStringField("parser", letter, Field.Store.YES));
-      iw.addDocument(doc);
-    }
-    IndexReader ir = iw.getReader();
-    iw.close();
-    IndexSearcher searcher = newSearcher(ir);
-    Sort sort = new Sort(new SortField("parser", new FieldCache.ShortParser() {
-      @Override
-      public short parseShort(BytesRef term) {
-        return (short) (term.bytes[term.offset]-'A');
-      }
-      @Override
-      public TermsEnum termsEnum(Terms terms) throws IOException {
-        return terms.iterator(null);
-      }
-    }), SortField.FIELD_DOC );
-    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
-    // results should be in alphabetical order
-    assertEquals(10, td.totalHits);
-    Collections.sort(letters);
-    for (int i = 0; i < letters.size(); i++) {
-      assertEquals(letters.get(i), searcher.doc(td.scoreDocs[i].doc).get("parser"));
-    }
-    ir.close();
-    dir.close();
-  }
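The two tests removed above exercised the now-deleted FieldCache.ByteParser and FieldCache.ShortParser. The same char-encoding trick remains expressible with the surviving parser interfaces; a hedged sketch using FieldCache.IntParser, not taken from the patch:

    // Sort single-letter terms numerically by mapping 'A'..'J' to 0..9
    // through a custom IntParser instead of the removed ByteParser.
    Sort sort = new Sort(new SortField("parser", new FieldCache.IntParser() {
      @Override
      public int parseInt(BytesRef term) {
        return term.bytes[term.offset] - 'A';
      }
      @Override
      public TermsEnum termsEnum(Terms terms) throws IOException {
        return terms.iterator(null);
      }
    }), SortField.FIELD_DOC);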
   /**
    * test sorts for a custom long parser that uses a simple char encoding
    */


@@ -223,142 +223,6 @@ public class TestSortDocValues extends LuceneTestCase {
     dir.close();
   }
-  /** Tests sorting on type byte */
-  public void testByte() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("value", 23));
-    doc.add(newStringField("value", "23", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", -1));
-    doc.add(newStringField("value", "-1", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", 4));
-    doc.add(newStringField("value", "4", Field.Store.YES));
-    writer.addDocument(doc);
-    IndexReader ir = writer.getReader();
-    writer.close();
-    IndexSearcher searcher = newSearcher(ir);
-    Sort sort = new Sort(new SortField("value", SortField.Type.BYTE));
-    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
-    assertEquals(3, td.totalHits);
-    // numeric order
-    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
-    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
-    assertEquals("23", searcher.doc(td.scoreDocs[2].doc).get("value"));
-    assertNoFieldCaches();
-    ir.close();
-    dir.close();
-  }
-  /** Tests sorting on type byte in reverse */
-  public void testByteReverse() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("value", 23));
-    doc.add(newStringField("value", "23", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", -1));
-    doc.add(newStringField("value", "-1", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", 4));
-    doc.add(newStringField("value", "4", Field.Store.YES));
-    writer.addDocument(doc);
-    IndexReader ir = writer.getReader();
-    writer.close();
-    IndexSearcher searcher = newSearcher(ir);
-    Sort sort = new Sort(new SortField("value", SortField.Type.BYTE, true));
-    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
-    assertEquals(3, td.totalHits);
-    // reverse numeric order
-    assertEquals("23", searcher.doc(td.scoreDocs[0].doc).get("value"));
-    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
-    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
-    assertNoFieldCaches();
-    ir.close();
-    dir.close();
-  }
-  /** Tests sorting on type short */
-  public void testShort() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("value", 300));
-    doc.add(newStringField("value", "300", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", -1));
-    doc.add(newStringField("value", "-1", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", 4));
-    doc.add(newStringField("value", "4", Field.Store.YES));
-    writer.addDocument(doc);
-    IndexReader ir = writer.getReader();
-    writer.close();
-    IndexSearcher searcher = newSearcher(ir);
-    Sort sort = new Sort(new SortField("value", SortField.Type.SHORT));
-    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
-    assertEquals(3, td.totalHits);
-    // numeric order
-    assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
-    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
-    assertEquals("300", searcher.doc(td.scoreDocs[2].doc).get("value"));
-    assertNoFieldCaches();
-    ir.close();
-    dir.close();
-  }
-  /** Tests sorting on type short in reverse */
-  public void testShortReverse() throws IOException {
-    Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
-    Document doc = new Document();
-    doc.add(new NumericDocValuesField("value", 300));
-    doc.add(newStringField("value", "300", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", -1));
-    doc.add(newStringField("value", "-1", Field.Store.YES));
-    writer.addDocument(doc);
-    doc = new Document();
-    doc.add(new NumericDocValuesField("value", 4));
-    doc.add(newStringField("value", "4", Field.Store.YES));
-    writer.addDocument(doc);
-    IndexReader ir = writer.getReader();
-    writer.close();
-    IndexSearcher searcher = newSearcher(ir);
-    Sort sort = new Sort(new SortField("value", SortField.Type.SHORT, true));
-    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
-    assertEquals(3, td.totalHits);
-    // reverse numeric order
-    assertEquals("300", searcher.doc(td.scoreDocs[0].doc).get("value"));
-    assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
-    assertEquals("-1", searcher.doc(td.scoreDocs[2].doc).get("value"));
-    assertNoFieldCaches();
-    ir.close();
-    dir.close();
-  }
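With the BYTE and SHORT sort types removed, doc-values sorting on small integers goes through SortField.Type.INT (or LONG). A minimal assumed sketch of the surviving pattern, not patch code:

    // NumericDocValuesField stores a long per document; the sort type only
    // controls comparison, so INT covers data that used to be byte/short.
    Document doc = new Document();
    doc.add(new NumericDocValuesField("value", 23));
    writer.addDocument(doc);
    // ...more docs...
    Sort sort = new Sort(new SortField("value", SortField.Type.INT));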
   /** Tests sorting on type int */
   public void testInt() throws IOException {
     Directory dir = newDirectory();


@@ -16,21 +16,25 @@ package org.apache.lucene.util;
  * limitations under the License.
  */
+import java.io.IOException;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.document.FloatField;
+import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
 import org.apache.lucene.util.FieldCacheSanityChecker.InsanityType;
-import java.io.IOException;
 public class TestFieldCacheSanityChecker extends LuceneTestCase {
   protected AtomicReader readerA;
@@ -51,18 +55,14 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
     long theLong = Long.MAX_VALUE;
     double theDouble = Double.MAX_VALUE;
-    byte theByte = Byte.MAX_VALUE;
-    short theShort = Short.MAX_VALUE;
     int theInt = Integer.MAX_VALUE;
     float theFloat = Float.MAX_VALUE;
     for (int i = 0; i < NUM_DOCS; i++){
       Document doc = new Document();
-      doc.add(newStringField("theLong", String.valueOf(theLong--), Field.Store.NO));
-      doc.add(newStringField("theDouble", String.valueOf(theDouble--), Field.Store.NO));
-      doc.add(newStringField("theByte", String.valueOf(theByte--), Field.Store.NO));
-      doc.add(newStringField("theShort", String.valueOf(theShort--), Field.Store.NO));
-      doc.add(newStringField("theInt", String.valueOf(theInt--), Field.Store.NO));
-      doc.add(newStringField("theFloat", String.valueOf(theFloat--), Field.Store.NO));
+      doc.add(new LongField("theLong", theLong--, Field.Store.NO));
+      doc.add(new DoubleField("theDouble", theDouble--, Field.Store.NO));
+      doc.add(new IntField("theInt", theInt--, Field.Store.NO));
+      doc.add(new FloatField("theFloat", theFloat--, Field.Store.NO));
       if (0 == i % 3) {
         wA.addDocument(doc);
       } else {
@@ -95,12 +95,12 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
     cache.purgeAllCaches();
     cache.getDoubles(readerA, "theDouble", false);
-    cache.getDoubles(readerA, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
-    cache.getDoubles(readerAclone, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
-    cache.getDoubles(readerB, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
+    cache.getDoubles(readerA, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
+    cache.getDoubles(readerAclone, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
+    cache.getDoubles(readerB, "theDouble", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false);
     cache.getInts(readerX, "theInt", false);
-    cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
+    cache.getInts(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
     // // //
@@ -119,9 +119,8 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
     FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
-    cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
+    cache.getInts(readerX, "theInt", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
     cache.getTerms(readerX, "theInt");
-    cache.getBytes(readerX, "theByte", false);
     // // //
@@ -147,8 +146,6 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
     cache.getTerms(readerB, "theInt");
     cache.getTerms(readerX, "theInt");
-    cache.getBytes(readerX, "theByte", false);
     // // //


@@ -650,8 +650,11 @@ public class TestPackedInts extends LuceneTestCase {
     wrt.set(99, (1 << 23) - 1);
     assertEquals(1 << 10, wrt.get(valueCount - 1));
     wrt.set(1, Long.MAX_VALUE);
+    wrt.set(2, -3);
+    assertEquals(64, wrt.getBitsPerValue());
     assertEquals(1 << 10, wrt.get(valueCount - 1));
     assertEquals(Long.MAX_VALUE, wrt.get(1));
+    assertEquals(-3L, wrt.get(2));
     assertEquals(2, wrt.get(4));
     assertEquals((1 << 23) - 1, wrt.get(99));
     assertEquals(10, wrt.get(7));
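The new assertions cover negative values in GrowableWriter: packed ints are stored unsigned, so a negative long is only representable at the full 64-bit width. A standalone sketch, assuming the org.apache.lucene.util.packed API of this era:

    // GrowableWriter widens bitsPerValue on demand; a negative value
    // forces the maximum width because packed values are unsigned.
    GrowableWriter wrt = new GrowableWriter(2, 100, PackedInts.DEFAULT);
    wrt.set(0, 3);               // fits in the initial 2 bits
    wrt.set(1, Long.MAX_VALUE);  // grows to 63 bits
    wrt.set(2, -3);              // grows to 64 bits
    assert wrt.getBitsPerValue() == 64;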


@@ -78,7 +78,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
     Document doc = new Document();
     addGroupField(doc, groupField, "author1", valueType);
     doc.add(newTextField("content", "random text", Field.Store.NO));
-    doc.add(newStringField("id_1", "1", Field.Store.NO));
+    doc.add(new IntField("id_1", 1, Field.Store.NO));
     doc.add(newStringField("id_2", "1", Field.Store.NO));
     w.addDocument(doc);
@@ -86,7 +86,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
     doc = new Document();
     addGroupField(doc, groupField, "author1", valueType);
     doc.add(newTextField("content", "some more random text blob", Field.Store.NO));
-    doc.add(newStringField("id_1", "2", Field.Store.NO));
+    doc.add(new IntField("id_1", 2, Field.Store.NO));
     doc.add(newStringField("id_2", "2", Field.Store.NO));
     w.addDocument(doc);
@@ -94,7 +94,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
     doc = new Document();
     addGroupField(doc, groupField, "author1", valueType);
     doc.add(newTextField("content", "some more random textual data", Field.Store.NO));
-    doc.add(newStringField("id_1", "3", Field.Store.NO));
+    doc.add(new IntField("id_1", 3, Field.Store.NO));
     doc.add(newStringField("id_2", "3", Field.Store.NO));
     w.addDocument(doc);
     w.commit(); // To ensure a second segment
@@ -103,7 +103,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
     doc = new Document();
     addGroupField(doc, groupField, "author2", valueType);
     doc.add(newTextField("content", "some random text", Field.Store.NO));
-    doc.add(newStringField("id_1", "4", Field.Store.NO));
+    doc.add(new IntField("id_1", 4, Field.Store.NO));
     doc.add(newStringField("id_2", "4", Field.Store.NO));
     w.addDocument(doc);
@@ -111,7 +111,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
     doc = new Document();
     addGroupField(doc, groupField, "author3", valueType);
     doc.add(newTextField("content", "some more random text", Field.Store.NO));
-    doc.add(newStringField("id_1", "5", Field.Store.NO));
+    doc.add(new IntField("id_1", 5, Field.Store.NO));
     doc.add(newStringField("id_2", "5", Field.Store.NO));
     w.addDocument(doc);
@@ -119,21 +119,21 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
     doc = new Document();
     addGroupField(doc, groupField, "author3", valueType);
     doc.add(newTextField("content", "random blob", Field.Store.NO));
-    doc.add(newStringField("id_1", "6", Field.Store.NO));
+    doc.add(new IntField("id_1", 6, Field.Store.NO));
     doc.add(newStringField("id_2", "6", Field.Store.NO));
     w.addDocument(doc);
     // 6 -- no author field
     doc = new Document();
     doc.add(newTextField("content", "random word stuck in alot of other text", Field.Store.NO));
-    doc.add(newStringField("id_1", "6", Field.Store.NO));
+    doc.add(new IntField("id_1", 6, Field.Store.NO));
     doc.add(newStringField("id_2", "6", Field.Store.NO));
     w.addDocument(doc);
     // 7 -- no author field
     doc = new Document();
     doc.add(newTextField("content", "random word stuck in alot of other text", Field.Store.NO));
-    doc.add(newStringField("id_1", "7", Field.Store.NO));
+    doc.add(new IntField("id_1", 7, Field.Store.NO));
     doc.add(newStringField("id_2", "7", Field.Store.NO));
     w.addDocument(doc);


@@ -459,7 +459,7 @@ public class TestBlockJoin extends LuceneTestCase {
     for(int parentDocID=0;parentDocID<numParentDocs;parentDocID++) {
       Document parentDoc = new Document();
       Document parentJoinDoc = new Document();
-      Field id = newStringField("parentID", ""+parentDocID, Field.Store.YES);
+      Field id = new IntField("parentID", parentDocID, Field.Store.YES);
       parentDoc.add(id);
       parentJoinDoc.add(id);
       parentJoinDoc.add(newStringField("isParent", "x", Field.Store.NO));
@@ -472,8 +472,8 @@ public class TestBlockJoin extends LuceneTestCase {
       }
       if (doDeletes) {
-        parentDoc.add(newStringField("blockID", ""+parentDocID, Field.Store.NO));
-        parentJoinDoc.add(newStringField("blockID", ""+parentDocID, Field.Store.NO));
+        parentDoc.add(new IntField("blockID", parentDocID, Field.Store.NO));
+        parentJoinDoc.add(new IntField("blockID", parentDocID, Field.Store.NO));
       }
       final List<Document> joinDocs = new ArrayList<>();
@@ -497,7 +497,7 @@ public class TestBlockJoin extends LuceneTestCase {
         Document joinChildDoc = new Document();
         joinDocs.add(joinChildDoc);
-        Field childID = newStringField("childID", ""+childDocID, Field.Store.YES);
+        Field childID = new IntField("childID", childDocID, Field.Store.YES);
         childDoc.add(childID);
         joinChildDoc.add(childID);
@@ -522,7 +522,7 @@ public class TestBlockJoin extends LuceneTestCase {
         }
         if (doDeletes) {
-          joinChildDoc.add(newStringField("blockID", ""+parentDocID, Field.Store.NO));
+          joinChildDoc.add(new IntField("blockID", parentDocID, Field.Store.NO));
         }
         w.addDocument(childDoc);
@@ -541,8 +541,10 @@ public class TestBlockJoin extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("DELETE parentID=" + deleteID);
       }
-      w.deleteDocuments(new Term("blockID", ""+deleteID));
-      joinW.deleteDocuments(new Term("blockID", ""+deleteID));
+      BytesRef term = new BytesRef();
+      NumericUtils.intToPrefixCodedBytes(deleteID, 0, term);
+      w.deleteDocuments(new Term("blockID", term));
+      joinW.deleteDocuments(new Term("blockID", term));
     }
     final IndexReader r = w.getReader();
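Switching blockID to an IntField changes how deletes address it: IntField indexes NumericUtils prefix-coded terms, so delete-by-Term needs the encoded bytes rather than a decimal string. A hedged standalone sketch of the same pattern; writer and deleteID are placeholders:

    // Encode the int exactly as IntField indexed it (shift 0 = full
    // precision), then delete by that term.
    BytesRef bytes = new BytesRef();
    NumericUtils.intToPrefixCodedBytes(deleteID, 0, bytes);
    writer.deleteDocuments(new Term("blockID", bytes));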


@@ -1,121 +0,0 @@
-package org.apache.lucene.queries.function.valuesource;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-import java.util.Map;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.search.FieldCache;
-/**
- * Obtains int field values from the {@link org.apache.lucene.search.FieldCache}
- * using <code>getInts()</code>
- * and makes those values available as other numeric types, casting as needed. *
- *
- *
- */
-public class ByteFieldSource extends FieldCacheSource {
-  private final FieldCache.ByteParser parser;
-  public ByteFieldSource(String field) {
-    this(field, null);
-  }
-  public ByteFieldSource(String field, FieldCache.ByteParser parser) {
-    super(field);
-    this.parser = parser;
-  }
-  @Override
-  public String description() {
-    return "byte(" + field + ')';
-  }
-  @Override
-  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    final FieldCache.Bytes arr = cache.getBytes(readerContext.reader(), field, parser, false);
-    return new FunctionValues() {
-      @Override
-      public byte byteVal(int doc) {
-        return arr.get(doc);
-      }
-      @Override
-      public short shortVal(int doc) {
-        return (short) arr.get(doc);
-      }
-      @Override
-      public float floatVal(int doc) {
-        return (float) arr.get(doc);
-      }
-      @Override
-      public int intVal(int doc) {
-        return (int) arr.get(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long) arr.get(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double) arr.get(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Byte.toString(arr.get(doc));
-      }
-      @Override
-      public String toString(int doc) {
-        return description() + '=' + byteVal(doc);
-      }
-      @Override
-      public Object objectVal(int doc) {
-        return arr.get(doc); // TODO: valid?
-      }
-    };
-  }
-  @Override
-  public boolean equals(Object o) {
-    if (o.getClass() != ByteFieldSource.class) return false;
-    ByteFieldSource other = (ByteFieldSource) o;
-    return super.equals(other)
-      && (this.parser == null ? other.parser == null :
-          this.parser.getClass() == other.parser.getClass());
-  }
-  @Override
-  public int hashCode() {
-    int h = parser == null ? Byte.class.hashCode() : parser.getClass().hashCode();
-    h += super.hashCode();
-    return h;
-  }
-}


@@ -1,114 +0,0 @@
-package org.apache.lucene.queries.function.valuesource;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-import java.util.Map;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.search.FieldCache;
-/**
- * Obtains short field values from the {@link org.apache.lucene.search.FieldCache}
- * using <code>getShorts()</code>
- * and makes those values available as other numeric types, casting as needed.
- **/
-public class ShortFieldSource extends FieldCacheSource {
-  final FieldCache.ShortParser parser;
-  public ShortFieldSource(String field) {
-    this(field, null);
-  }
-  public ShortFieldSource(String field, FieldCache.ShortParser parser) {
-    super(field);
-    this.parser = parser;
-  }
-  @Override
-  public String description() {
-    return "short(" + field + ')';
-  }
-  @Override
-  public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    final FieldCache.Shorts arr = cache.getShorts(readerContext.reader(), field, parser, false);
-    return new FunctionValues() {
-      @Override
-      public byte byteVal(int doc) {
-        return (byte) arr.get(doc);
-      }
-      @Override
-      public short shortVal(int doc) {
-        return arr.get(doc);
-      }
-      @Override
-      public float floatVal(int doc) {
-        return (float) arr.get(doc);
-      }
-      @Override
-      public int intVal(int doc) {
-        return (int) arr.get(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long) arr.get(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double) arr.get(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Short.toString(arr.get(doc));
-      }
-      @Override
-      public String toString(int doc) {
-        return description() + '=' + shortVal(doc);
-      }
-    };
-  }
-  @Override
-  public boolean equals(Object o) {
-    if (o.getClass() != ShortFieldSource.class) return false;
-    ShortFieldSource other = (ShortFieldSource) o;
-    return super.equals(other)
-      && (this.parser == null ? other.parser == null :
-          this.parser.getClass() == other.parser.getClass());
-  }
-  @Override
-  public int hashCode() {
-    int h = parser == null ? Short.class.hashCode() : parser.getClass().hashCode();
-    h += super.hashCode();
-    return h;
-  }
-}
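Both value sources above are deleted along with the byte/short field caches. Code that scored or sorted by such fields can move to IntFieldSource over an IntField; a minimal assumed sketch, with searcher as a placeholder:

    // Function query over an int field, replacing the removed
    // ByteFieldSource/ShortFieldSource.
    ValueSource vs = new IntFieldSource("id_1");
    Query q = new FunctionQuery(vs);
    TopDocs td = searcher.search(q, 10);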


@@ -52,26 +52,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
     createIndex(true);
   }
-  /**
-   * Test that CustomScoreQuery of Type.BYTE returns the expected scores.
-   */
-  @Test
-  public void testCustomScoreByte() throws Exception {
-    // INT field values are small enough to be parsed as byte
-    doTestCustomScore(BYTE_VALUESOURCE, 1.0);
-    doTestCustomScore(BYTE_VALUESOURCE, 2.0);
-  }
-  /**
-   * Test that CustomScoreQuery of Type.SHORT returns the expected scores.
-   */
-  @Test
-  public void testCustomScoreShort() throws Exception {
-    // INT field values are small enough to be parsed as short
-    doTestCustomScore(SHORT_VALUESOURCE, 1.0);
-    doTestCustomScore(SHORT_VALUESOURCE, 3.0);
-  }
   /**
    * Test that CustomScoreQuery of Type.INT returns the expected scores.
    */


@@ -1,18 +1,25 @@
 package org.apache.lucene.queries.function;
+import java.io.IOException;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.FloatField;
+import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
 import org.apache.lucene.queries.function.valuesource.IntFieldSource;
-import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
+import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 import org.junit.AfterClass;
@@ -53,10 +60,21 @@ public abstract class FunctionTestSetup extends LuceneTestCase {
   protected static final String INT_FIELD = "iii";
   protected static final String FLOAT_FIELD = "fff";
-  protected ValueSource BYTE_VALUESOURCE = new ByteFieldSource(INT_FIELD);
-  protected ValueSource SHORT_VALUESOURCE = new ShortFieldSource(INT_FIELD);
+  private static final FieldCache.FloatParser CUSTOM_FLOAT_PARSER = new FieldCache.FloatParser() {
+    @Override
+    public TermsEnum termsEnum(Terms terms) throws IOException {
+      return FieldCache.NUMERIC_UTILS_INT_PARSER.termsEnum(terms);
+    }
+    @Override
+    public float parseFloat(BytesRef term) {
+      return (float) FieldCache.NUMERIC_UTILS_INT_PARSER.parseInt(term);
+    }
+  };
   protected ValueSource INT_VALUESOURCE = new IntFieldSource(INT_FIELD);
-  protected ValueSource INT_AS_FLOAT_VALUESOURCE = new FloatFieldSource(INT_FIELD);
+  protected ValueSource INT_AS_FLOAT_VALUESOURCE = new FloatFieldSource(INT_FIELD, CUSTOM_FLOAT_PARSER);
   protected ValueSource FLOAT_VALUESOURCE = new FloatFieldSource(FLOAT_FIELD);
   private static final String DOC_TEXT_LINES[] = {
@@ -140,10 +158,10 @@ public abstract class FunctionTestSetup extends LuceneTestCase {
     f = newField(TEXT_FIELD, "text of doc" + scoreAndID + textLine(i), customType2); // for regular search
     d.add(f);
-    f = newField(INT_FIELD, "" + scoreAndID, customType); // for function scoring
+    f = new IntField(INT_FIELD, scoreAndID, Store.YES); // for function scoring
     d.add(f);
-    f = newField(FLOAT_FIELD, scoreAndID + ".000", customType); // for function scoring
+    f = new FloatField(FLOAT_FIELD, scoreAndID, Store.YES); // for function scoring
     d.add(f);
     iw.addDocument(d);


@@ -19,12 +19,6 @@ package org.apache.lucene.queries.function;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.queries.function.FunctionQuery;
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
-import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
-import org.apache.lucene.queries.function.valuesource.IntFieldSource;
-import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.QueryUtils;
 import org.apache.lucene.search.ScoreDoc;
@@ -50,20 +44,6 @@ public class TestFieldScoreQuery extends FunctionTestSetup {
     createIndex(true);
   }
-  /** Test that FieldScoreQuery of Type.BYTE returns docs in expected order. */
-  @Test
-  public void testRankByte () throws Exception {
-    // INT field values are small enough to be parsed as byte
-    doTestRank(BYTE_VALUESOURCE);
-  }
-  /** Test that FieldScoreQuery of Type.SHORT returns docs in expected order. */
-  @Test
-  public void testRankShort () throws Exception {
-    // INT field values are small enough to be parsed as short
-    doTestRank(SHORT_VALUESOURCE);
-  }
   /** Test that FieldScoreQuery of Type.INT returns docs in expected order. */
   @Test
   public void testRankInt () throws Exception {
@@ -99,20 +79,6 @@ public class TestFieldScoreQuery extends FunctionTestSetup {
     r.close();
   }
-  /** Test that FieldScoreQuery of Type.BYTE returns the expected scores. */
-  @Test
-  public void testExactScoreByte () throws Exception {
-    // INT field values are small enough to be parsed as byte
-    doTestExactScore(BYTE_VALUESOURCE);
-  }
-  /** Test that FieldScoreQuery of Type.SHORT returns the expected scores. */
-  @Test
-  public void testExactScoreShort () throws Exception {
-    // INT field values are small enough to be parsed as short
-    doTestExactScore(SHORT_VALUESOURCE);
-  }
   /** Test that FieldScoreQuery of Type.INT returns the expected scores. */
   @Test
   public void testExactScoreInt () throws Exception {


@@ -18,9 +18,10 @@ package org.apache.lucene.queries.function;
  */
 import java.io.IOException;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.IntField;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -46,13 +47,13 @@ public class TestFunctionQuerySort extends LuceneTestCase {
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
     Document doc = new Document();
-    Field field = new StringField("value", "", Field.Store.YES);
+    Field field = new IntField("value", 0, Field.Store.YES);
     doc.add(field);
     // Save docs unsorted (decreasing value n, n-1, ...)
     final int NUM_VALS = 5;
     for (int val = NUM_VALS; val > 0; val--) {
-      field.setStringValue(Integer.toString(val));
+      field.setIntValue(val);
       writer.addDocument(doc);
     }


@@ -22,13 +22,16 @@ import java.util.List;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatField;
+import org.apache.lucene.document.IntField;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
 import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
 import org.apache.lucene.queries.function.valuesource.ConstValueSource;
 import org.apache.lucene.queries.function.valuesource.DivFloatFunction;
@@ -54,7 +57,6 @@ import org.apache.lucene.queries.function.valuesource.QueryValueSource;
 import org.apache.lucene.queries.function.valuesource.RangeMapFloatFunction;
 import org.apache.lucene.queries.function.valuesource.ReciprocalFloatFunction;
 import org.apache.lucene.queries.function.valuesource.ScaleFloatFunction;
-import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
 import org.apache.lucene.queries.function.valuesource.SumFloatFunction;
 import org.apache.lucene.queries.function.valuesource.SumTotalTermFreqValueSource;
 import org.apache.lucene.queries.function.valuesource.TFValueSource;
@@ -85,9 +87,9 @@ public class TestValueSources extends LuceneTestCase {
   static IndexSearcher searcher;
   static final List<String[]> documents = Arrays.asList(new String[][] {
-    /* id, byte, double, float, int, long, short, string, text */
-    new String[] { "0", "5", "3.63", "5.2", "35", "4343", "945", "test", "this is a test test test" },
-    new String[] { "1", "12", "5.65", "9.3", "54", "1954", "123", "bar", "second test" },
+    /* id, double, float, int, long, string, text */
+    new String[] { "0", "3.63", "5.2", "35", "4343", "test", "this is a test test test" },
+    new String[] { "1", "5.65", "9.3", "54", "1954", "bar", "second test" },
   });
   @BeforeClass
@@ -99,18 +101,14 @@ public class TestValueSources extends LuceneTestCase {
     Document document = new Document();
     Field idField = new StringField("id", "", Field.Store.NO);
     document.add(idField);
-    Field byteField = new StringField("byte", "", Field.Store.NO);
-    document.add(byteField);
-    Field doubleField = new StringField("double", "", Field.Store.NO);
+    Field doubleField = new DoubleField("double", 0d, Field.Store.NO);
     document.add(doubleField);
-    Field floatField = new StringField("float", "", Field.Store.NO);
+    Field floatField = new FloatField("float", 0f, Field.Store.NO);
     document.add(floatField);
-    Field intField = new StringField("int", "", Field.Store.NO);
+    Field intField = new IntField("int", 0, Field.Store.NO);
     document.add(intField);
-    Field longField = new StringField("long", "", Field.Store.NO);
+    Field longField = new LongField("long", 0L, Field.Store.NO);
     document.add(longField);
-    Field shortField = new StringField("short", "", Field.Store.NO);
-    document.add(shortField);
     Field stringField = new StringField("string", "", Field.Store.NO);
     document.add(stringField);
     Field textField = new TextField("text", "", Field.Store.NO);
@@ -118,14 +116,12 @@ public class TestValueSources extends LuceneTestCase {
     for (String [] doc : documents) {
       idField.setStringValue(doc[0]);
-      byteField.setStringValue(doc[1]);
-      doubleField.setStringValue(doc[2]);
-      floatField.setStringValue(doc[3]);
-      intField.setStringValue(doc[4]);
-      longField.setStringValue(doc[5]);
-      shortField.setStringValue(doc[6]);
-      stringField.setStringValue(doc[7]);
-      textField.setStringValue(doc[8]);
+      doubleField.setDoubleValue(Double.valueOf(doc[1]));
+      floatField.setFloatValue(Float.valueOf(doc[2]));
+      intField.setIntValue(Integer.valueOf(doc[3]));
+      longField.setLongValue(Long.valueOf(doc[4]));
+      stringField.setStringValue(doc[5]);
+      textField.setStringValue(doc[6]);
       iw.addDocument(document);
     }
@@ -143,11 +139,6 @@ public class TestValueSources extends LuceneTestCase {
     dir = null;
   }
-  public void testByte() throws Exception {
-    assertHits(new FunctionQuery(new ByteFieldSource("byte")),
-        new float[] { 5f, 12f });
-  }
   public void testConst() throws Exception {
     assertHits(new FunctionQuery(new ConstValueSource(0.3f)),
         new float[] { 0.3f, 0.3f });
@@ -298,11 +289,6 @@ public class TestValueSources extends LuceneTestCase {
         new float[] { 0.0f, 1.0f });
   }
-  public void testShort() throws Exception {
-    assertHits(new FunctionQuery(new ShortFieldSource("short")),
-        new float[] { 945f, 123f });
-  }
   public void testSumFloat() throws Exception {
     assertHits(new FunctionQuery(new SumFloatFunction(new ValueSource[] {
         new ConstValueSource(1f), new ConstValueSource(2f)})),


@ -36,6 +36,7 @@ import java.util.zip.GZIPInputStream;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
@ -158,6 +159,7 @@ public class LineFileDocs implements Closeable {
final Field titleDV; final Field titleDV;
final Field body; final Field body;
final Field id; final Field id;
final Field idNum;
final Field date; final Field date;
public DocState(boolean useDocValues) { public DocState(boolean useDocValues) {
@ -180,6 +182,9 @@ public class LineFileDocs implements Closeable {
id = new StringField("docid", "", Field.Store.YES); id = new StringField("docid", "", Field.Store.YES);
doc.add(id); doc.add(id);
idNum = new IntField("docid_int", 0, Field.Store.NO);
doc.add(idNum);
date = new StringField("date", "", Field.Store.YES); date = new StringField("date", "", Field.Store.YES);
doc.add(date); doc.add(date);
@ -233,7 +238,9 @@ public class LineFileDocs implements Closeable {
} }
docState.titleTokenized.setStringValue(title); docState.titleTokenized.setStringValue(title);
docState.date.setStringValue(line.substring(1+spot, spot2)); docState.date.setStringValue(line.substring(1+spot, spot2));
docState.id.setStringValue(Integer.toString(id.getAndIncrement())); final int i = id.getAndIncrement();
docState.id.setStringValue(Integer.toString(i));
docState.idNum.setIntValue(i);
return docState.doc; return docState.doc;
} }
} }
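The new "docid_int" field makes the line-file corpus usable with Lucene's numeric APIs. As a minimal sketch (not part of this patch; the reader setup is assumed, and the class name is hypothetical), a NumericRangeQuery over the new field might look like:

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.TopDocs;

public class DocidIntRangeSketch {
  // Match the first hundred line-file docs by their new integer id;
  // both bounds are inclusive. The reader wraps an index built from
  // LineFileDocs.
  public static TopDocs firstHundred(DirectoryReader reader) throws IOException {
    IndexSearcher searcher = new IndexSearcher(reader);
    return searcher.search(
        NumericRangeQuery.newIntRange("docid_int", 0, 99, true, true), 100);
  }
}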
@ -49,7 +49,12 @@ import org.apache.lucene.codecs.lucene42.Lucene42Codec;
import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.AtomicReader; import org.apache.lucene.index.AtomicReader;
@ -78,14 +83,15 @@ import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.FilteredQuery.FilterStrategy;
import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.FilteredQuery.FilterStrategy;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.CompoundFileDirectory; import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.junit.Assert; import org.junit.Assert;
import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomPicks;
@ -872,6 +878,7 @@ public class _TestUtil {
final Field field1 = (Field) f; final Field field1 = (Field) f;
final Field field2; final Field field2;
final DocValuesType dvType = field1.fieldType().docValueType(); final DocValuesType dvType = field1.fieldType().docValueType();
final NumericType numType = field1.fieldType().numericType();
if (dvType != null) { if (dvType != null) {
switch(dvType) { switch(dvType) {
case NUMERIC: case NUMERIC:
@ -886,6 +893,23 @@ public class _TestUtil {
default: default:
throw new IllegalStateException("unknown Type: " + dvType); throw new IllegalStateException("unknown Type: " + dvType);
} }
} else if (numType != null) {
switch (numType) {
case INT:
field2 = new IntField(field1.name(), field1.numericValue().intValue(), field1.fieldType());
break;
case FLOAT:
field2 = new FloatField(field1.name(), field1.numericValue().floatValue(), field1.fieldType());
break;
case LONG:
field2 = new LongField(field1.name(), field1.numericValue().longValue(), field1.fieldType());
break;
case DOUBLE:
field2 = new DoubleField(field1.name(), field1.numericValue().doubleValue(), field1.fieldType());
break;
default:
throw new IllegalStateException("unknown Type: " + numType);
}
} else { } else {
field2 = new Field(field1.name(), field1.stringValue(), field1.fieldType()); field2 = new Field(field1.name(), field1.stringValue(), field1.fieldType());
} }
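The numeric cases above must use the accessor that matches the field's type: Number.intValue() silently truncates float, long and double values, which would make the cloned document differ from the original. A tiny, self-contained illustration:

public class NumericValueTruncationSketch {
  public static void main(String[] args) {
    Number pi = Double.valueOf(3.14159);
    System.out.println(pi.intValue());    // prints 3 -- the fraction is lost
    System.out.println(pi.doubleValue()); // prints 3.14159 -- preserved
  }
}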
@ -79,6 +79,10 @@ Upgrading from Solr 4.3.0
* SOLR-4778: The signature of LogWatcher.registerListener has changed, from * SOLR-4778: The signature of LogWatcher.registerListener has changed, from
(ListenerConfig, CoreContainer) to (ListenerConfig). Users implementing their (ListenerConfig, CoreContainer) to (ListenerConfig). Users implementing their
own LogWatcher classes will need to change their code accordingly. own LogWatcher classes will need to change their code accordingly.
* LUCENE-5063: ByteField and ShortField have been deprecated and will be removed
in 5.0. If you are still using these field types, you should migrate your
fields to TrieIntField.
Detailed Change List Detailed Change List
---------------------- ----------------------
@ -49,9 +49,7 @@ public class StatsValuesFactory {
if (DoubleField.class.isInstance(fieldType) || if (DoubleField.class.isInstance(fieldType) ||
IntField.class.isInstance(fieldType) || IntField.class.isInstance(fieldType) ||
LongField.class.isInstance(fieldType) || LongField.class.isInstance(fieldType) ||
ShortField.class.isInstance(fieldType) ||
FloatField.class.isInstance(fieldType) || FloatField.class.isInstance(fieldType) ||
ByteField.class.isInstance(fieldType) ||
TrieField.class.isInstance(fieldType) || TrieField.class.isInstance(fieldType) ||
SortableDoubleField.class.isInstance(fieldType) || SortableDoubleField.class.isInstance(fieldType) ||
SortableIntField.class.isInstance(fieldType) || SortableIntField.class.isInstance(fieldType) ||
@ -260,11 +260,9 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
KNOWN_TYPES.add(BCDIntField.class); KNOWN_TYPES.add(BCDIntField.class);
KNOWN_TYPES.add(BCDLongField.class); KNOWN_TYPES.add(BCDLongField.class);
KNOWN_TYPES.add(BCDStrField.class); KNOWN_TYPES.add(BCDStrField.class);
KNOWN_TYPES.add(ByteField.class);
KNOWN_TYPES.add(DateField.class); KNOWN_TYPES.add(DateField.class);
KNOWN_TYPES.add(DoubleField.class); KNOWN_TYPES.add(DoubleField.class);
KNOWN_TYPES.add(FloatField.class); KNOWN_TYPES.add(FloatField.class);
KNOWN_TYPES.add(ShortField.class);
KNOWN_TYPES.add(IntField.class); KNOWN_TYPES.add(IntField.class);
KNOWN_TYPES.add(LongField.class); KNOWN_TYPES.add(LongField.class);
KNOWN_TYPES.add(SortableLongField.class); KNOWN_TYPES.add(SortableLongField.class);
@ -1,98 +0,0 @@
package org.apache.solr.schema;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.search.SortField;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
import java.io.IOException;
import java.util.Map;
/**
* A numeric field that can contain 8-bit signed two's complement integer
* values, encoded as simple Strings.
*
* <p>
* Field values will sort numerically, but Range Queries (and other features
* that rely on numeric ranges) will not work as expected: values will be
* evaluated in unicode String order, not numeric order.
* </p>
*
* <ul>
* <li>Min Value Allowed: -128</li>
* <li>Max Value Allowed: 127</li>
* </ul>
*
* @see Byte
*/
public class ByteField extends PrimitiveFieldType {
@Override
protected void init(IndexSchema schema, Map<String, String> args) {
super.init(schema, args);
restrictProps(SORT_MISSING_FIRST | SORT_MISSING_LAST);
}
/////////////////////////////////////////////////////////////
@Override
public SortField getSortField(SchemaField field, boolean reverse) {
field.checkSortability();
return new SortField(field.name, SortField.Type.BYTE, reverse);
}
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new ByteFieldSource(field.name);
}
@Override
public void write(TextResponseWriter writer, String name, StorableField f) throws IOException {
String s = f.stringValue();
// these values may be from a legacy lucene index, which may
// not be properly formatted in some output formats, or may
// incorrectly have a zero length.
if (s.length()==0) {
// zero length value means someone mistakenly indexed the value
// instead of simply leaving it out. Write a null value instead of a numeric.
writer.writeNull(name);
return;
}
try {
byte val = Byte.parseByte(s);
writer.writeInt(name, val);
} catch (NumberFormatException e){
// can't parse - write out the contents as a string so nothing is lost and
// clients don't get a parse error.
writer.writeStr(name, s, true);
}
}
@Override
public Byte toObject(StorableField f) {
return Byte.valueOf(toExternal(f));
}
}
@ -17,18 +17,20 @@
package org.apache.solr.schema; package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.search.SortField;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
/** /**
* A legacy numeric field type that encodes "Double" values as simple Strings. * A legacy numeric field type that encodes "Double" values as simple Strings.
* This class should not be used except by people with existing indexes that * This class should not be used except by people with existing indexes that
@ -44,6 +46,20 @@ import java.util.Map;
* @see TrieDoubleField * @see TrieDoubleField
*/ */
public class DoubleField extends PrimitiveFieldType { public class DoubleField extends PrimitiveFieldType {
private static final FieldCache.DoubleParser PARSER = new FieldCache.DoubleParser() {
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public double parseDouble(BytesRef term) {
return Double.parseDouble(term.utf8ToString());
}
};
@Override @Override
protected void init(IndexSchema schema, Map<String, String> args) { protected void init(IndexSchema schema, Map<String, String> args) {
super.init(schema, args); super.init(schema, args);
@ -54,13 +70,13 @@ public class DoubleField extends PrimitiveFieldType {
@Override @Override
public SortField getSortField(SchemaField field, boolean reverse) { public SortField getSortField(SchemaField field, boolean reverse) {
field.checkSortability(); field.checkSortability();
return new SortField(field.name, SortField.Type.DOUBLE, reverse); return new SortField(field.name, PARSER, reverse);
} }
@Override @Override
public ValueSource getValueSource(SchemaField field, QParser qparser) { public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser); field.checkFieldCacheSource(qparser);
return new DoubleFieldSource(field.name); return new DoubleFieldSource(field.name, PARSER);
} }
@Override @Override
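To see what the new parser buys, here is a minimal sketch (not part of this patch) of sorting on a legacy string-encoded double field; the field name "price" and the class name are hypothetical, and PARSER mirrors the parser defined above:

import java.io.IOException;

import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.BytesRef;

public class LegacyDoubleSortSketch {
  // Same shape as DoubleField's PARSER: decode each indexed term as a double.
  static final FieldCache.DoubleParser PARSER = new FieldCache.DoubleParser() {
    @Override
    public TermsEnum termsEnum(Terms terms) throws IOException {
      return terms.iterator(null);
    }
    @Override
    public double parseDouble(BytesRef term) {
      return Double.parseDouble(term.utf8ToString());
    }
  };

  // The parser is named explicitly in the SortField instead of relying on
  // SortField.Type.DOUBLE and its removed default parser.
  public static TopDocs sortedByPrice(IndexSearcher searcher) throws IOException {
    Sort sort = new Sort(new SortField("price", PARSER, /* reverse= */ false));
    return searcher.search(new MatchAllDocsQuery(), 10, sort);
  }
}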
@ -19,11 +19,15 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource; import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.search.QParser; import org.apache.solr.search.QParser;
import org.apache.lucene.index.GeneralField; import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.solr.response.TextResponseWriter; import org.apache.solr.response.TextResponseWriter;
import java.util.Map; import java.util.Map;
@ -43,6 +47,20 @@ import java.io.IOException;
* @see TrieFloatField * @see TrieFloatField
*/ */
public class FloatField extends PrimitiveFieldType { public class FloatField extends PrimitiveFieldType {
private static final FieldCache.FloatParser PARSER = new FieldCache.FloatParser() {
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public float parseFloat(BytesRef term) {
return Float.parseFloat(term.utf8ToString());
}
};
@Override @Override
protected void init(IndexSchema schema, Map<String,String> args) { protected void init(IndexSchema schema, Map<String,String> args) {
super.init(schema, args); super.init(schema, args);
@ -58,7 +76,7 @@ public class FloatField extends PrimitiveFieldType {
@Override @Override
public ValueSource getValueSource(SchemaField field, QParser qparser) { public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser); field.checkFieldCacheSource(qparser);
return new FloatFieldSource(field.name); return new FloatFieldSource(field.name, PARSER);
} }
@Override @Override
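The function-query side follows the same pattern; a short sketch under assumptions (hypothetical field name "weight" and class name, with a FieldCache.FloatParser such as FloatField's PARSER passed in):

import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.search.FieldCache;

public class LegacyFloatFunctionSketch {
  // Scores every matching document with the parsed value of the
  // string-encoded field "weight".
  public static FunctionQuery weightAsScore(FieldCache.FloatParser parser) {
    return new FunctionQuery(new FloatFieldSource("weight", parser));
  }
}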
@ -19,11 +19,15 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource; import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.search.QParser; import org.apache.solr.search.QParser;
import org.apache.lucene.index.GeneralField; import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.solr.response.TextResponseWriter; import org.apache.solr.response.TextResponseWriter;
import java.util.Map; import java.util.Map;
@ -43,6 +47,20 @@ import java.io.IOException;
* @see TrieIntField * @see TrieIntField
*/ */
public class IntField extends PrimitiveFieldType { public class IntField extends PrimitiveFieldType {
private static final FieldCache.IntParser PARSER = new FieldCache.IntParser() {
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public int parseInt(BytesRef term) {
return Integer.parseInt(term.utf8ToString());
}
};
@Override @Override
protected void init(IndexSchema schema, Map<String,String> args) { protected void init(IndexSchema schema, Map<String,String> args) {
super.init(schema, args); super.init(schema, args);
@ -52,13 +70,13 @@ public class IntField extends PrimitiveFieldType {
@Override @Override
public SortField getSortField(SchemaField field,boolean reverse) { public SortField getSortField(SchemaField field,boolean reverse) {
field.checkSortability(); field.checkSortability();
return new SortField(field.name,SortField.Type.INT, reverse); return new SortField(field.name, PARSER, reverse);
} }
@Override @Override
public ValueSource getValueSource(SchemaField field, QParser qparser) { public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser); field.checkFieldCacheSource(qparser);
return new IntFieldSource(field.name); return new IntFieldSource(field.name, PARSER);
} }
@Override @Override
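Direct FieldCache access takes the parser the same way; a sketch under the same assumptions (hypothetical field "qty" and class name, with a FieldCache.IntParser such as IntField's PARSER):

import java.io.IOException;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.search.FieldCache;

public class LegacyIntCacheSketch {
  // The first call populates the cache for this reader; later calls reuse it.
  public static int valueOf(AtomicReader reader, FieldCache.IntParser parser,
      int docID) throws IOException {
    FieldCache.Ints values = FieldCache.DEFAULT.getInts(reader, "qty", parser, false);
    return values.get(docID);
  }
}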
@ -22,7 +22,11 @@ import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.index.GeneralField; import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.response.TextResponseWriter; import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser; import org.apache.solr.search.QParser;
@ -43,6 +47,20 @@ import java.util.Map;
* @see TrieLongField * @see TrieLongField
*/ */
public class LongField extends PrimitiveFieldType { public class LongField extends PrimitiveFieldType {
private static final FieldCache.LongParser PARSER = new FieldCache.LongParser() {
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public long parseLong(BytesRef term) {
return Long.parseLong(term.utf8ToString());
}
};
@Override @Override
protected void init(IndexSchema schema, Map<String,String> args) { protected void init(IndexSchema schema, Map<String,String> args) {
super.init(schema, args); super.init(schema, args);
@ -54,13 +72,13 @@ public class LongField extends PrimitiveFieldType {
@Override @Override
public SortField getSortField(SchemaField field,boolean reverse) { public SortField getSortField(SchemaField field,boolean reverse) {
field.checkSortability(); field.checkSortability();
return new SortField(field.name,SortField.Type.LONG, reverse); return new SortField(field.name, PARSER, reverse);
} }
@Override @Override
public ValueSource getValueSource(SchemaField field, QParser qparser) { public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser); field.checkFieldCacheSource(qparser);
return new LongFieldSource(field.name); return new LongFieldSource(field.name, PARSER);
} }
@Override @Override
@ -1,101 +0,0 @@
package org.apache.solr.schema;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.search.SortField;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
import java.io.IOException;
import java.util.Map;
/**
* A numeric field that can contain 16-bit signed two's complement integer
* values, encoded as simple Strings.
*
* <p>
* Field values will sort numerically, but Range Queries (and other features
* that rely on numeric ranges) will not work as expected: values will be
* evaluated in unicode String order, not numeric order.
* </p>
*
* <ul>
* <li>Min Value Allowed: -32768</li>
* <li>Max Value Allowed: 32767</li>
* </ul>
*
* @see Short
**/
public class ShortField extends PrimitiveFieldType {
@Override
protected void init(IndexSchema schema, Map<String, String> args) {
super.init(schema, args);
restrictProps(SORT_MISSING_FIRST | SORT_MISSING_LAST);
}
/////////////////////////////////////////////////////////////
@Override
public SortField getSortField(SchemaField field, boolean reverse) {
field.checkSortability();
return new SortField(field.name, SortField.Type.SHORT, reverse);
}
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new ShortFieldSource(field.name);
}
@Override
public void write(TextResponseWriter writer, String name, StorableField f) throws IOException {
String s = f.stringValue();
// these values may be from a legacy lucene index, which may
// not be properly formatted in some output formats, or may
// incorrectly have a zero length.
if (s.length()==0) {
// zero length value means someone mistakenly indexed the value
// instead of simply leaving it out. Write a null value instead of a numeric.
writer.writeNull(name);
return;
}
try {
short val = Short.parseShort(s);
writer.writeInt(name, val);
} catch (NumberFormatException e){
// can't parse - write out the contents as a string so nothing is lost and
// clients don't get a parse error.
writer.writeStr(name, s, true);
}
}
@Override
public Short toObject(StorableField f) {
return Short.valueOf(toExternal(f));
}
}
@ -221,8 +221,6 @@
<fieldType name="float" class="solr.TrieFloatField" precisionStep="4" omitNorms="true" positionIncrementGap="0"/> <fieldType name="float" class="solr.TrieFloatField" precisionStep="4" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="4" omitNorms="true" positionIncrementGap="0"/> <fieldType name="long" class="solr.TrieLongField" precisionStep="4" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="4" omitNorms="true" positionIncrementGap="0"/> <fieldType name="double" class="solr.TrieDoubleField" precisionStep="4" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="byte" class="solr.ByteField" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="short" class="solr.ShortField" omitNorms="true" positionIncrementGap="0"/>
<fieldtype name="boolean" class="solr.BoolField" sortMissingLast="true"/> <fieldtype name="boolean" class="solr.BoolField" sortMissingLast="true"/>
<fieldtype name="date" class="solr.TrieDateField" precisionStep="0"/> <fieldtype name="date" class="solr.TrieDateField" precisionStep="0"/>
</types> </types>
@ -233,8 +231,6 @@
<field name="float_f" type="float"/> <field name="float_f" type="float"/>
<field name="long_f" type="long"/> <field name="long_f" type="long"/>
<field name="double_f" type="double"/> <field name="double_f" type="double"/>
<field name="byte_f" type="byte"/>
<field name="short_f" type="short"/>
<field name="bool_f" type="boolean"/> <field name="bool_f" type="boolean"/>
<field name="date_f" type="date"/> <field name="date_f" type="date"/>
@ -48,8 +48,6 @@
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/> <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/> <fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/> <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="byte" class="solr.ByteField" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="short" class="solr.ShortField" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/> <fieldType name="tint" class="solr.TrieIntField" precisionStep="8" omitNorms="true" positionIncrementGap="0"/>
@ -609,10 +607,6 @@
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/> <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
<dynamicField name="*_dt1" type="date" indexed="true" stored="true" multiValued="false"/> <dynamicField name="*_dt1" type="date" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_bcd" type="bcdstr" indexed="true" stored="true"/> <dynamicField name="*_bcd" type="bcdstr" indexed="true" stored="true"/>
<dynamicField name="*_by" type="byte" indexed="true" stored="true"/>
<dynamicField name="*_by1" type="byte" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_sh" type="short" indexed="true" stored="true"/>
<dynamicField name="*_sh1" type="short" indexed="true" stored="true" multiValued="false"/>
<!-- some trie-coded dynamic fields for faster range queries --> <!-- some trie-coded dynamic fields for faster range queries -->
@ -38,7 +38,7 @@ public class QueryResultKeyTest extends SolrTestCaseJ4 {
// the hashcode should be the same even when the list // the hashcode should be the same even when the list
// of filters is in a different order // of filters is in a different order
Sort sort = new Sort(new SortField("test", SortField.Type.BYTE)); Sort sort = new Sort(new SortField("test", SortField.Type.INT));
List<Query> filters = new ArrayList<Query>(); List<Query> filters = new ArrayList<Query>();
filters.add(new TermQuery(new Term("test", "field"))); filters.add(new TermQuery(new Term("test", "field")));
filters.add(new TermQuery(new Term("test2", "field2"))); filters.add(new TermQuery(new Term("test2", "field2")));
@ -59,11 +59,9 @@ public class PrimitiveFieldTypeTest extends SolrTestCaseJ4 {
TrieIntField ti; TrieIntField ti;
SortableIntField si; SortableIntField si;
LongField l; LongField l;
ShortField sf;
FloatField f; FloatField f;
DoubleField d; DoubleField d;
BoolField b; BoolField b;
ByteField bf;
// *********************** // ***********************
@ -95,10 +93,6 @@ public class PrimitiveFieldTypeTest extends SolrTestCaseJ4 {
l.init(schema, initMap); l.init(schema, initMap);
assertFalse(l.hasProperty(FieldType.OMIT_NORMS)); assertFalse(l.hasProperty(FieldType.OMIT_NORMS));
sf = new ShortField();
sf.init(schema, initMap);
assertFalse(sf.hasProperty(FieldType.OMIT_NORMS));
f = new FloatField(); f = new FloatField();
f.init(schema, initMap); f.init(schema, initMap);
assertFalse(f.hasProperty(FieldType.OMIT_NORMS)); assertFalse(f.hasProperty(FieldType.OMIT_NORMS));
@ -115,10 +109,6 @@ public class PrimitiveFieldTypeTest extends SolrTestCaseJ4 {
b.init(schema, initMap); b.init(schema, initMap);
assertFalse(b.hasProperty(FieldType.OMIT_NORMS)); assertFalse(b.hasProperty(FieldType.OMIT_NORMS));
bf = new ByteField();
bf.init(schema, initMap);
assertFalse(bf.hasProperty(FieldType.OMIT_NORMS));
// Non-primitive fields // Non-primitive fields
t = new TextField(); t = new TextField();
t.init(schema, initMap); t.init(schema, initMap);
@ -157,10 +147,6 @@ public class PrimitiveFieldTypeTest extends SolrTestCaseJ4 {
l.init(schema, initMap); l.init(schema, initMap);
assertTrue(l.hasProperty(FieldType.OMIT_NORMS)); assertTrue(l.hasProperty(FieldType.OMIT_NORMS));
sf = new ShortField();
sf.init(schema, initMap);
assertTrue(sf.hasProperty(FieldType.OMIT_NORMS));
f = new FloatField(); f = new FloatField();
f.init(schema, initMap); f.init(schema, initMap);
assertTrue(f.hasProperty(FieldType.OMIT_NORMS)); assertTrue(f.hasProperty(FieldType.OMIT_NORMS));
@ -177,10 +163,6 @@ public class PrimitiveFieldTypeTest extends SolrTestCaseJ4 {
b.init(schema, initMap); b.init(schema, initMap);
assertTrue(b.hasProperty(FieldType.OMIT_NORMS)); assertTrue(b.hasProperty(FieldType.OMIT_NORMS));
bf = new ByteField();
bf.init(schema, initMap);
assertTrue(bf.hasProperty(FieldType.OMIT_NORMS));
// Non-primitive fields // Non-primitive fields
t = new TextField(); t = new TextField();
t.init(schema, initMap); t.init(schema, initMap);
@ -46,8 +46,6 @@ public class TestFoldingMultitermQuery extends SolrTestCaseJ4 {
"float_f", num, "float_f", num,
"long_f", num, "long_f", num,
"double_f", num, "double_f", num,
"byte_f", num,
"short_f", num,
"bool_f", boolVal, "bool_f", boolVal,
"date_f", "200" + Integer.toString(i % 10) + "-01-01T00:00:00Z", "date_f", "200" + Integer.toString(i % 10) + "-01-01T00:00:00Z",
"content", docs[i], "content", docs[i],
@ -269,7 +267,7 @@ public class TestFoldingMultitermQuery extends SolrTestCaseJ4 {
@Test @Test
public void testNonTextTypes() { public void testNonTextTypes() {
String[] intTypes = {"int_f", "float_f", "long_f", "double_f", "byte_f", "short_f"}; String[] intTypes = {"int_f", "float_f", "long_f", "double_f"};
for (String str : intTypes) { for (String str : intTypes) {
assertQ(req("q", str + ":" + "0"), assertQ(req("q", str + ":" + "0"),
@ -78,8 +78,6 @@ public class TestValueSourceCache extends SolrTestCaseJ4 {
"val1_i1", "val1_i1",
"val1_l1", "val1_l1",
"val1_b1", "val1_b1",
"val1_by1",
"val1_sh1"
}; };
for (String template : templates) { for (String template : templates) {
for (String nums : numbers) { for (String nums : numbers) {