LUCENE-3443: port 3.x's FieldCache.getDocsWithField to trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1200480 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2011-11-10 19:13:50 +00:00
parent 8a1245cf41
commit 5200e8e38f
52 changed files with 1735 additions and 2960 deletions
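The headline addition: FieldCache gains getDocsWithField(IndexReader, String), and every numeric getter grows a setDocsWithField flag so the docs-with-value bits can be recorded during the same uninversion pass. A minimal sketch of the new surface, assuming a segment reader and a hypothetical "price" field:

    // Sketch only: "reader" and "price" are placeholders, not from the patch.
    int[] prices = FieldCache.DEFAULT.getInts(reader, "price", true); // true: also record which docs have a value
    Bits docsWithPrice = FieldCache.DEFAULT.getDocsWithField(reader, "price"); // cache hit after the call above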

LatLongDistanceFilter.java

@@ -63,8 +63,8 @@ public class LatLongDistanceFilter extends DistanceFilter {
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
final double[] latIndex = FieldCache.DEFAULT.getDoubles(context.reader, latField);
final double[] lngIndex = FieldCache.DEFAULT.getDoubles(context.reader, lngField);
final double[] latIndex = FieldCache.DEFAULT.getDoubles(context.reader, latField, false);
final double[] lngIndex = FieldCache.DEFAULT.getDoubles(context.reader, lngField, false);
final int docBase = nextDocBase;
nextDocBase += context.reader.maxDoc();

org/apache/lucene/search/FieldCache.java

@@ -17,23 +17,21 @@ package org.apache.lucene.search;
* limitations under the License.
*/
import java.io.IOException;
import java.io.PrintStream;
import java.text.DecimalFormat;
import org.apache.lucene.analysis.NumericTokenStream; // for javadocs
import org.apache.lucene.document.NumericField; // for javadocs
import org.apache.lucene.index.DocTermOrds;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.cache.EntryCreator;
import org.apache.lucene.search.cache.CachedArray.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.document.NumericField; // for javadocs
import org.apache.lucene.analysis.NumericTokenStream; // for javadocs
import org.apache.lucene.util.packed.PackedInts;
import java.io.IOException;
import java.io.PrintStream;
import java.text.DecimalFormat;
/**
* Expert: Maintains caches of term values.
*
@@ -65,7 +63,7 @@ public interface FieldCache {
}
/** Interface to parse bytes from document fields.
* @see FieldCache#getBytes(IndexReader, String, FieldCache.ByteParser)
* @see FieldCache#getBytes(IndexReader, String, FieldCache.ByteParser, boolean)
*/
public interface ByteParser extends Parser {
/** Return a single Byte representation of this field's value. */
@@ -73,7 +71,7 @@ public interface FieldCache {
}
/** Interface to parse shorts from document fields.
* @see FieldCache#getShorts(IndexReader, String, FieldCache.ShortParser)
* @see FieldCache#getShorts(IndexReader, String, FieldCache.ShortParser, boolean)
*/
public interface ShortParser extends Parser {
/** Return a short representation of this field's value. */
@@ -81,7 +79,7 @@ public interface FieldCache {
}
/** Interface to parse ints from document fields.
* @see FieldCache#getInts(IndexReader, String, FieldCache.IntParser)
* @see FieldCache#getInts(IndexReader, String, FieldCache.IntParser, boolean)
*/
public interface IntParser extends Parser {
/** Return an integer representation of this field's value. */
@@ -89,7 +87,7 @@ public interface FieldCache {
}
/** Interface to parse floats from document fields.
* @see FieldCache#getFloats(IndexReader, String, FieldCache.FloatParser)
* @see FieldCache#getFloats(IndexReader, String, FieldCache.FloatParser, boolean)
*/
public interface FloatParser extends Parser {
/** Return a float representation of this field's value. */
@@ -97,7 +95,7 @@ public interface FieldCache {
}
/** Interface to parse longs from document fields.
* @see FieldCache#getLongs(IndexReader, String, FieldCache.LongParser)
* @see FieldCache#getLongs(IndexReader, String, FieldCache.LongParser, boolean)
*/
public interface LongParser extends Parser {
/** Return a long representation of this field's value. */
@@ -105,7 +103,7 @@ public interface FieldCache {
}
/** Interface to parse doubles from document fields.
* @see FieldCache#getDoubles(IndexReader, String, FieldCache.DoubleParser)
* @see FieldCache#getDoubles(IndexReader, String, FieldCache.DoubleParser, boolean)
*/
public interface DoubleParser extends Parser {
/** Return a double representation of this field's value. */
@@ -299,16 +297,27 @@ public interface FieldCache {
}
};
/** Checks the internal cache for an appropriate entry, and if none is found,
* reads the terms in <code>field</code> and returns a bit set the size of
* <code>reader.maxDoc()</code>, with a bit turned on for each docid that
* has a value for this field.
*/
public Bits getDocsWithField(IndexReader reader, String field)
throws IOException;
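This Bits view is what lets callers tell a document with no value apart from one whose value happens to be 0 in the uninverted arrays. A hedged usage sketch (the field name is illustrative):

    // Count documents that have no value in a "timestamp" field.
    Bits docsWithField = FieldCache.DEFAULT.getDocsWithField(reader, "timestamp");
    int missing = 0;
    for (int doc = 0; doc < reader.maxDoc(); doc++) {
      if (!docsWithField.get(doc)) {
        missing++;
      }
    }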
/** Checks the internal cache for an appropriate entry, and if none is
* found, reads the terms in <code>field</code> as a single byte and returns an array
* of size <code>reader.maxDoc()</code> of the value each document
* has in the given field.
* @param reader Used to get field values.
* @param field Which field contains the single byte values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public byte[] getBytes (IndexReader reader, String field)
public byte[] getBytes (IndexReader reader, String field, boolean setDocsWithField)
throws IOException;
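Passing true makes a later getDocsWithField call on the same field a cache hit, since both entries are filled from one pass over the terms; callers that never need the bits (such as the range filters below) pass false. Sketch, with a hypothetical field name:

    byte[] flags = FieldCache.DEFAULT.getBytes(reader, "flags", true);
    Bits docsWithFlags = FieldCache.DEFAULT.getDocsWithField(reader, "flags"); // already computed and cached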
/** Checks the internal cache for an appropriate entry, and if none is found,
@@ -318,35 +327,26 @@ public interface FieldCache {
* @param reader Used to get field values.
* @param field Which field contains the bytes.
* @param parser Computes byte for string values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public byte[] getBytes (IndexReader reader, String field, ByteParser parser)
public byte[] getBytes (IndexReader reader, String field, ByteParser parser, boolean setDocsWithField)
throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is found,
* reads the terms in <code>field</code> as bytes and returns an array of
* size <code>reader.maxDoc()</code> of the value each document has in the
* given field.
* @param reader Used to get field values.
* @param field Which field contains the bytes.
* @param creator Used to make the ByteValues
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public ByteValues getBytes(IndexReader reader, String field, EntryCreator<ByteValues> creator ) throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is
* found, reads the terms in <code>field</code> as shorts and returns an array
* of size <code>reader.maxDoc()</code> of the value each document
* has in the given field.
* @param reader Used to get field values.
* @param field Which field contains the shorts.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public short[] getShorts (IndexReader reader, String field)
public short[] getShorts (IndexReader reader, String field, boolean setDocsWithField)
throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is found,
@@ -356,36 +356,26 @@ public interface FieldCache {
* @param reader Used to get field values.
* @param field Which field contains the shorts.
* @param parser Computes short for string values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public short[] getShorts (IndexReader reader, String field, ShortParser parser)
public short[] getShorts (IndexReader reader, String field, ShortParser parser, boolean setDocsWithField)
throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is found,
* reads the terms in <code>field</code> as shorts and returns an array of
* size <code>reader.maxDoc()</code> of the value each document has in the
* given field.
* @param reader Used to get field values.
* @param field Which field contains the shorts.
* @param creator Computes short for string values.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public ShortValues getShorts(IndexReader reader, String field, EntryCreator<ShortValues> creator ) throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is
* found, reads the terms in <code>field</code> as integers and returns an array
* of size <code>reader.maxDoc()</code> of the value each document
* has in the given field.
* @param reader Used to get field values.
* @param field Which field contains the integers.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public int[] getInts (IndexReader reader, String field)
public int[] getInts (IndexReader reader, String field, boolean setDocsWithField)
throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is found,
@@ -395,35 +385,26 @@ public interface FieldCache {
* @param reader Used to get field values.
* @param field Which field contains the integers.
* @param parser Computes integer for string values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public int[] getInts (IndexReader reader, String field, IntParser parser)
public int[] getInts (IndexReader reader, String field, IntParser parser, boolean setDocsWithField)
throws IOException;
/** Checks the internal cache for an appropriate entry, and if none is found,
* reads the terms in <code>field</code> as integers and returns an array of
* size <code>reader.maxDoc()</code> of the value each document has in the
* given field.
* @param reader Used to get field values.
* @param field Which field contains the integers.
* @param creator Computes integer for string values.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public IntValues getInts(IndexReader reader, String field, EntryCreator<IntValues> creator ) throws IOException;
/** Checks the internal cache for an appropriate entry, and if
* none is found, reads the terms in <code>field</code> as floats and returns an array
* of size <code>reader.maxDoc()</code> of the value each document
* has in the given field.
* @param reader Used to get field values.
* @param field Which field contains the floats.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public float[] getFloats (IndexReader reader, String field)
public float[] getFloats (IndexReader reader, String field, boolean setDocsWithField)
throws IOException;
/** Checks the internal cache for an appropriate entry, and if
@@ -433,25 +414,14 @@ public interface FieldCache {
* @param reader Used to get field values.
* @param field Which field contains the floats.
* @param parser Computes float for string values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public float[] getFloats (IndexReader reader, String field,
FloatParser parser) throws IOException;
FloatParser parser, boolean setDocsWithField) throws IOException;
/** Checks the internal cache for an appropriate entry, and if
* none is found, reads the terms in <code>field</code> as floats and returns an array
* of size <code>reader.maxDoc()</code> of the value each document
* has in the given field.
* @param reader Used to get field values.
* @param field Which field contains the floats.
* @param creator Computes float for string values.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public FloatValues getFloats(IndexReader reader, String field, EntryCreator<FloatValues> creator ) throws IOException;
/**
* Checks the internal cache for an appropriate entry, and if none is
* found, reads the terms in <code>field</code> as longs and returns an array
@@ -460,10 +430,12 @@ public interface FieldCache {
*
* @param reader Used to get field values.
* @param field Which field contains the longs.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws java.io.IOException If any error occurs.
*/
public long[] getLongs(IndexReader reader, String field)
public long[] getLongs(IndexReader reader, String field, boolean setDocsWithField)
throws IOException;
/**
@@ -475,27 +447,14 @@ public interface FieldCache {
* @param reader Used to get field values.
* @param field Which field contains the longs.
* @param parser Computes long for string values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public long[] getLongs(IndexReader reader, String field, LongParser parser)
public long[] getLongs(IndexReader reader, String field, LongParser parser, boolean setDocsWithField)
throws IOException;
/**
* Checks the internal cache for an appropriate entry, and if none is found,
* reads the terms in <code>field</code> as longs and returns an array of
* size <code>reader.maxDoc()</code> of the value each document has in the
* given field.
*
* @param reader Used to get field values.
* @param field Which field contains the longs.
* @param creator Computes long for string values.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public LongValues getLongs(IndexReader reader, String field, EntryCreator<LongValues> creator ) throws IOException;
/**
* Checks the internal cache for an appropriate entry, and if none is
* found, reads the terms in <code>field</code> as doubles and returns an array
@@ -504,10 +463,12 @@ public interface FieldCache {
*
* @param reader Used to get field values.
* @param field Which field contains the doubles.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public double[] getDoubles(IndexReader reader, String field)
public double[] getDoubles(IndexReader reader, String field, boolean setDocsWithField)
throws IOException;
/**
@@ -519,27 +480,14 @@ public interface FieldCache {
* @param reader Used to get field values.
* @param field Which field contains the doubles.
* @param parser Computes double for string values.
* @param setDocsWithField If true then {@link #getDocsWithField} will
* also be computed and stored in the FieldCache.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public double[] getDoubles(IndexReader reader, String field, DoubleParser parser)
public double[] getDoubles(IndexReader reader, String field, DoubleParser parser, boolean setDocsWithField)
throws IOException;
/**
* Checks the internal cache for an appropriate entry, and if none is found,
* reads the terms in <code>field</code> as doubles and returns an array of
* size <code>reader.maxDoc()</code> of the value each document has in the
* given field.
*
* @param reader Used to get field values.
* @param field Which field contains the doubles.
* @param creator Computes double for string values.
* @return The values in the given field for each document.
* @throws IOException If any error occurs.
*/
public DoubleValues getDoubles(IndexReader reader, String field, EntryCreator<DoubleValues> creator ) throws IOException;
/** Returned by {@link #getTerms} */
public abstract static class DocTerms {
/** The BytesRef argument must not be null; the method
@@ -644,7 +592,6 @@ public interface FieldCache {
public DocTermsIndex getTermsIndex (IndexReader reader, String field)
throws IOException;
/** Expert: just like {@link
* #getTermsIndex(IndexReader,String)}, but you can specify
* whether more RAM should be consumed in exchange for

File diff suppressed because it is too large.

org/apache/lucene/search/FieldCacheRangeFilter.java

@@ -134,7 +134,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String,boolean)}. This works with all
* byte fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
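A hedged usage sketch for this factory, assuming its usual five-argument signature (the field name and bound are illustrative); a null bound makes the range half-open:

    Filter weightFilter = FieldCacheRangeFilter.newByteRange("weight", null, Byte.valueOf((byte) 64), false, true);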
@@ -143,7 +143,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String,FieldCache.ByteParser)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String,FieldCache.ByteParser,boolean)}. This works with all
* byte fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -172,7 +172,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
final byte[] values = FieldCache.DEFAULT.getBytes(context.reader, field, (FieldCache.ByteParser) parser);
final byte[] values = FieldCache.DEFAULT.getBytes(context.reader, field, (FieldCache.ByteParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -184,7 +184,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String,boolean)}. This works with all
* short fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -193,7 +193,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String,FieldCache.ShortParser)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String,FieldCache.ShortParser,boolean)}. This works with all
* short fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -222,7 +222,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
final short[] values = FieldCache.DEFAULT.getShorts(context.reader, field, (FieldCache.ShortParser) parser);
final short[] values = FieldCache.DEFAULT.getShorts(context.reader, field, (FieldCache.ShortParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -234,7 +234,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String,boolean)}. This works with all
* int fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -243,7 +243,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String,FieldCache.IntParser)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String,FieldCache.IntParser,boolean)}. This works with all
* int fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -272,7 +272,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
final int[] values = FieldCache.DEFAULT.getInts(context.reader, field, (FieldCache.IntParser) parser);
final int[] values = FieldCache.DEFAULT.getInts(context.reader, field, (FieldCache.IntParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -284,7 +284,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String,boolean)}. This works with all
* long fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -293,7 +293,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String,FieldCache.LongParser)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String,FieldCache.LongParser,boolean)}. This works with all
* long fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -322,7 +322,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
final long[] values = FieldCache.DEFAULT.getLongs(context.reader, field, (FieldCache.LongParser) parser);
final long[] values = FieldCache.DEFAULT.getLongs(context.reader, field, (FieldCache.LongParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -334,7 +334,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String,boolean)}. This works with all
* float fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -343,7 +343,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String,FieldCache.FloatParser)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String,FieldCache.FloatParser,boolean)}. This works with all
* float fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -376,7 +376,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
final float[] values = FieldCache.DEFAULT.getFloats(context.reader, field, (FieldCache.FloatParser) parser);
final float[] values = FieldCache.DEFAULT.getFloats(context.reader, field, (FieldCache.FloatParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -388,7 +388,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String,boolean)}. This works with all
* double fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -397,7 +397,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
}
/**
* Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String,FieldCache.DoubleParser)}. This works with all
* Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String,FieldCache.DoubleParser,boolean)}. This works with all
* double fields containing exactly one numeric term in the field. The range can be half-open by setting one
* of the values to <code>null</code>.
*/
@@ -430,7 +430,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
final double[] values = FieldCache.DEFAULT.getDoubles(context.reader, field, (FieldCache.DoubleParser) parser);
final double[] values = FieldCache.DEFAULT.getDoubles(context.reader, field, (FieldCache.DoubleParser) parser, false);
// ignore deleted docs if range doesn't contain 0
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override

org/apache/lucene/search/FieldComparator.java

@@ -17,19 +17,23 @@ package org.apache.lucene.search;
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.values.IndexDocValues;
import org.apache.lucene.index.values.IndexDocValues.Source;
import org.apache.lucene.index.values.IndexDocValues;
import org.apache.lucene.search.FieldCache.ByteParser;
import org.apache.lucene.search.FieldCache.DocTerms;
import org.apache.lucene.search.FieldCache.DocTermsIndex;
import org.apache.lucene.search.cache.*;
import org.apache.lucene.search.cache.CachedArray.*;
import org.apache.lucene.search.FieldCache.DoubleParser;
import org.apache.lucene.search.FieldCache.FloatParser;
import org.apache.lucene.search.FieldCache.IntParser;
import org.apache.lucene.search.FieldCache.LongParser;
import org.apache.lucene.search.FieldCache.ShortParser;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.packed.PackedInts;
import java.io.IOException;
/**
* Expert: a FieldComparator compares hits so as to determine their
* sort order when collecting the top results with {@link
@@ -185,38 +189,43 @@ public abstract class FieldComparator<T> {
}
}
public static abstract class NumericComparator<T extends CachedArray, U extends Number> extends FieldComparator<U> {
protected final CachedArrayCreator<T> creator;
protected T cached;
protected final boolean checkMissing;
protected Bits valid;
public static abstract class NumericComparator<T extends Number> extends FieldComparator<T> {
protected final T missingValue;
protected final String field;
protected Bits docsWithField;
public NumericComparator( CachedArrayCreator<T> c, boolean checkMissing ) {
this.creator = c;
this.checkMissing = checkMissing;
public NumericComparator(String field, T missingValue) {
this.field = field;
this.missingValue = missingValue;
}
protected FieldComparator setup(T cached) {
this.cached = cached;
if (checkMissing)
valid = cached.valid;
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
if (missingValue != null) {
docsWithField = FieldCache.DEFAULT.getDocsWithField(context.reader, field);
// optimization to remove unneeded checks on the bit interface:
if (docsWithField instanceof Bits.MatchAllBits) {
docsWithField = null;
}
} else {
docsWithField = null;
}
return this;
}
}
/** Parses field's values as byte (using {@link
* FieldCache#getBytes} and sorts by ascending value */
public static final class ByteComparator extends NumericComparator<ByteValues,Byte> {
private byte[] docValues;
public static final class ByteComparator extends NumericComparator<Byte> {
private final byte[] values;
private final byte missingValue;
private final ByteParser parser;
private byte[] currentReaderValues;
private byte bottom;
ByteComparator(int numHits, ByteValuesCreator creator, Byte missingValue ) {
super( creator, missingValue!=null );
ByteComparator(int numHits, String field, FieldCache.Parser parser, Byte missingValue) {
super(field, missingValue);
values = new byte[numHits];
this.missingValue = checkMissing
? missingValue.byteValue() : 0;
this.parser = (ByteParser) parser;
}
@Override
@@ -226,27 +235,31 @@ public abstract class FieldComparator<T> {
@Override
public int compareBottom(int doc) {
byte v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
byte v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
return bottom - v2;
}
@Override
public void copy(int slot, int doc) {
byte v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
byte v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
setup(FieldCache.DEFAULT.getBytes(context.reader, creator.field, creator));
docValues = cached.values;
return this;
currentReaderValues = FieldCache.DEFAULT.getBytes(context.reader, field, parser, missingValue != null);
return super.setNextReader(context);
}
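The new pattern has two halves worth spelling out: setNextReader only asks the cache for the bits when a missing value was actually supplied (missingValue != null), and the read path only pays for a Bits.get when the array holds 0, since uninverted arrays default to 0 for docs without a term. The same idiom as a standalone sketch (the helper name is ours, not the patch's):

    // Illustrative helper: resolve a doc's value with missing-value substitution.
    static byte valueOrMissing(byte[] values, Bits docsWithField, int doc, byte missingValue) {
      byte v = values[doc];
      if (docsWithField != null && v == 0 && !docsWithField.get(doc)) {
        v = missingValue; // the stored 0 really meant "no value"
      }
      return v;
    }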
@Override
@@ -263,17 +276,16 @@ public abstract class FieldComparator<T> {
/** Parses field's values as double (using {@link
* FieldCache#getDoubles} and sorts by ascending value */
public static final class DoubleComparator extends NumericComparator<DoubleValues,Double> {
private double[] docValues;
public static final class DoubleComparator extends NumericComparator<Double> {
private final double[] values;
private final double missingValue;
private final DoubleParser parser;
private double[] currentReaderValues;
private double bottom;
DoubleComparator(int numHits, DoubleValuesCreator creator, Double missingValue ) {
super( creator, missingValue != null );
DoubleComparator(int numHits, String field, FieldCache.Parser parser, Double missingValue) {
super(field, missingValue);
values = new double[numHits];
this.missingValue = checkMissing
? missingValue.doubleValue() : 0;
this.parser = (DoubleParser) parser;
}
@Override
@@ -291,9 +303,12 @@ public abstract class FieldComparator<T> {
@Override
public int compareBottom(int doc) {
double v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
double v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
if (bottom > v2) {
return 1;
@@ -306,18 +321,20 @@ public abstract class FieldComparator<T> {
@Override
public void copy(int slot, int doc) {
double v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
double v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
setup(FieldCache.DEFAULT.getDoubles(context.reader, creator.field, creator));
docValues = cached.values;
return this;
currentReaderValues = FieldCache.DEFAULT.getDoubles(context.reader, field, parser, missingValue != null);
return super.setNextReader(context);
}
@Override
@@ -334,8 +351,8 @@ public abstract class FieldComparator<T> {
/** Uses float index values to sort by ascending value */
public static final class FloatDocValuesComparator extends FieldComparator<Double> {
private final double[] values;
private Source currentReaderValues;
private final String field;
private Source currentReaderValues;
private double bottom;
FloatDocValuesComparator(int numHits, String field) {
@@ -395,17 +412,16 @@ public abstract class FieldComparator<T> {
/** Parses field's values as float (using {@link
* FieldCache#getFloats} and sorts by ascending value */
public static final class FloatComparator extends NumericComparator<FloatValues,Float> {
private float[] docValues;
public static final class FloatComparator extends NumericComparator<Float> {
private final float[] values;
private final float missingValue;
private final FloatParser parser;
private float[] currentReaderValues;
private float bottom;
FloatComparator(int numHits, FloatValuesCreator creator, Float missingValue ) {
super( creator, missingValue != null );
FloatComparator(int numHits, String field, FieldCache.Parser parser, Float missingValue) {
super(field, missingValue);
values = new float[numHits];
this.missingValue = checkMissing
? missingValue.floatValue() : 0;
this.parser = (FloatParser) parser;
}
@Override
@@ -426,10 +442,12 @@ public abstract class FieldComparator<T> {
@Override
public int compareBottom(int doc) {
// TODO: are there sneaky non-branch ways to compute sign of float?
float v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
float v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
if (bottom > v2) {
return 1;
@@ -442,18 +460,20 @@ public abstract class FieldComparator<T> {
@Override
public void copy(int slot, int doc) {
float v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
float v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
setup(FieldCache.DEFAULT.getFloats(context.reader, creator.field, creator));
docValues = cached.values;
return this;
currentReaderValues = FieldCache.DEFAULT.getFloats(context.reader, field, parser, missingValue != null);
return super.setNextReader(context);
}
@Override
@@ -469,17 +489,16 @@ public abstract class FieldComparator<T> {
/** Parses field's values as short (using {@link
* FieldCache#getShorts} and sorts by ascending value */
public static final class ShortComparator extends NumericComparator<ShortValues,Short> {
private short[] docValues;
public static final class ShortComparator extends NumericComparator<Short> {
private final short[] values;
private final ShortParser parser;
private short[] currentReaderValues;
private short bottom;
private final short missingValue;
ShortComparator(int numHits, ShortValuesCreator creator, Short missingValue ) {
super( creator, missingValue != null );
ShortComparator(int numHits, String field, FieldCache.Parser parser, Short missingValue) {
super(field, missingValue);
values = new short[numHits];
this.missingValue = checkMissing
? missingValue.shortValue() : 0;
this.parser = (ShortParser) parser;
}
@Override
@@ -489,27 +508,32 @@ public abstract class FieldComparator<T> {
@Override
public int compareBottom(int doc) {
short v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
short v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
return bottom - v2;
}
@Override
public void copy(int slot, int doc) {
short v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
short v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
setup( FieldCache.DEFAULT.getShorts(context.reader, creator.field, creator));
docValues = cached.values;
return this;
currentReaderValues = FieldCache.DEFAULT.getShorts(context.reader, field, parser, missingValue != null);
return super.setNextReader(context);
}
@Override
@@ -525,17 +549,16 @@ public abstract class FieldComparator<T> {
/** Parses field's values as int (using {@link
* FieldCache#getInts} and sorts by ascending value */
public static final class IntComparator extends NumericComparator<IntValues,Integer> {
private int[] docValues;
public static final class IntComparator extends NumericComparator<Integer> {
private final int[] values;
private final IntParser parser;
private int[] currentReaderValues;
private int bottom; // Value of bottom of queue
final int missingValue;
IntComparator(int numHits, IntValuesCreator creator, Integer missingValue ) {
super( creator, missingValue != null );
IntComparator(int numHits, String field, FieldCache.Parser parser, Integer missingValue) {
super(field, missingValue);
values = new int[numHits];
this.missingValue = checkMissing
? missingValue.intValue() : 0;
this.parser = (IntParser) parser;
}
@Override
@@ -561,9 +584,12 @@ public abstract class FieldComparator<T> {
// -1/+1/0 sign
// Cannot return bottom - values[slot2] because that
// may overflow
int v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
int v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
if (bottom > v2) {
return 1;
@@ -576,18 +602,20 @@ public abstract class FieldComparator<T> {
@Override
public void copy(int slot, int doc) {
int v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
int v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
setup(FieldCache.DEFAULT.getInts(context.reader, creator.field, creator));
docValues = cached.values;
return this;
currentReaderValues = FieldCache.DEFAULT.getInts(context.reader, field, parser, missingValue != null);
return super.setNextReader(context);
}
@Override
@@ -669,19 +697,18 @@ public abstract class FieldComparator<T> {
/** Parses field's values as long (using {@link
* FieldCache#getLongs} and sorts by ascending value */
public static final class LongComparator extends NumericComparator<LongValues,Long> {
private long[] docValues;
public static final class LongComparator extends NumericComparator<Long> {
private final long[] values;
private final LongParser parser;
private long[] currentReaderValues;
private long bottom;
private final long missingValue;
LongComparator(int numHits, LongValuesCreator creator, Long missingValue ) {
super( creator, missingValue != null );
LongComparator(int numHits, String field, FieldCache.Parser parser, Long missingValue) {
super(field, missingValue);
values = new long[numHits];
this.missingValue = checkMissing
? missingValue.longValue() : 0;
this.parser = (LongParser) parser;
}
@Override
public int compare(int slot1, int slot2) {
// TODO: there are sneaky non-branch ways to compute
@@ -701,11 +728,13 @@ public abstract class FieldComparator<T> {
public int compareBottom(int doc) {
// TODO: there are sneaky non-branch ways to compute
// -1/+1/0 sign
long v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
long v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
if (bottom > v2) {
return 1;
} else if (bottom < v2) {
@@ -717,18 +746,20 @@ public abstract class FieldComparator<T> {
@Override
public void copy(int slot, int doc) {
long v2 = docValues[doc];
if (valid != null && v2==0 && !valid.get(doc))
long v2 = currentReaderValues[doc];
// Test for v2 == 0 to save Bits.get method call for
// the common case (doc has value and value is non-zero):
if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
}
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
setup(FieldCache.DEFAULT.getLongs(context.reader, creator.field, creator));
docValues = cached.values;
return this;
currentReaderValues = FieldCache.DEFAULT.getLongs(context.reader, field, parser, missingValue != null);
return super.setNextReader(context);
}
@Override

org/apache/lucene/search/SortField.java

@@ -20,7 +20,6 @@ package org.apache.lucene.search;
import java.io.IOException;
import java.util.Comparator;
import org.apache.lucene.search.cache.*;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.StringHelper;
@@ -104,12 +103,14 @@ public class SortField {
private String field;
private Type type; // defaults to determining type dynamically
boolean reverse = false; // defaults to natural order
private CachedArrayCreator<?> creator;
public Object missingValue = null; // used for 'sortMissingFirst/Last'
private FieldCache.Parser parser;
// Used for CUSTOM sort
private FieldComparatorSource comparatorSource;
// Used for 'sortMissingFirst/Last'
public Object missingValue = null;
/** Creates a sort by terms in the given field with the type of term
* values explicitly given.
* @param field Name of field to sort by. Can be <code>null</code> if
@@ -141,10 +142,7 @@ public class SortField {
* by testing which numeric parser the parser subclasses.
* @throws IllegalArgumentException if the parser fails to
* subclass an existing numeric parser, or field is null
*
* @deprecated (4.0) use EntryCreator version
*/
@Deprecated
public SortField(String field, FieldCache.Parser parser) {
this(field, parser, false);
}
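With the creator indirection gone, a parser-based sort is a plain constructor call again. A sketch using one of FieldCache's stock parsers (the field name is hypothetical):

    SortField byPrice = new SortField("price", FieldCache.NUMERIC_UTILS_INT_PARSER, true); // reverse = true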
@@ -159,65 +157,27 @@ public class SortField {
* @param reverse True if natural order should be reversed.
* @throws IllegalArgumentException if the parser fails to
* subclass an existing numeric parser, or field is null
*
* @deprecated (4.0) use EntryCreator version
*/
@Deprecated
public SortField(String field, FieldCache.Parser parser, boolean reverse) {
if (field == null) {
throw new IllegalArgumentException("field can only be null when type is SCORE or DOC");
}
this.field = field;
this.reverse = reverse;
if (parser instanceof FieldCache.IntParser) {
this.creator = new IntValuesCreator( field, (FieldCache.IntParser)parser );
}
else if (parser instanceof FieldCache.FloatParser) {
this.creator = new FloatValuesCreator( field, (FieldCache.FloatParser)parser );
}
else if (parser instanceof FieldCache.ShortParser) {
this.creator = new ShortValuesCreator( field, (FieldCache.ShortParser)parser );
}
else if (parser instanceof FieldCache.ByteParser) {
this.creator = new ByteValuesCreator( field, (FieldCache.ByteParser)parser );
}
else if (parser instanceof FieldCache.LongParser) {
this.creator = new LongValuesCreator( field, (FieldCache.LongParser)parser );
}
else if (parser instanceof FieldCache.DoubleParser) {
this.creator = new DoubleValuesCreator( field, (FieldCache.DoubleParser)parser );
}
else
if (parser instanceof FieldCache.IntParser) initFieldType(field, Type.INT);
else if (parser instanceof FieldCache.FloatParser) initFieldType(field, Type.FLOAT);
else if (parser instanceof FieldCache.ShortParser) initFieldType(field, Type.SHORT);
else if (parser instanceof FieldCache.ByteParser) initFieldType(field, Type.BYTE);
else if (parser instanceof FieldCache.LongParser) initFieldType(field, Type.LONG);
else if (parser instanceof FieldCache.DoubleParser) initFieldType(field, Type.DOUBLE);
else {
throw new IllegalArgumentException("Parser instance does not subclass existing numeric parser from FieldCache (got " + parser + ")");
this.type = this.creator.getSortType();
}
/**
* Sort by a cached entry value
* @param creator
* @param reverse
*/
public SortField( CachedArrayCreator<?> creator, boolean reverse )
{
this.field = creator.field;
this.reverse = reverse;
this.creator = creator;
this.type = creator.getSortType();
}
public SortField setMissingValue( Object v )
{
missingValue = v;
if( missingValue != null ) {
if( this.creator == null ) {
throw new IllegalArgumentException( "Missing value only works for sort fields with a CachedArray" );
}
// Set the flag to get bits
creator.setFlag( CachedArrayCreator.OPTION_CACHE_BITS );
}
this.reverse = reverse;
this.parser = parser;
}
public SortField setMissingValue(Object missingValue) {
if (type != Type.BYTE && type != Type.SHORT && type != Type.INT && type != Type.FLOAT && type != Type.LONG && type != Type.DOUBLE) {
throw new IllegalArgumentException( "Missing value only works for numeric types" );
}
this.missingValue = missingValue;
return this;
}
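Since the rewritten setMissingValue returns this, it chains directly off the constructor. Sketch (field and value are illustrative): documents with no value sort after every real value in an ascending int sort:

    Sort sort = new Sort(new SortField("price", SortField.Type.INT).setMissingValue(Integer.MAX_VALUE));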
@@ -246,23 +206,12 @@ public class SortField {
private void initFieldType(String field, Type type) {
this.type = type;
if (field == null) {
if (type != Type.SCORE && type != Type.DOC)
if (type != Type.SCORE && type != Type.DOC) {
throw new IllegalArgumentException("field can only be null when type is SCORE or DOC");
}
} else {
this.field = field;
}
if( creator != null ) {
throw new IllegalStateException( "creator already exists: "+creator );
}
switch( type ) {
case BYTE: creator = new ByteValuesCreator( field, null ); break;
case SHORT: creator = new ShortValuesCreator( field, null ); break;
case INT: creator = new IntValuesCreator( field, null ); break;
case LONG: creator = new LongValuesCreator( field, null ); break;
case FLOAT: creator = new FloatValuesCreator( field, null ); break;
case DOUBLE: creator = new DoubleValuesCreator( field, null ); break;
}
}
/** Returns the name of the field. Could return <code>null</code>
@@ -283,15 +232,9 @@ public class SortField {
/** Returns the instance of a {@link FieldCache} parser that fits to the given sort type.
* May return <code>null</code> if no parser was specified. Sorting is using the default parser then.
* @return An instance of a {@link FieldCache} parser, or <code>null</code>.
* @deprecated (4.0) use getEntryCreator()
*/
@Deprecated
public FieldCache.Parser getParser() {
return (creator==null) ? null : creator.getParser();
}
public CachedArrayCreator<?> getEntryCreator() {
return creator;
return parser;
}
/** Returns whether the sort should be reversed.
@@ -365,7 +308,6 @@ public class SortField {
break;
}
if (creator != null) buffer.append('(').append(creator).append(')');
if (reverse) buffer.append('!');
return buffer.toString();
@@ -385,7 +327,6 @@ public class SortField {
&& other.type == this.type
&& other.reverse == this.reverse
&& (other.comparatorSource == null ? this.comparatorSource == null : other.comparatorSource.equals(this.comparatorSource))
&& (other.creator == null ? this.creator == null : other.creator.equals(this.creator))
);
}
@@ -399,7 +340,6 @@ public class SortField {
int hash = type.hashCode() ^ 0x346565dd + Boolean.valueOf(reverse).hashCode() ^ 0xaf5998bb;
if (field != null) hash += field.hashCode()^0xff5685dd;
if (comparatorSource != null) hash += comparatorSource.hashCode();
if (creator != null) hash += creator.hashCode()^0x3aaf56ff;
return hash;
}
@@ -448,27 +388,27 @@ public class SortField {
if (useIndexValues) {
return new FieldComparator.IntDocValuesComparator(numHits, field);
} else {
return new FieldComparator.IntComparator(numHits, (IntValuesCreator)creator, (Integer) missingValue);
return new FieldComparator.IntComparator(numHits, field, parser, (Integer) missingValue);
}
case FLOAT:
if (useIndexValues) {
return new FieldComparator.FloatDocValuesComparator(numHits, field);
} else {
return new FieldComparator.FloatComparator(numHits, (FloatValuesCreator) creator, (Float) missingValue);
return new FieldComparator.FloatComparator(numHits, field, parser, (Float) missingValue);
}
case LONG:
return new FieldComparator.LongComparator(numHits, (LongValuesCreator)creator, (Long)missingValue );
return new FieldComparator.LongComparator(numHits, field, parser, (Long) missingValue);
case DOUBLE:
return new FieldComparator.DoubleComparator(numHits, (DoubleValuesCreator)creator, (Double)missingValue );
return new FieldComparator.DoubleComparator(numHits, field, parser, (Double) missingValue);
case BYTE:
return new FieldComparator.ByteComparator(numHits, (ByteValuesCreator)creator, (Byte)missingValue );
return new FieldComparator.ByteComparator(numHits, field, parser, (Byte) missingValue);
case SHORT:
return new FieldComparator.ShortComparator(numHits, (ShortValuesCreator)creator, (Short)missingValue );
return new FieldComparator.ShortComparator(numHits, field, parser, (Short) missingValue);
case CUSTOM:
assert comparatorSource != null;

org/apache/lucene/search/cache/ByteValuesCreator.java (deleted)

@@ -1,146 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.FieldCache.ByteParser;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.cache.CachedArray.ByteValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public class ByteValuesCreator extends CachedArrayCreator<ByteValues>
{
protected ByteParser parser;
public ByteValuesCreator( String field, ByteParser parser, int options )
{
super( field, options );
this.parser = parser;
}
public ByteValuesCreator( String field, ByteParser parser )
{
super( field );
this.parser = parser;
}
@Override
public Class getArrayType() {
return Byte.class;
}
@Override
public Parser getParser() {
return parser;
}
@Override
public SortField.Type getSortType() {
return SortField.Type.BYTE;
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
@Override
public ByteValues create(IndexReader reader) throws IOException {
return validate( new ByteValues(), reader );
}
@Override
public synchronized ByteValues validate(ByteValues entry, IndexReader reader) throws IOException {
boolean ok = false;
if( hasOption(OPTION_CACHE_VALUES) ) {
ok = true;
if( entry.values == null ) {
fillByteValues(entry, reader, field);
}
else {
assertSameParser( entry, parser );
}
}
if( hasOption(OPTION_CACHE_BITS) ) {
ok = true;
if( entry.valid == null ) {
fillValidBits(entry, reader, field);
}
}
if( !ok ) {
throw new RuntimeException( "the config must cache values and/or bits" );
}
return entry;
}
protected void fillByteValues( ByteValues vals, IndexReader reader, String field ) throws IOException
{
if( parser == null ) {
parser = FieldCache.DEFAULT_BYTE_PARSER;
}
setParserAndResetCounts(vals, parser);
Terms terms = MultiFields.getTerms(reader, field);
int maxDoc = reader.maxDoc();
vals.values = new byte[maxDoc];
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
DocsEnum docs = null;
try {
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final byte termval = parser.parseByte(term);
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
vals.values[docID] = termval;
vals.numDocs++;
if( validBits != null ) {
validBits.set( docID );
}
}
vals.numTerms++;
}
} catch (FieldCache.StopFillCacheException stop) {}
if( vals.valid == null ) {
vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
}
}
if( vals.valid == null && vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( maxDoc );
}
}
}
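For contrast, a sketch of how this creator was driven before the commit, reconstructed from the class above (reader and field name are placeholders):

    ByteValuesCreator creator = new ByteValuesCreator("weight", FieldCache.DEFAULT_BYTE_PARSER,
        CachedArrayCreator.CACHE_VALUES_AND_BITS);
    ByteValues cached = FieldCache.DEFAULT.getBytes(reader, "weight", creator); // API removed by this commit
    byte[] values = cached.values; // the uninverted per-doc values
    Bits valid = cached.valid;     // which docs actually had one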

org/apache/lucene/search/cache/CachedArray.java (deleted)

@@ -1,78 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.util.Bits;
public abstract class CachedArray
{
public Integer parserHashCode; // a flag to make sure you don't change what you are asking for in subsequent requests
public int numDocs;
public int numTerms;
/**
* NOTE: these Bits may have false positives for deleted documents. That is,
* Documents that are deleted may be marked as valid but the array value is not.
*/
public Bits valid;
public CachedArray() {
this.parserHashCode = null;
this.numDocs = 0;
this.numTerms = 0;
}
/**
* @return the native array
*/
public abstract Object getRawArray();
//-------------------------------------------------------------
// Concrete Values
//-------------------------------------------------------------
public static class ByteValues extends CachedArray {
public byte[] values = null;
@Override public byte[] getRawArray() { return values; }
};
public static class ShortValues extends CachedArray {
public short[] values = null;
@Override public short[] getRawArray() { return values; }
};
public static class IntValues extends CachedArray {
public int[] values = null;
@Override public int[] getRawArray() { return values; }
};
public static class FloatValues extends CachedArray {
public float[] values = null;
@Override public float[] getRawArray() { return values; }
};
public static class LongValues extends CachedArray {
public long[] values = null;
@Override public long[] getRawArray() { return values; }
};
public static class DoubleValues extends CachedArray {
public double[] values = null;
@Override public double[] getRawArray() { return values; }
};
}

org/apache/lucene/search/cache/CachedArrayCreator.java (deleted)

@@ -1,152 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public abstract class CachedArrayCreator<T extends CachedArray> extends EntryCreatorWithOptions<T>
{
public static final int OPTION_VALIDATE = 1;
public static final int OPTION_CACHE_VALUES = 2;
public static final int OPTION_CACHE_BITS = 4;
// Composite Options Fields
public static final int CACHE_VALUES_AND_BITS = OPTION_CACHE_VALUES | OPTION_CACHE_BITS;
public static final int CACHE_VALUES_AND_BITS_VALIDATE = OPTION_CACHE_VALUES | OPTION_CACHE_BITS | OPTION_VALIDATE;
public final String field;
public CachedArrayCreator( String field )
{
super( OPTION_CACHE_VALUES | OPTION_VALIDATE );
if( field == null ) {
throw new IllegalArgumentException( "field can not be null" );
}
this.field = field;
}
public CachedArrayCreator( String field, int flags )
{
super( flags );
if( field == null ) {
throw new IllegalArgumentException( "field can not be null" );
}
this.field = field;
}
/**
* Note that the 'flags' are not part of the key -- subsequent calls to the cache
* with different options will use the same cache entry.
*/
@Override
public EntryKey getCacheKey() {
return new SimpleEntryKey( CachedArray.class, getArrayType(), field );
//return new Integer( CachedArrayCreator.class.hashCode() ^ getArrayType().hashCode() ^ field.hashCode() );
}
/** Return the type that the array will hold */
public abstract Class getArrayType();
public abstract Parser getParser();
public abstract SortField.Type getSortType();
protected void setParserAndResetCounts(T value, Parser parser)
{
int parserHashCode = parser.hashCode();
if( value.parserHashCode != null && value.parserHashCode != parserHashCode ) {
throw new RuntimeException( "Parser changed in subsequent call. "
+value.parserHashCode+" != "+parserHashCode + " :: " + parser );
}
value.parserHashCode = parserHashCode;
value.numDocs = value.numTerms = 0;
}
protected void assertSameParser(T value, Parser parser)
{
if( parser != null && value.parserHashCode != null ) {
int parserHashCode = parser.hashCode();
if( value.parserHashCode != parserHashCode ) {
throw new RuntimeException( "Parser changed in subsequent call. "
+value.parserHashCode+" != "+parserHashCode + " :: " + parser );
}
}
}
/**
* Utility function to help check what bits are valid
*/
protected Bits checkMatchAllBits( FixedBitSet valid, int numDocs, int maxDocs )
{
if( numDocs != maxDocs ) {
if( hasOption( OPTION_CACHE_BITS ) ) {
for( int i=0; i<maxDocs; i++ ) {
if( !valid.get(i) ) {
return valid;
}
}
}
else {
return null;
}
}
return new Bits.MatchAllBits( maxDocs );
}
public void fillValidBits( T vals, IndexReader reader, String field ) throws IOException
{
vals.numDocs = vals.numTerms = 0;
Terms terms = MultiFields.getTerms(reader, field);
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = new FixedBitSet( reader.maxDoc() );
DocsEnum docs = null;
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
validBits.set( docID );
vals.numDocs++;
}
vals.numTerms++;
}
vals.valid = checkMatchAllBits( validBits, vals.numDocs, reader.maxDoc() );
}
if( vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( reader.maxDoc() );
}
}
}
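
For illustration, a creator that caches both the values and the bits, and validates cached entries on reuse, is configured purely through these composed flags. A sketch (the field name "price" and the class name are hypothetical; IntValuesCreator appears below in this commit):

import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.IntValuesCreator;

public class CreatorOptionsSketch {
  public static void main(String[] args) {
    // null parser: fillIntValues falls back to the default, then numeric, parser.
    IntValuesCreator creator = new IntValuesCreator("price", null,
        CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE);
    // prints true: OPTION_CACHE_BITS is one of the composed bits
    System.out.println(creator.hasOption(CachedArrayCreator.OPTION_CACHE_BITS));
  }
}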

View File

@@ -1,51 +0,0 @@
package org.apache.lucene.search.cache;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.index.DocTermOrds;
import org.apache.lucene.index.IndexReader;
import java.io.IOException;
/**
* Creates {@link DocTermOrds} instances.
*/
public class DocTermOrdsCreator extends EntryCreatorWithOptions<DocTermOrds> {
private final String field;
public DocTermOrdsCreator(String field, int flag) {
super(flag);
this.field = field;
}
@Override
public DocTermOrds create(IndexReader reader) throws IOException {
return new DocTermOrds(reader, field);
}
@Override
public DocTermOrds validate(DocTermOrds entry, IndexReader reader) throws IOException {
return entry;
}
@Override
public EntryKey getCacheKey() {
return new SimpleEntryKey(DocTermOrdsCreator.class, field);
}
}

View File

@@ -1,169 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache.DocTerms;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PagedBytes;
import org.apache.lucene.util.packed.GrowableWriter;
import org.apache.lucene.util.packed.PackedInts;
// TODO: if a DocTermsIndex was already created, we should share it...
public class DocTermsCreator extends EntryCreatorWithOptions<DocTerms>
{
public static final int FASTER_BUT_MORE_RAM = 2;
public String field;
public DocTermsCreator( String field )
{
super( FASTER_BUT_MORE_RAM ); // By default turn on FASTER_BUT_MORE_RAM
if( field == null ) {
throw new IllegalArgumentException( "field can not be null" );
}
this.field = field;
}
public DocTermsCreator( String field, int flags )
{
super( flags );
if( field == null ) {
throw new IllegalArgumentException( "field can not be null" );
}
this.field = field;
}
@Override
public SimpleEntryKey getCacheKey() {
return new SimpleEntryKey( DocTermsCreator.class, field );
}
@Override
public DocTerms create(IndexReader reader) throws IOException {
Terms terms = MultiFields.getTerms(reader, field);
final boolean fasterButMoreRAM = hasOption( FASTER_BUT_MORE_RAM );
final int termCountHardLimit = reader.maxDoc();
// Holds the actual term data, expanded.
final PagedBytes bytes = new PagedBytes(15);
int startBPV;
if (terms != null) {
// Try for coarse estimate for number of bits; this
// should be an underestimate most of the time, which
// is fine -- GrowableWriter will reallocate as needed
long numUniqueTerms = 0;
try {
numUniqueTerms = terms.getUniqueTermCount();
} catch (UnsupportedOperationException uoe) {
numUniqueTerms = -1;
}
if (numUniqueTerms != -1) {
if (numUniqueTerms > termCountHardLimit) {
numUniqueTerms = termCountHardLimit;
}
startBPV = PackedInts.bitsRequired(numUniqueTerms*4);
} else {
startBPV = 1;
}
} else {
startBPV = 1;
}
final GrowableWriter docToOffset = new GrowableWriter(startBPV, reader.maxDoc(), fasterButMoreRAM);
// pointer==0 means not set
bytes.copyUsingLengthPrefix(new BytesRef());
if (terms != null) {
int termCount = 0;
final TermsEnum termsEnum = terms.iterator();
final Bits liveDocs = MultiFields.getLiveDocs(reader);
DocsEnum docs = null;
while(true) {
if (termCount++ == termCountHardLimit) {
// app is misusing the API (there is more than
// one term per doc); in this case we make best
// effort to load what we can (see LUCENE-2142)
break;
}
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final long pointer = bytes.copyUsingLengthPrefix(term);
docs = termsEnum.docs(liveDocs, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
docToOffset.set(docID, pointer);
}
}
}
// maybe an int-only impl?
return new DocTermsImpl(bytes.freeze(true), docToOffset.getMutable());
}
@Override
public DocTerms validate(DocTerms entry, IndexReader reader) throws IOException {
// TODO? nothing? perhaps subsequent call with FASTER_BUT_MORE_RAM?
return entry;
}
private static class DocTermsImpl extends DocTerms {
private final PagedBytes.Reader bytes;
private final PackedInts.Reader docToOffset;
public DocTermsImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset) {
this.bytes = bytes;
this.docToOffset = docToOffset;
}
@Override
public int size() {
return docToOffset.size();
}
@Override
public boolean exists(int docID) {
return docToOffset.get(docID) != 0; // pointer 0 is reserved for "not set"
}
@Override
public BytesRef getTerm(int docID, BytesRef ret) {
final long pointer = docToOffset.get(docID);
return bytes.fill(ret, pointer);
}
}
}
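
A short usage sketch (hypothetical helper, not part of this commit): since pointer 0 was reserved for "unset" above, exists() lets callers skip documents without a value:

import org.apache.lucene.search.FieldCache.DocTerms;
import org.apache.lucene.util.BytesRef;

public class DocTermsReadSketch {
  static void printTerms(DocTerms terms) {
    final BytesRef scratch = new BytesRef(); // filled in place by getTerm
    for (int docID = 0; docID < terms.size(); docID++) {
      if (terms.exists(docID)) {
        System.out.println(docID + " -> " + terms.getTerm(docID, scratch).utf8ToString());
      }
    }
  }
}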

View File

@@ -1,353 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Comparator;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.OrdTermState;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache.DocTermsIndex;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PagedBytes;
import org.apache.lucene.util.packed.GrowableWriter;
import org.apache.lucene.util.packed.PackedInts;
public class DocTermsIndexCreator extends EntryCreatorWithOptions<DocTermsIndex>
{
public static final int FASTER_BUT_MORE_RAM = 2;
public String field;
public DocTermsIndexCreator( String field )
{
super( FASTER_BUT_MORE_RAM ); // By default turn on FASTER_BUT_MORE_RAM
if( field == null ) {
throw new IllegalArgumentException( "field can not be null" );
}
this.field = field;
}
public DocTermsIndexCreator( String field, int flags )
{
super( flags );
if( field == null ) {
throw new IllegalArgumentException( "field can not be null" );
}
this.field = field;
}
@Override
public EntryKey getCacheKey() {
return new SimpleEntryKey( DocTermsIndexCreator.class, field );
}
@Override
public DocTermsIndex create(IndexReader reader) throws IOException
{
Terms terms = MultiFields.getTerms(reader, field);
final boolean fasterButMoreRAM = hasOption(FASTER_BUT_MORE_RAM);
final PagedBytes bytes = new PagedBytes(15);
int startBytesBPV;
int startTermsBPV;
int startNumUniqueTerms;
int maxDoc = reader.maxDoc();
final int termCountHardLimit;
if (maxDoc == Integer.MAX_VALUE) {
termCountHardLimit = Integer.MAX_VALUE;
} else {
termCountHardLimit = maxDoc+1;
}
if (terms != null) {
// Try for coarse estimate for number of bits; this
// should be an underestimate most of the time, which
// is fine -- GrowableWriter will reallocate as needed
long numUniqueTerms = 0;
try {
numUniqueTerms = terms.getUniqueTermCount();
} catch (UnsupportedOperationException uoe) {
numUniqueTerms = -1;
}
if (numUniqueTerms != -1) {
if (numUniqueTerms > termCountHardLimit) {
// app is misusing the API (there is more than
// one term per doc); in this case we make best
// effort to load what we can (see LUCENE-2142)
numUniqueTerms = termCountHardLimit;
}
startBytesBPV = PackedInts.bitsRequired(numUniqueTerms*4);
startTermsBPV = PackedInts.bitsRequired(numUniqueTerms);
startNumUniqueTerms = (int) numUniqueTerms;
} else {
startBytesBPV = 1;
startTermsBPV = 1;
startNumUniqueTerms = 1;
}
} else {
startBytesBPV = 1;
startTermsBPV = 1;
startNumUniqueTerms = 1;
}
GrowableWriter termOrdToBytesOffset = new GrowableWriter(startBytesBPV, 1+startNumUniqueTerms, fasterButMoreRAM);
final GrowableWriter docToTermOrd = new GrowableWriter(startTermsBPV, reader.maxDoc(), fasterButMoreRAM);
// 0 is reserved for "unset"
bytes.copyUsingLengthPrefix(new BytesRef());
int termOrd = 1;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
DocsEnum docs = null;
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
if (termOrd >= termCountHardLimit) {
break;
}
if (termOrd == termOrdToBytesOffset.size()) {
// NOTE: this code only runs if the incoming
// reader impl doesn't implement
// getUniqueTermCount (which should be uncommon)
termOrdToBytesOffset = termOrdToBytesOffset.resize(ArrayUtil.oversize(1+termOrd, 1));
}
termOrdToBytesOffset.set(termOrd, bytes.copyUsingLengthPrefix(term));
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
docToTermOrd.set(docID, termOrd);
}
termOrd++;
}
if (termOrdToBytesOffset.size() > termOrd) {
termOrdToBytesOffset = termOrdToBytesOffset.resize(termOrd);
}
}
// maybe an int-only impl?
return new DocTermsIndexImpl(bytes.freeze(true), termOrdToBytesOffset.getMutable(), docToTermOrd.getMutable(), termOrd);
}
@Override
public DocTermsIndex validate(DocTermsIndex entry, IndexReader reader) throws IOException {
// TODO? nothing? perhaps subsequent call with FASTER_BUT_MORE_RAM?
return entry;
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
public static class DocTermsIndexImpl extends DocTermsIndex {
private final PagedBytes.Reader bytes;
private final PackedInts.Reader termOrdToBytesOffset;
private final PackedInts.Reader docToTermOrd;
private final int numOrd;
public DocTermsIndexImpl(PagedBytes.Reader bytes, PackedInts.Reader termOrdToBytesOffset, PackedInts.Reader docToTermOrd, int numOrd) {
this.bytes = bytes;
this.docToTermOrd = docToTermOrd;
this.termOrdToBytesOffset = termOrdToBytesOffset;
this.numOrd = numOrd;
}
@Override
public PackedInts.Reader getDocToOrd() {
return docToTermOrd;
}
@Override
public int numOrd() {
return numOrd;
}
@Override
public int getOrd(int docID) {
return (int) docToTermOrd.get(docID);
}
@Override
public int size() {
return docToTermOrd.size();
}
@Override
public BytesRef lookup(int ord, BytesRef ret) {
return bytes.fill(ret, termOrdToBytesOffset.get(ord));
}
@Override
public TermsEnum getTermsEnum() {
return this.new DocTermsIndexEnum();
}
class DocTermsIndexEnum extends TermsEnum {
int currentOrd;
int currentBlockNumber;
int end; // end position in the current block
final byte[][] blocks;
final int[] blockEnds;
final BytesRef term = new BytesRef();
public DocTermsIndexEnum() {
currentOrd = 0;
currentBlockNumber = 0;
blocks = bytes.getBlocks();
blockEnds = bytes.getBlockEnds();
currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get(0));
end = blockEnds[currentBlockNumber];
}
@Override
public SeekStatus seekCeil(BytesRef text, boolean useCache /* ignored */) throws IOException {
int low = 1;
int high = numOrd-1;
while (low <= high) {
int mid = (low + high) >>> 1;
seekExact(mid);
int cmp = term.compareTo(text);
if (cmp < 0)
low = mid + 1;
else if (cmp > 0)
high = mid - 1;
else
return SeekStatus.FOUND; // key found
}
if (low == numOrd) {
return SeekStatus.END;
} else {
seekExact(low);
return SeekStatus.NOT_FOUND;
}
}
public void seekExact(long ord) throws IOException {
assert(ord >= 0 && ord <= numOrd);
// TODO: if gap is small, could iterate from current position? Or let user decide that?
currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get((int)ord));
end = blockEnds[currentBlockNumber];
currentOrd = (int)ord;
}
@Override
public BytesRef next() throws IOException {
int start = term.offset + term.length;
if (start >= end) {
// switch byte blocks
if (currentBlockNumber +1 >= blocks.length) {
return null;
}
currentBlockNumber++;
term.bytes = blocks[currentBlockNumber];
end = blockEnds[currentBlockNumber];
start = 0;
if (end<=0) return null; // special case of empty last array
}
currentOrd++;
byte[] block = term.bytes;
if ((block[start] & 128) == 0) {
term.length = block[start];
term.offset = start+1;
} else {
term.length = (((block[start] & 0x7f)) << 8) | (block[1+start] & 0xff);
term.offset = start+2;
}
return term;
}
@Override
public BytesRef term() throws IOException {
return term;
}
@Override
public long ord() throws IOException {
return currentOrd;
}
@Override
public int docFreq() {
throw new UnsupportedOperationException();
}
@Override
public long totalTermFreq() {
return -1;
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public Comparator<BytesRef> getComparator() throws IOException {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public void seekExact(BytesRef term, TermState state) throws IOException {
assert state != null && state instanceof OrdTermState;
this.seekExact(((OrdTermState)state).ord);
}
@Override
public TermState termState() throws IOException {
OrdTermState state = new OrdTermState();
state.ord = currentOrd;
return state;
}
}
}
}
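
Likewise, a sketch of per-document lookups against the resulting DocTermsIndex; ord 0 is the reserved "unset" slot, so it maps to null here (the helper name is hypothetical):

import org.apache.lucene.search.FieldCache.DocTermsIndex;
import org.apache.lucene.util.BytesRef;

public class DocTermsIndexReadSketch {
  // Returns null for documents that had no value in this field.
  static BytesRef termForDoc(DocTermsIndex index, int docID, BytesRef scratch) {
    final int ord = index.getOrd(docID);
    return ord == 0 ? null : index.lookup(ord, scratch);
  }
}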

View File

@@ -1,164 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.FieldCache.DoubleParser;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.cache.CachedArray.DoubleValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public class DoubleValuesCreator extends CachedArrayCreator<DoubleValues>
{
protected DoubleParser parser;
public DoubleValuesCreator( String field, DoubleParser parser, int options )
{
super( field, options );
this.parser = parser;
}
public DoubleValuesCreator( String field, DoubleParser parser )
{
super( field );
this.parser = parser;
}
@Override
public Class getArrayType() {
return Double.class;
}
@Override
public Parser getParser() {
return parser;
}
@Override
public SortField.Type getSortType() {
return SortField.Type.DOUBLE;
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
@Override
public DoubleValues create(IndexReader reader) throws IOException {
return validate( new DoubleValues(), reader );
}
@Override
public synchronized DoubleValues validate(DoubleValues entry, IndexReader reader) throws IOException {
boolean ok = false;
if( hasOption(OPTION_CACHE_VALUES) ) {
ok = true;
if( entry.values == null ) {
fillDoubleValues(entry, reader, field);
}
else {
assertSameParser( entry, parser );
}
}
if( hasOption(OPTION_CACHE_BITS) ) {
ok = true;
if( entry.valid == null ) {
fillValidBits(entry, reader, field);
}
}
if( !ok ) {
throw new RuntimeException( "the config must cache values and/or bits" );
}
return entry;
}
protected void fillDoubleValues( DoubleValues vals, IndexReader reader, String field ) throws IOException
{
if( parser == null ) {
try {
parser = FieldCache.DEFAULT_DOUBLE_PARSER;
fillDoubleValues( vals, reader, field );
return;
}
catch (NumberFormatException ne) {
vals.parserHashCode = null; // wipe the previous one
parser = FieldCache.NUMERIC_UTILS_DOUBLE_PARSER;
fillDoubleValues( vals, reader, field );
return;
}
}
setParserAndResetCounts(vals, parser);
Terms terms = MultiFields.getTerms(reader, field);
int maxDoc = reader.maxDoc();
vals.values = null;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
DocsEnum docs = null;
try {
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final double termval = parser.parseDouble(term);
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
if(vals.values == null) {
vals.values = new double[maxDoc];
}
vals.values[docID] = termval;
vals.numDocs++;
if( validBits != null ) {
validBits.set( docID );
}
}
vals.numTerms++;
}
} catch (FieldCache.StopFillCacheException stop) {}
if( vals.valid == null ) {
vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
}
}
if(vals.values == null) {
vals.values = new double[maxDoc];
}
if( vals.valid == null && vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( maxDoc );
}
}
}
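
The parser fallback in fillDoubleValues first assumes plain decimal terms and, on the first NumberFormatException, restarts the fill with the NumericUtils parser used for NumericField-encoded terms. The same two-step decode, isolated as a sketch (the helper name is hypothetical):

import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.BytesRef;

public class DoubleParserFallbackSketch {
  static double parseEither(BytesRef term) {
    try {
      return FieldCache.DEFAULT_DOUBLE_PARSER.parseDouble(term);       // "3.14"-style terms
    } catch (NumberFormatException nfe) {
      return FieldCache.NUMERIC_UTILS_DOUBLE_PARSER.parseDouble(term); // trie-encoded terms
    }
  }
}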

View File

@@ -1,72 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
/**
* Create Cached Values for a given key
*
* @lucene.experimental
*/
public abstract class EntryCreator<T>
{
public abstract T create( IndexReader reader ) throws IOException;
public abstract T validate( T entry, IndexReader reader ) throws IOException;
/**
* Indicate if a cached value should be checked before usage.
* This is useful if an application wants to support subsequent calls
* that may alter the cached object. If an application wants to avoid
* this (synchronized) check, it should return 'false'.
*
* @return 'true' if the Cache should call 'validate' before returning a cached object
*/
public boolean shouldValidate() {
return true;
}
/**
* @return A key to identify valid cache entries for subsequent requests
*/
public abstract EntryKey getCacheKey();
//------------------------------------------------------------------------
// The following code is a hack to make things work while the
// EntryCreator is stored in the FieldCache.
// When the FieldCache is replaced with a simpler map (LUCENE-2665),
// this can be removed.
//------------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
if( obj instanceof EntryCreator ) {
return getCacheKey().equals( ((EntryCreator)obj).getCacheKey() );
}
return false;
}
@Override
public int hashCode() {
return getCacheKey().hashCode();
}
}

View File

@@ -1,45 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public abstract class EntryCreatorWithOptions<T> extends EntryCreator<T>
{
public static final int OPTION_VALIDATE = 1;
private int flags;
public EntryCreatorWithOptions( int flag ) {
this.flags = flag;
}
@Override
public boolean shouldValidate() {
return hasOption( OPTION_VALIDATE );
}
public boolean hasOption( int key )
{
return (flags & key) == key;
}
public void setFlag(int flag) {
this.flags |= flag;
}
}
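
hasOption is a plain bitmask test: a composite key matches only when every one of its bits is set. A standalone sketch:

public class OptionBitsSketch {
  public static void main(String[] args) {
    final int OPTION_A = 1, OPTION_B = 2, OPTION_C = 4;
    final int flags = OPTION_A | OPTION_B;
    // (flags & key) == key holds only when all bits of 'key' are present
    System.out.println((flags & OPTION_A) == OPTION_A);                           // true
    System.out.println((flags & (OPTION_A | OPTION_C)) == (OPTION_A | OPTION_C)); // false
  }
}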

View File

@@ -1,26 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A simple marker class; perhaps it could or should just be an Object.
*/
public abstract class EntryKey {
}

View File

@@ -1,165 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.FieldCache.FloatParser;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.cache.CachedArray.FloatValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public class FloatValuesCreator extends CachedArrayCreator<FloatValues>
{
protected FloatParser parser;
public FloatValuesCreator( String field, FloatParser parser, int options )
{
super( field, options );
this.parser = parser;
}
public FloatValuesCreator( String field, FloatParser parser )
{
super( field );
this.parser = parser;
}
@Override
public Class getArrayType() {
return Float.class;
}
@Override
public Parser getParser() {
return parser;
}
@Override
public SortField.Type getSortType() {
return SortField.Type.FLOAT;
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
@Override
public FloatValues create(IndexReader reader) throws IOException {
return validate( new FloatValues(), reader );
}
@Override
public synchronized FloatValues validate(FloatValues entry, IndexReader reader) throws IOException {
boolean ok = false;
if( hasOption(OPTION_CACHE_VALUES) ) {
ok = true;
if( entry.values == null ) {
fillFloatValues(entry, reader, field);
}
else {
assertSameParser( entry, parser );
}
}
if( hasOption(OPTION_CACHE_BITS) ) {
ok = true;
if( entry.valid == null ) {
fillValidBits(entry, reader, field);
}
}
if( !ok ) {
throw new RuntimeException( "the config must cache values and/or bits" );
}
return entry;
}
protected void fillFloatValues( FloatValues vals, IndexReader reader, String field ) throws IOException
{
if( parser == null ) {
try {
parser = FieldCache.DEFAULT_FLOAT_PARSER;
fillFloatValues( vals, reader, field );
return;
}
catch (NumberFormatException ne) {
vals.parserHashCode = null; // wipe the previous one
parser = FieldCache.NUMERIC_UTILS_FLOAT_PARSER;
fillFloatValues( vals, reader, field );
return;
}
}
setParserAndResetCounts(vals, parser);
Terms terms = MultiFields.getTerms(reader, field);
int maxDoc = reader.maxDoc();
vals.values = null;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
DocsEnum docs = null;
try {
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final float termval = parser.parseFloat(term);
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
if(vals.values == null) {
vals.values = new float[maxDoc];
}
vals.values[docID] = termval;
vals.numDocs++;
if( validBits != null ) {
validBits.set( docID );
}
}
vals.numTerms++;
}
} catch (FieldCache.StopFillCacheException stop) {}
if( vals.valid == null ) {
vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
}
}
if(vals.values == null) {
vals.values = new float[maxDoc];
}
if( vals.valid == null && vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( maxDoc );
}
}
}

View File

@@ -1,165 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.FieldCache.IntParser;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.cache.CachedArray.IntValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public class IntValuesCreator extends CachedArrayCreator<IntValues>
{
protected IntParser parser;
public IntValuesCreator( String field, IntParser parser, int options )
{
super( field, options );
this.parser = parser;
}
public IntValuesCreator( String field, IntParser parser )
{
super( field );
this.parser = parser;
}
@Override
public Class getArrayType() {
return Integer.class;
}
@Override
public Parser getParser() {
return parser;
}
@Override
public SortField.Type getSortType() {
return SortField.Type.INT;
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
@Override
public IntValues create(IndexReader reader) throws IOException {
return validate( new IntValues(), reader );
}
@Override
public synchronized IntValues validate(IntValues entry, IndexReader reader) throws IOException {
boolean ok = false;
if( hasOption(OPTION_CACHE_VALUES) ) {
ok = true;
if( entry.values == null ) {
fillIntValues(entry, reader, field);
}
else {
assertSameParser( entry, parser );
}
}
if( hasOption(OPTION_CACHE_BITS) ) {
ok = true;
if( entry.valid == null ) {
fillValidBits(entry, reader, field);
}
}
if( !ok ) {
throw new RuntimeException( "the config must cache values and/or bits" );
}
return entry;
}
protected void fillIntValues( IntValues vals, IndexReader reader, String field ) throws IOException
{
if( parser == null ) {
try {
parser = FieldCache.DEFAULT_INT_PARSER;
fillIntValues( vals, reader, field );
return;
}
catch (NumberFormatException ne) {
vals.parserHashCode = null;
parser = FieldCache.NUMERIC_UTILS_INT_PARSER;
fillIntValues( vals, reader, field );
return;
}
}
setParserAndResetCounts(vals, parser);
Terms terms = MultiFields.getTerms(reader, field);
int maxDoc = reader.maxDoc();
vals.values = null;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
DocsEnum docs = null;
try {
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final int termval = parser.parseInt(term);
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
if(vals.values == null) {
vals.values = new int[maxDoc];
}
vals.values[docID] = termval;
vals.numDocs++;
if( validBits != null ) {
validBits.set( docID );
}
}
vals.numTerms++;
}
} catch (FieldCache.StopFillCacheException stop) {}
if( vals.valid == null ) {
vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
}
}
if(vals.values == null) {
vals.values = new int[maxDoc];
}
if( vals.valid == null && vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( maxDoc );
}
}
}

View File

@@ -1,165 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.FieldCache.LongParser;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.cache.CachedArray.LongValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public class LongValuesCreator extends CachedArrayCreator<LongValues>
{
protected LongParser parser;
public LongValuesCreator( String field, LongParser parser, int options )
{
super( field, options );
this.parser = parser;
}
public LongValuesCreator( String field, LongParser parser )
{
super( field );
this.parser = parser;
}
@Override
public Class getArrayType() {
return Long.class;
}
@Override
public Parser getParser() {
return parser;
}
@Override
public SortField.Type getSortType() {
return SortField.Type.LONG;
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
@Override
public LongValues create(IndexReader reader) throws IOException {
return validate( new LongValues(), reader );
}
@Override
public synchronized LongValues validate(LongValues entry, IndexReader reader) throws IOException {
boolean ok = false;
if( hasOption(OPTION_CACHE_VALUES) ) {
ok = true;
if( entry.values == null ) {
fillLongValues(entry, reader, field);
}
else {
assertSameParser( entry, parser );
}
}
if( hasOption(OPTION_CACHE_BITS) ) {
ok = true;
if( entry.valid == null ) {
fillValidBits(entry, reader, field);
}
}
if( !ok ) {
throw new RuntimeException( "the config must cache values and/or bits" );
}
return entry;
}
protected void fillLongValues( LongValues vals, IndexReader reader, String field ) throws IOException
{
if( parser == null ) {
try {
parser = FieldCache.DEFAULT_LONG_PARSER;
fillLongValues( vals, reader, field );
return;
}
catch (NumberFormatException ne) {
vals.parserHashCode = null; // wipe the previous one
parser = FieldCache.NUMERIC_UTILS_LONG_PARSER;
fillLongValues( vals, reader, field );
return;
}
}
setParserAndResetCounts(vals, parser);
Terms terms = MultiFields.getTerms(reader, field);
int maxDoc = reader.maxDoc();
vals.values = null;
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
DocsEnum docs = null;
try {
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final long termval = parser.parseLong(term);
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
if(vals.values == null) {
vals.values = new long[maxDoc];
}
vals.values[docID] = termval;
vals.numDocs++;
if( validBits != null ) {
validBits.set( docID );
}
}
vals.numTerms++;
}
} catch (FieldCache.StopFillCacheException stop) {}
if( vals.valid == null ) {
vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
}
}
if(vals.values == null) {
vals.values = new long[maxDoc];
}
if( vals.valid == null && vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( maxDoc );
}
}
}

View File

@@ -1,147 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.FieldCache.Parser;
import org.apache.lucene.search.FieldCache.ShortParser;
import org.apache.lucene.search.cache.CachedArray.ShortValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
public class ShortValuesCreator extends CachedArrayCreator<ShortValues>
{
protected ShortParser parser;
public ShortValuesCreator( String field, ShortParser parser, int options )
{
super( field, options );
this.parser = parser;
}
public ShortValuesCreator( String field, ShortParser parser )
{
super( field );
this.parser = parser;
}
@Override
public Class getArrayType() {
return Short.class;
}
@Override
public Parser getParser() {
return parser;
}
@Override
public SortField.Type getSortType() {
return SortField.Type.SHORT;
}
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
@Override
public ShortValues create(IndexReader reader) throws IOException {
return validate( new ShortValues(), reader );
}
@Override
public synchronized ShortValues validate(ShortValues entry, IndexReader reader) throws IOException {
boolean ok = false;
if( hasOption(OPTION_CACHE_VALUES) ) {
ok = true;
if( entry.values == null ) {
fillShortValues(entry, reader, field);
}
else {
assertSameParser( entry, parser );
}
}
if( hasOption(OPTION_CACHE_BITS) ) {
ok = true;
if( entry.valid == null ) {
fillValidBits(entry, reader, field);
}
}
if( !ok ) {
throw new RuntimeException( "the config must cache values and/or bits" );
}
return entry;
}
protected void fillShortValues( ShortValues vals, IndexReader reader, String field ) throws IOException
{
if( parser == null ) {
parser = FieldCache.DEFAULT_SHORT_PARSER;
}
setParserAndResetCounts(vals, parser);
Terms terms = MultiFields.getTerms(reader, field);
int maxDoc = reader.maxDoc();
vals.values = new short[maxDoc];
if (terms != null) {
final TermsEnum termsEnum = terms.iterator();
FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
DocsEnum docs = null;
try {
while(true) {
final BytesRef term = termsEnum.next();
if (term == null) {
break;
}
final short termval = parser.parseShort(term);
docs = termsEnum.docs(null, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
vals.values[docID] = termval;
vals.numDocs++;
if( validBits != null ) {
validBits.set( docID );
}
}
vals.numTerms++;
}
} catch (FieldCache.StopFillCacheException stop) {}
if( vals.valid == null ) {
vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
}
}
if( vals.valid == null && vals.numDocs < 1 ) {
vals.valid = new Bits.MatchNoBits( maxDoc );
}
}
}

View File

@@ -1,77 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class SimpleEntryKey extends EntryKey
{
public final Class clazz;
public final Object[] args;
public final int hash;
public SimpleEntryKey( Class clazz, Object ... args ) {
this.clazz = clazz;
this.args = args;
int hash = clazz.hashCode();
if( args != null ) {
for( Object obj : args ) {
hash ^= obj.hashCode();
}
}
this.hash = hash;
}
@Override
public boolean equals(Object obj) {
if( obj instanceof SimpleEntryKey ) {
SimpleEntryKey key = (SimpleEntryKey)obj;
if( key.hash != hash ||
key.clazz != clazz ||
key.args.length != args.length ) {
return false;
}
// On the off chance that the hash and argument count match anyway,
// compare the actual argument values.
for( int i=0; i<args.length; i++ ) {
if( !args[i].equals( key.args[i] ) ) {
return false;
}
}
return true;
}
return false;
}
@Override
public int hashCode() {
return hash;
}
@Override
public String toString() {
StringBuilder str = new StringBuilder();
str.append( '[' ).append( clazz.getName() ).append( ':' );
for( Object v : args ) {
str.append( v ).append( ':' );
}
str.append( hash ).append( ']' );
return str.toString();
}
}
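
Two keys built from the same class and arguments compare equal and hash alike, which is what lets a repeated cache request find the existing entry. A sketch (the field name is hypothetical):

import org.apache.lucene.search.cache.CachedArray;
import org.apache.lucene.search.cache.SimpleEntryKey;

public class EntryKeySketch {
  public static void main(String[] args) {
    SimpleEntryKey a = new SimpleEntryKey(CachedArray.class, Integer.class, "price");
    SimpleEntryKey b = new SimpleEntryKey(CachedArray.class, Integer.class, "price");
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // prints true
  }
}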

View File

@@ -1,25 +0,0 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
FieldCache
</body>
</html>

View File

@@ -119,6 +119,13 @@ public final class FieldCacheSanityChecker {
final CacheEntry item = cacheEntries[i];
final Object val = item.getValue();
// It's OK to have dup entries, where one is e.g.
// float[] and the other is the Bits (from
// getDocsWithField())
if (val instanceof Bits) {
continue;
}
if (val instanceof FieldCache.CreationPlaceholder)
continue;
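
This carve-out exists because, with getDocsWithField ported, asking for a field's values and for its docs-with-field bits legitimately leaves two cache entries for the same reader/field. A sketch of that access pattern, assuming the getDocsWithField signature this commit ports (the helper name is hypothetical):

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;

public class DupEntrySketch {
  static void loadBoth(IndexReader reader) throws IOException {
    // Both calls populate the cache for the same reader/field; the sanity
    // checker must not report the Bits entry as an accidental duplicate.
    final int[] ids = FieldCache.DEFAULT.getInts(reader, "id", true); // setDocsWithField=true
    final Bits docsWithId = FieldCache.DEFAULT.getDocsWithField(reader, "id");
  }
}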

View File

@@ -708,12 +708,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
assertEquals("wrong number of hits", 34, hits.length);
// check decoding into field cache
int[] fci = FieldCache.DEFAULT.getInts(searcher.getIndexReader(), "trieInt");
int[] fci = FieldCache.DEFAULT.getInts(searcher.getIndexReader(), "trieInt", false);
for (int val : fci) {
assertTrue("value in id bounds", val >= 0 && val < 35);
}
long[] fcl = FieldCache.DEFAULT.getLongs(searcher.getIndexReader(), "trieLong");
long[] fcl = FieldCache.DEFAULT.getLongs(searcher.getIndexReader(), "trieLong", false);
for (long val : fcl) {
assertTrue("value in id bounds", val >= 0L && val < 35L);
}

View File

@@ -17,7 +17,6 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
@@ -31,20 +30,8 @@ import org.apache.lucene.document.NumericField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DocTermOrds.TermOrdsIterator;
import org.apache.lucene.index.codecs.BlockTermsReader;
import org.apache.lucene.index.codecs.BlockTermsWriter;
import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.codecs.PostingsFormat;
import org.apache.lucene.index.codecs.FieldsConsumer;
import org.apache.lucene.index.codecs.FieldsProducer;
import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
import org.apache.lucene.index.codecs.FixedGapTermsIndexWriter;
import org.apache.lucene.index.codecs.PostingsReaderBase;
import org.apache.lucene.index.codecs.PostingsWriterBase;
import org.apache.lucene.index.codecs.TermsIndexReaderBase;
import org.apache.lucene.index.codecs.TermsIndexWriterBase;
import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsReader;
import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsWriter;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
@@ -317,7 +304,7 @@ public class TestDocTermOrds extends LuceneTestCase {
_TestUtil.nextInt(random, 2, 10));
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id");
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
/*
for(int docID=0;docID<subR.maxDoc();docID++) {
System.out.println(" docID=" + docID + " id=" + docIDToID[docID]);

View File

@@ -285,7 +285,7 @@ public class TestFieldsReader extends LuceneTestCase {
assertEquals(numDocs, r.numDocs());
for(IndexReader sub : r.getSequentialSubReaders()) {
final int[] ids = FieldCache.DEFAULT.getInts(sub, "id");
final int[] ids = FieldCache.DEFAULT.getInts(sub, "id", false);
for(int docID=0;docID<sub.numDocs();docID++) {
final Document doc = sub.document(docID);
final Field f = (Field) doc.getField("nf");

View File

@@ -1141,7 +1141,7 @@ public class TestIndexReader extends LuceneTestCase
// Open reader
IndexReader r = getOnlySegmentReader(IndexReader.open(dir, false));
final int[] ints = FieldCache.DEFAULT.getInts(r, "number");
final int[] ints = FieldCache.DEFAULT.getInts(r, "number", false);
assertEquals(1, ints.length);
assertEquals(17, ints[0]);
@@ -1149,7 +1149,7 @@ public class TestIndexReader extends LuceneTestCase
IndexReader r2 = (IndexReader) r.clone();
r.close();
assertTrue(r2 != r);
final int[] ints2 = FieldCache.DEFAULT.getInts(r2, "number");
final int[] ints2 = FieldCache.DEFAULT.getInts(r2, "number", false);
r2.close();
assertEquals(1, ints2.length);
@@ -1177,7 +1177,7 @@ public class TestIndexReader extends LuceneTestCase
// Open reader1
IndexReader r = IndexReader.open(dir, false);
IndexReader r1 = getOnlySegmentReader(r);
final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
final int[] ints = FieldCache.DEFAULT.getInts(r1, "number", false);
assertEquals(1, ints.length);
assertEquals(17, ints[0]);
@@ -1190,7 +1190,7 @@ public class TestIndexReader extends LuceneTestCase
assertNotNull(r2);
r.close();
IndexReader sub0 = r2.getSequentialSubReaders()[0];
final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number");
final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number", false);
r2.close();
assertTrue(ints == ints2);

View File

@@ -1279,7 +1279,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
IndexReader r = IndexReader.open(dir, false);
assertTrue(r instanceof DirectoryReader);
IndexReader r1 = getOnlySegmentReader(r);
final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
final int[] ints = FieldCache.DEFAULT.getInts(r1, "number", false);
assertEquals(1, ints.length);
assertEquals(17, ints[0]);
@@ -1299,7 +1299,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
r.close();
assertTrue(((DirectoryReader) r2).readOnly);
IndexReader[] subs = r2.getSequentialSubReaders();
final int[] ints2 = FieldCache.DEFAULT.getInts(subs[0], "number");
final int[] ints2 = FieldCache.DEFAULT.getInts(subs[0], "number", false);
r2.close();
assertTrue(((SegmentReader) subs[0]).readOnly);

View File

@@ -232,7 +232,7 @@ public class TestTermsEnum extends LuceneTestCase {
w.close();
// NOTE: intentional insanity!!
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id");
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
for(int iter=0;iter<10*RANDOM_MULTIPLIER;iter++) {

View File

@@ -16,15 +16,6 @@ package org.apache.lucene.search;
* limitations under the License.
*/
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
@@ -32,6 +23,20 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
public class TestFieldCache extends LuceneTestCase {
protected IndexReader reader;
@@ -67,6 +72,13 @@ public class TestFieldCache extends LuceneTestCase {
doc.add(newField("theShort", String.valueOf(theShort--), StringField.TYPE_UNSTORED));
doc.add(newField("theInt", String.valueOf(theInt--), StringField.TYPE_UNSTORED));
doc.add(newField("theFloat", String.valueOf(theFloat--), StringField.TYPE_UNSTORED));
if (i%2 == 0) {
doc.add(newField("sparse", String.valueOf(i), StringField.TYPE_UNSTORED));
}
if (i%2 == 0) {
doc.add(new NumericField("numInt").setIntValue(i));
}
// sometimes skip the field:
if (random.nextInt(40) != 17) {
@@ -101,8 +113,8 @@ public class TestFieldCache extends LuceneTestCase {
FieldCache cache = FieldCache.DEFAULT;
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
cache.setInfoStream(new PrintStream(bos));
cache.getDoubles(reader, "theDouble");
cache.getFloats(reader, "theDouble");
cache.getDoubles(reader, "theDouble", false);
cache.getFloats(reader, "theDouble", false);
assertTrue(bos.toString().indexOf("WARNING") != -1);
} finally {
FieldCache.DEFAULT.purgeAllCaches();
@@ -111,60 +123,76 @@ public class TestFieldCache extends LuceneTestCase {
public void test() throws IOException {
FieldCache cache = FieldCache.DEFAULT;
double [] doubles = cache.getDoubles(reader, "theDouble");
assertSame("Second request to cache return same array", doubles, cache.getDoubles(reader, "theDouble"));
assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER));
double [] doubles = cache.getDoubles(reader, "theDouble", random.nextBoolean());
assertSame("Second request to cache return same array", doubles, cache.getDoubles(reader, "theDouble", random.nextBoolean()));
assertSame("Second request with explicit parser return same array", doubles, cache.getDoubles(reader, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, random.nextBoolean()));
assertTrue("doubles Size: " + doubles.length + " is not: " + NUM_DOCS, doubles.length == NUM_DOCS);
for (int i = 0; i < doubles.length; i++) {
assertTrue(doubles[i] + " does not equal: " + (Double.MAX_VALUE - i), doubles[i] == (Double.MAX_VALUE - i));
}
long [] longs = cache.getLongs(reader, "theLong");
assertSame("Second request to cache return same array", longs, cache.getLongs(reader, "theLong"));
assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.DEFAULT_LONG_PARSER));
long [] longs = cache.getLongs(reader, "theLong", random.nextBoolean());
assertSame("Second request to cache return same array", longs, cache.getLongs(reader, "theLong", random.nextBoolean()));
assertSame("Second request with explicit parser return same array", longs, cache.getLongs(reader, "theLong", FieldCache.DEFAULT_LONG_PARSER, random.nextBoolean()));
assertTrue("longs Size: " + longs.length + " is not: " + NUM_DOCS, longs.length == NUM_DOCS);
for (int i = 0; i < longs.length; i++) {
assertTrue(longs[i] + " does not equal: " + (Long.MAX_VALUE - i) + " i=" + i, longs[i] == (Long.MAX_VALUE - i));
}
byte [] bytes = cache.getBytes(reader, "theByte");
assertSame("Second request to cache return same array", bytes, cache.getBytes(reader, "theByte"));
assertSame("Second request with explicit parser return same array", bytes, cache.getBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER));
byte [] bytes = cache.getBytes(reader, "theByte", random.nextBoolean());
assertSame("Second request to cache return same array", bytes, cache.getBytes(reader, "theByte", random.nextBoolean()));
assertSame("Second request with explicit parser return same array", bytes, cache.getBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER, random.nextBoolean()));
assertTrue("bytes Size: " + bytes.length + " is not: " + NUM_DOCS, bytes.length == NUM_DOCS);
for (int i = 0; i < bytes.length; i++) {
assertTrue(bytes[i] + " does not equal: " + (Byte.MAX_VALUE - i), bytes[i] == (byte) (Byte.MAX_VALUE - i));
}
short [] shorts = cache.getShorts(reader, "theShort");
assertSame("Second request to cache return same array", shorts, cache.getShorts(reader, "theShort"));
assertSame("Second request with explicit parser return same array", shorts, cache.getShorts(reader, "theShort", FieldCache.DEFAULT_SHORT_PARSER));
short [] shorts = cache.getShorts(reader, "theShort", random.nextBoolean());
assertSame("Second request to cache return same array", shorts, cache.getShorts(reader, "theShort", random.nextBoolean()));
assertSame("Second request with explicit parser return same array", shorts, cache.getShorts(reader, "theShort", FieldCache.DEFAULT_SHORT_PARSER, random.nextBoolean()));
assertTrue("shorts Size: " + shorts.length + " is not: " + NUM_DOCS, shorts.length == NUM_DOCS);
for (int i = 0; i < shorts.length; i++) {
assertTrue(shorts[i] + " does not equal: " + (Short.MAX_VALUE - i), shorts[i] == (short) (Short.MAX_VALUE - i));
}
int [] ints = cache.getInts(reader, "theInt");
assertSame("Second request to cache return same array", ints, cache.getInts(reader, "theInt"));
assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.DEFAULT_INT_PARSER));
int [] ints = cache.getInts(reader, "theInt", random.nextBoolean());
assertSame("Second request to cache return same array", ints, cache.getInts(reader, "theInt", random.nextBoolean()));
assertSame("Second request with explicit parser return same array", ints, cache.getInts(reader, "theInt", FieldCache.DEFAULT_INT_PARSER, random.nextBoolean()));
assertTrue("ints Size: " + ints.length + " is not: " + NUM_DOCS, ints.length == NUM_DOCS);
for (int i = 0; i < ints.length; i++) {
assertTrue(ints[i] + " does not equal: " + (Integer.MAX_VALUE - i), ints[i] == (Integer.MAX_VALUE - i));
}
float [] floats = cache.getFloats(reader, "theFloat");
assertSame("Second request to cache return same array", floats, cache.getFloats(reader, "theFloat"));
assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.DEFAULT_FLOAT_PARSER));
float [] floats = cache.getFloats(reader, "theFloat", random.nextBoolean());
assertSame("Second request to cache return same array", floats, cache.getFloats(reader, "theFloat", random.nextBoolean()));
assertSame("Second request with explicit parser return same array", floats, cache.getFloats(reader, "theFloat", FieldCache.DEFAULT_FLOAT_PARSER, random.nextBoolean()));
assertTrue("floats Size: " + floats.length + " is not: " + NUM_DOCS, floats.length == NUM_DOCS);
for (int i = 0; i < floats.length; i++) {
assertTrue(floats[i] + " does not equal: " + (Float.MAX_VALUE - i), floats[i] == (Float.MAX_VALUE - i));
}
Bits docsWithField = cache.getDocsWithField(reader, "theLong");
assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "theLong"));
assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
for (int i = 0; i < docsWithField.length(); i++) {
assertTrue(docsWithField.get(i));
}
docsWithField = cache.getDocsWithField(reader, "sparse");
assertSame("Second request to cache return same array", docsWithField, cache.getDocsWithField(reader, "sparse"));
assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits", docsWithField instanceof Bits.MatchAllBits);
assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS, docsWithField.length() == NUM_DOCS);
for (int i = 0; i < docsWithField.length(); i++) {
assertEquals(i%2 == 0, docsWithField.get(i));
}
// getTermsIndex
FieldCache.DocTermsIndex termsIndex = cache.getTermsIndex(reader, "theRandomUnicodeString");
assertSame("Second request to cache return same array", termsIndex, cache.getTermsIndex(reader, "theRandomUnicodeString"));
@ -261,8 +289,8 @@ public class TestFieldCache extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(500));
IndexReader r = IndexReader.open(writer, true);
FieldCache.DocTerms terms = FieldCache.DEFAULT.getTerms(r, "foobar");
FieldCache.DocTermsIndex termsIndex = FieldCache.DEFAULT.getTermsIndex(r, "foobar");
FieldCache.DEFAULT.getTerms(r, "foobar");
FieldCache.DEFAULT.getTermsIndex(r, "foobar");
writer.close();
r.close();
dir.close();
@ -284,4 +312,110 @@ public class TestFieldCache extends LuceneTestCase {
return s;
}
public void testDocsWithField() throws Exception {
FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
assertEquals(0, cache.getCacheEntries().length);
double[] doubles = cache.getDoubles(reader, "theDouble", true);
// The double[] takes two slots (one w/ null parser, one
// w/ real parser), and docsWithField should also
// have been populated:
assertEquals(3, cache.getCacheEntries().length);
Bits bits = cache.getDocsWithField(reader, "theDouble");
// No new entries should appear:
assertEquals(3, cache.getCacheEntries().length);
assertTrue(bits instanceof Bits.MatchAllBits);
int[] ints = cache.getInts(reader, "sparse", true);
assertEquals(6, cache.getCacheEntries().length);
Bits docsWithField = cache.getDocsWithField(reader, "sparse");
assertEquals(6, cache.getCacheEntries().length);
for (int i = 0; i < docsWithField.length(); i++) {
if (i%2 == 0) {
assertTrue(docsWithField.get(i));
assertEquals(i, ints[i]);
} else {
assertFalse(docsWithField.get(i));
}
}
int[] numInts = cache.getInts(reader, "numInt", random.nextBoolean());
docsWithField = cache.getDocsWithField(reader, "numInt");
for (int i = 0; i < docsWithField.length(); i++) {
if (i%2 == 0) {
assertTrue(docsWithField.get(i));
assertEquals(i, numInts[i]);
} else {
assertFalse(docsWithField.get(i));
}
}
}
public void testGetDocsWithFieldThreadSafety() throws Exception {
final FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
int NUM_THREADS = 3;
Thread[] threads = new Thread[NUM_THREADS];
final AtomicBoolean failed = new AtomicBoolean();
final AtomicInteger iters = new AtomicInteger();
final int NUM_ITER = 200 * RANDOM_MULTIPLIER;
final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS,
new Runnable() {
@Override
public void run() {
cache.purgeAllCaches();
iters.incrementAndGet();
}
});
for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
threads[threadIDX] = new Thread() {
@Override
public void run() {
try {
while(!failed.get()) {
final int op = random.nextInt(3);
if (op == 0) {
// Purge all caches & resume, once all
// threads get here:
restart.await();
if (iters.get() >= NUM_ITER) {
break;
}
} else if (op == 1) {
Bits docsWithField = cache.getDocsWithField(reader, "sparse");
for (int i = 0; i < docsWithField.length(); i++) {
assertEquals(i%2 == 0, docsWithField.get(i));
}
} else {
int[] ints = cache.getInts(reader, "sparse", true);
Bits docsWithField = cache.getDocsWithField(reader, "sparse");
for (int i = 0; i < docsWithField.length(); i++) {
if (i%2 == 0) {
assertTrue(docsWithField.get(i));
assertEquals(i, ints[i]);
} else {
assertFalse(docsWithField.get(i));
}
}
}
}
} catch (Throwable t) {
failed.set(true);
restart.reset();
throw new RuntimeException(t);
}
}
};
threads[threadIDX].start();
}
for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
threads[threadIDX].join();
}
assertFalse(failed.get());
}
}

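As the assertions above rely on, getDocsWithField hands back a Bits.MatchAllBits when every document has the field, so callers can skip per-document existence checks for fully populated fields. A minimal sketch:

Bits bits = FieldCache.DEFAULT.getDocsWithField(reader, "theLong");
if (bits instanceof Bits.MatchAllBits) {
  // fully populated field: every get(doc) returns true, no per-doc test needed
} else {
  for (int doc = 0; doc < bits.length(); doc++) {
    if (!bits.get(doc)) {
      // this doc has no value for the field
    }
  }
}
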
View File

@ -44,13 +44,6 @@ import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.FieldValueHitQueue.Entry;
import org.apache.lucene.search.cache.ByteValuesCreator;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.DoubleValuesCreator;
import org.apache.lucene.search.cache.FloatValuesCreator;
import org.apache.lucene.search.cache.IntValuesCreator;
import org.apache.lucene.search.cache.LongValuesCreator;
import org.apache.lucene.search.cache.ShortValuesCreator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.Bits;
@ -139,7 +132,7 @@ public class TestSort extends LuceneTestCase {
Field f = new StringField ("int", data[i][2]);
if (supportsDocValues) {
f = IndexDocValuesField.build(f, ValueType.VAR_INTS);
};
}
doc.add(f);
}
if (data[i][3] != null) {
@ -350,12 +343,12 @@ public class TestSort extends LuceneTestCase {
}
private static class SortMissingLastTestHelper {
CachedArrayCreator<?> creator;
Object min;
Object max;
final SortField sortField;
final Object min;
final Object max;
SortMissingLastTestHelper( CachedArrayCreator<?> c, Object min, Object max ) {
creator = c;
SortMissingLastTestHelper(SortField sortField, Object min, Object max) {
this.sortField = sortField;
this.min = min;
this.max = max;
}
@ -364,27 +357,51 @@ public class TestSort extends LuceneTestCase {
// test sorts where the type of field is specified
public void testSortMissingLast() throws Exception {
SortMissingLastTestHelper[] testers = new SortMissingLastTestHelper[] {
new SortMissingLastTestHelper( new ByteValuesCreator( "byte", null ), Byte.MIN_VALUE, Byte.MAX_VALUE ),
new SortMissingLastTestHelper( new ShortValuesCreator( "short", null ), Short.MIN_VALUE, Short.MAX_VALUE ),
new SortMissingLastTestHelper( new IntValuesCreator( "int", null ), Integer.MIN_VALUE, Integer.MAX_VALUE ),
new SortMissingLastTestHelper( new LongValuesCreator( "long", null ), Long.MIN_VALUE, Long.MAX_VALUE ),
new SortMissingLastTestHelper( new FloatValuesCreator( "float", null ), Float.MIN_VALUE, Float.MAX_VALUE ),
new SortMissingLastTestHelper( new DoubleValuesCreator( "double", null ), Double.MIN_VALUE, Double.MAX_VALUE ),
@SuppressWarnings("boxing")
SortMissingLastTestHelper[] ascendTesters = new SortMissingLastTestHelper[] {
new SortMissingLastTestHelper( new SortField( "byte", SortField.Type.BYTE ), Byte.MIN_VALUE, Byte.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "short", SortField.Type.SHORT ), Short.MIN_VALUE, Short.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "int", SortField.Type.INT ), Integer.MIN_VALUE, Integer.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "long", SortField.Type.LONG ), Long.MIN_VALUE, Long.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "float", SortField.Type.FLOAT ), Float.MIN_VALUE, Float.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "double", SortField.Type.DOUBLE ), Double.MIN_VALUE, Double.MAX_VALUE ),
};
for( SortMissingLastTestHelper t : testers ) {
sort.setSort (new SortField( t.creator, false ), SortField.FIELD_DOC );
assertMatches("creator:"+t.creator, full, queryM, sort, "adbc" );
@SuppressWarnings("boxing")
SortMissingLastTestHelper[] descendTesters = new SortMissingLastTestHelper[] {
new SortMissingLastTestHelper( new SortField( "byte", SortField.Type.BYTE, true ), Byte.MIN_VALUE, Byte.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "short", SortField.Type.SHORT, true ), Short.MIN_VALUE, Short.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "int", SortField.Type.INT, true ), Integer.MIN_VALUE, Integer.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "long", SortField.Type.LONG, true ), Long.MIN_VALUE, Long.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "float", SortField.Type.FLOAT, true ), Float.MIN_VALUE, Float.MAX_VALUE ),
new SortMissingLastTestHelper( new SortField( "double", SortField.Type.DOUBLE, true ), Double.MIN_VALUE, Double.MAX_VALUE ),
};
// Default order: ascending
for(SortMissingLastTestHelper t : ascendTesters) {
sort.setSort(t.sortField, SortField.FIELD_DOC);
assertMatches("sortField:"+t.sortField, full, queryM, sort, "adbc");
sort.setSort (new SortField( t.creator, false ).setMissingValue( t.max ), SortField.FIELD_DOC );
assertMatches("creator:"+t.creator, full, queryM, sort, "bcad" );
sort.setSort(t.sortField.setMissingValue(t.max), SortField.FIELD_DOC);
assertMatches("sortField:"+t.sortField, full, queryM, sort, "bcad");
sort.setSort (new SortField( t.creator, false ).setMissingValue( t.min ), SortField.FIELD_DOC );
assertMatches("creator:"+t.creator, full, queryM, sort, "adbc" );
sort.setSort(t.sortField.setMissingValue(t.min), SortField.FIELD_DOC);
assertMatches("sortField:"+t.sortField, full, queryM, sort, "adbc");
}
// Reverse order: descending (Note: Order for un-valued documents remains the same due to tie breaker: a,d)
for(SortMissingLastTestHelper t : descendTesters) {
sort.setSort(t.sortField, SortField.FIELD_DOC);
assertMatches("sortField:"+t.sortField, full, queryM, sort, "cbad");
sort.setSort(t.sortField.setMissingValue( t.max ), SortField.FIELD_DOC);
assertMatches("sortField:"+t.sortField, full, queryM, sort, "adcb");
sort.setSort(t.sortField.setMissingValue( t.min ), SortField.FIELD_DOC);
assertMatches("sortField:"+t.sortField, full, queryM, sort, "cbad");
}
}
/**
* Test String sorting: small queue to many matches, multi field sort, reverse sort
*/
@ -572,7 +589,7 @@ public class TestSort extends LuceneTestCase {
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
docValues = FieldCache.DEFAULT.getInts(context.reader, "parser", testIntParser);
docValues = FieldCache.DEFAULT.getInts(context.reader, "parser", testIntParser, false);
return this;
}
@ -1064,7 +1081,7 @@ public class TestSort extends LuceneTestCase {
private void assertMatches(String msg, IndexSearcher searcher, Query query, Sort sort,
String expectedResult) throws IOException {
//ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
TopDocs hits = searcher.search (query, null, Math.max(1, expectedResult.length()), sort);
TopDocs hits = searcher.search(query, null, Math.max(1, expectedResult.length()), sort);
ScoreDoc[] result = hits.scoreDocs;
assertEquals(expectedResult.length(),hits.totalHits);
StringBuilder buff = new StringBuilder(10);
@ -1076,7 +1093,7 @@ public class TestSort extends LuceneTestCase {
buff.append (v[j].stringValue());
}
}
assertEquals (msg, expectedResult, buff.toString());
assertEquals(msg, expectedResult, buff.toString());
}
public void testEmptyStringVsNullStringSort() throws Exception {

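TestSort's missing-value helpers above exercise the replacement API: the sort type and direction now live on SortField itself, and setMissingValue controls where un-valued documents land. A minimal sketch, assuming the test's "int" field:

// Ascending int sort that pushes docs without the field to the end:
SortField sf = new SortField("int", SortField.Type.INT);
sf.setMissingValue(Integer.MAX_VALUE);
Sort sort = new Sort(sf, SortField.FIELD_DOC); // doc order breaks ties
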
View File

@ -1,234 +0,0 @@
package org.apache.lucene.search.cache;
/**
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.FieldCache.*;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.FixedBitSet;
import org.junit.BeforeClass;
import static org.hamcrest.CoreMatchers.*;
public class TestEntryCreators extends LuceneTestCase {
protected IndexReader reader;
private static int NUM_DOCS;
private Directory directory;
@BeforeClass
public static void beforeClass() throws Exception {
NUM_DOCS = atLeast(500);
}
static class NumberTypeTester {
String funcName;
Class<? extends CachedArrayCreator> creator;
Class<? extends Parser> parser;
String field;
Number[] values;
public NumberTypeTester( String f, String func, Class<? extends CachedArrayCreator> creator, Class<? extends Parser> parser ) {
field = f;
funcName = func;
this.creator = creator;
this.parser = parser;
values = new Number[NUM_DOCS];
}
@Override
public String toString()
{
return field;
}
}
private NumberTypeTester[] typeTests;
@Override
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
RandomIndexWriter writer= new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
typeTests = new NumberTypeTester[] {
new NumberTypeTester( "theRandomByte", "getBytes", ByteValuesCreator.class, ByteParser.class ),
new NumberTypeTester( "theRandomShort", "getShorts", ShortValuesCreator.class, ShortParser.class ),
new NumberTypeTester( "theRandomInt", "getInts", IntValuesCreator.class, IntParser.class ),
new NumberTypeTester( "theRandomLong", "getLongs", LongValuesCreator.class, LongParser.class ),
new NumberTypeTester( "theRandomFloat", "getFloats", FloatValuesCreator.class, FloatParser.class ),
new NumberTypeTester( "theRandomDouble", "getDoubles", DoubleValuesCreator.class, DoubleParser.class ),
};
for (int i = 0; i < NUM_DOCS; i++){
Document doc = new Document();
// Test the valid bits
for( NumberTypeTester tester : typeTests ) {
if (random.nextInt(20) != 17 && i > 1) {
tester.values[i] = 10 + random.nextInt( 20 ); // get some field overlap
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setTokenized(false);
doc.add(newField(tester.field, String.valueOf(tester.values[i]), customType));
}
}
writer.addDocument(doc);
}
reader = writer.getReader();
writer.close();
}
@Override
public void tearDown() throws Exception {
reader.close();
directory.close();
super.tearDown();
}
public void testKeys() throws IOException {
// Check that the keys are unique for different fields
EntryKey key_1 = new ByteValuesCreator( "field1", null ).getCacheKey();
EntryKey key_2 = new ByteValuesCreator( "field2", null ).getCacheKey();
assertThat("different fields should have a different key", key_1, not(key_2) );
key_1 = new ByteValuesCreator( "field1", null ).getCacheKey();
key_2 = new ShortValuesCreator( "field1", null ).getCacheKey();
assertThat( "same field different type should have different key", key_1, not( key_2 ) );
key_1 = new ByteValuesCreator( "ff", null ).getCacheKey();
key_2 = new ByteValuesCreator( "ff", null ).getCacheKey();
assertThat( "same args should have same key", key_1, is( key_2 ) );
key_1 = new ByteValuesCreator( "ff", null, ByteValuesCreator.OPTION_CACHE_BITS ^ ByteValuesCreator.OPTION_CACHE_VALUES ).getCacheKey();
key_2 = new ByteValuesCreator( "ff", null ).getCacheKey();
assertThat( "different options should share same key", key_1, is( key_2 ) );
key_1 = new IntValuesCreator( "ff", FieldCache.DEFAULT_INT_PARSER ).getCacheKey();
key_2 = new IntValuesCreator( "ff", FieldCache.NUMERIC_UTILS_INT_PARSER ).getCacheKey();
assertThat( "diferent parser should have same key", key_1, is( key_2 ) );
}
private CachedArray getWithReflection( FieldCache cache, NumberTypeTester tester, int flags ) throws IOException
{
try {
Method getXXX = cache.getClass().getMethod( tester.funcName, IndexReader.class, String.class, EntryCreator.class );
Constructor constructor = tester.creator.getConstructor( String.class, tester.parser, Integer.TYPE );
CachedArrayCreator creator = (CachedArrayCreator)constructor.newInstance( tester.field, null, flags );
return (CachedArray) getXXX.invoke(cache, reader, tester.field, creator );
}
catch( Exception ex ) {
throw new RuntimeException( "Reflection failed", ex );
}
}
public void testCachedArrays() throws IOException
{
FieldCache cache = FieldCache.DEFAULT;
// Check the Different CachedArray Types
CachedArray last = null;
CachedArray justbits = null;
String field;
for( NumberTypeTester tester : typeTests ) {
justbits = getWithReflection( cache, tester, CachedArrayCreator.OPTION_CACHE_BITS );
assertNull( "should not get values : "+tester, justbits.getRawArray() );
assertNotNull( "should get bits : "+tester, justbits.valid );
last = getWithReflection( cache, tester, CachedArrayCreator.CACHE_VALUES_AND_BITS );
assertEquals( "should use same cached object : "+tester, justbits, last );
assertNull( "Validate=false shoudl not regenerate : "+tester, justbits.getRawArray() );
last = getWithReflection( cache, tester, CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE );
assertEquals( "should use same cached object : "+tester, justbits, last );
assertNotNull( "Validate=true should add the Array : "+tester, justbits.getRawArray() );
checkCachedArrayValuesAndBits( tester, last );
}
// Now switch the parser (for the same type) and expect an error
cache.purgeAllCaches();
int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE;
field = "theRandomInt";
last = cache.getInts(reader, field, new IntValuesCreator( field, FieldCache.DEFAULT_INT_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[2], last );
try {
cache.getInts(reader, field, new IntValuesCreator( field, FieldCache.NUMERIC_UTILS_INT_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
field = "theRandomLong";
last = cache.getLongs(reader, field, new LongValuesCreator( field, FieldCache.DEFAULT_LONG_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[3], last );
try {
cache.getLongs(reader, field, new LongValuesCreator( field, FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
field = "theRandomFloat";
last = cache.getFloats(reader, field, new FloatValuesCreator( field, FieldCache.DEFAULT_FLOAT_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[4], last );
try {
cache.getFloats(reader, field, new FloatValuesCreator( field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
field = "theRandomDouble";
last = cache.getDoubles(reader, field, new DoubleValuesCreator( field, FieldCache.DEFAULT_DOUBLE_PARSER, flags ) );
checkCachedArrayValuesAndBits( typeTests[5], last );
try {
cache.getDoubles(reader, field, new DoubleValuesCreator( field, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ) );
fail( "Should fail if you ask for the same type with a different parser : " + field );
} catch( Exception ex ) {} // expected
}
private void checkCachedArrayValuesAndBits( NumberTypeTester tester, CachedArray cachedVals )
{
// for( int i=0; i<NUM_DOCS; i++ ) {
// System.out.println( i + "] "+ tester.values[i] + " :: " + cachedVals.valid.get(i) );
// }
int numDocs = 0;
Set<Number> distinctTerms = new HashSet<Number>();
for( int i=0; i<NUM_DOCS; i++ ) {
Number v = tester.values[i];
boolean isValid = cachedVals.valid.get(i);
if( v != null ) {
numDocs++;
distinctTerms.add( v );
assertTrue( "Valid bit should be true ("+i+"="+tester.values[i]+") "+tester, isValid );
}
else {
assertFalse( "Valid bit should be false ("+i+") "+tester, isValid );
}
}
assertEquals( "Cached numTerms does not match : "+tester, distinctTerms.size(), cachedVals.numTerms );
assertEquals( "Cached numDocs does not match : "+tester, numDocs, cachedVals.numDocs );
assertEquals( "Ordinal should match numDocs : "+tester, numDocs, ((FixedBitSet)cachedVals.valid).cardinality() );
}
}

View File

@ -87,12 +87,12 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
cache.getDoubles(readerA, "theDouble");
cache.getDoubles(readerA, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER);
cache.getDoubles(readerB, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER);
cache.getDoubles(readerA, "theDouble", false);
cache.getDoubles(readerA, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
cache.getDoubles(readerB, "theDouble", FieldCache.DEFAULT_DOUBLE_PARSER, false);
cache.getInts(readerX, "theInt");
cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER);
cache.getInts(readerX, "theInt", false);
cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
// // //
@ -110,9 +110,9 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER);
cache.getInts(readerX, "theInt", FieldCache.DEFAULT_INT_PARSER, false);
cache.getTerms(readerX, "theInt");
cache.getBytes(readerX, "theByte");
cache.getBytes(readerX, "theByte", false);
// // //
@ -138,7 +138,7 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
cache.getTerms(readerB, "theString");
cache.getTerms(readerX, "theString");
cache.getBytes(readerX, "theByte");
cache.getBytes(readerX, "theByte", false);
// // //

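These loads are what the surrounding test feeds to the sanity checker; the checker itself is typically invoked as below (a sketch; it flags the same field cached across overlapping readers or with conflicting parsers):

FieldCacheSanityChecker.Insanity[] insanity =
    FieldCacheSanityChecker.checkSanity(FieldCache.DEFAULT);
// a non-empty result means redundant or conflicting cache entries
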
View File

@ -250,7 +250,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
w.close();
// NOTE: intentional but temporary field cache insanity!
final int[] docIdToFieldId = FieldCache.DEFAULT.getInts(r, "id");
final int[] docIdToFieldId = FieldCache.DEFAULT.getInts(r, "id", false);
final int[] fieldIdToDocID = new int[numDocs];
for (int i = 0; i < docIdToFieldId.length; i++) {
int fieldId = docIdToFieldId[i];

View File

@ -705,7 +705,7 @@ public class TestGrouping extends LuceneTestCase {
w.close();
// NOTE: intentional but temporary field cache insanity!
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id");
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
IndexReader rBlocks = null;
Directory dirBlocks = null;
@ -733,7 +733,7 @@ public class TestGrouping extends LuceneTestCase {
dirBlocks = newDirectory();
rBlocks = getDocBlockReader(dirBlocks, groupDocs);
final Filter lastDocInBlock = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("groupend", "x"))));
final int[] docIDToIDBlocks = FieldCache.DEFAULT.getInts(rBlocks, "id");
final int[] docIDToIDBlocks = FieldCache.DEFAULT.getInts(rBlocks, "id", false);
final IndexSearcher sBlocks = newSearcher(rBlocks);
final ShardState shardsBlocks = new ShardState(sBlocks);

View File

@ -16,14 +16,13 @@ package org.apache.lucene.queries.function.valuesource;
* limitations under the License.
*/
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.search.cache.ByteValuesCreator;
import org.apache.lucene.search.cache.CachedArray.ByteValues;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.search.FieldCache;
/**
* Obtains byte field values from the {@link org.apache.lucene.search.FieldCache}
* using <code>getBytes()</code>
@ -32,10 +31,17 @@ import java.util.Map;
*
*/
public class ByteFieldSource extends NumericFieldCacheSource<ByteValues> {
public class ByteFieldSource extends FieldCacheSource {
public ByteFieldSource(ByteValuesCreator creator) {
super(creator);
private FieldCache.ByteParser parser;
public ByteFieldSource(String field) {
this(field, null);
}
public ByteFieldSource(String field, FieldCache.ByteParser parser) {
super(field);
this.parser = parser;
}
@Override
@ -45,8 +51,7 @@ public class ByteFieldSource extends NumericFieldCacheSource<ByteValues> {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final ByteValues vals = cache.getBytes(readerContext.reader, field, creator);
final byte[] arr = vals.values;
final byte[] arr = cache.getBytes(readerContext.reader, field, parser, false);
return new DocValues() {
@Override
@ -96,4 +101,19 @@ public class ByteFieldSource extends NumericFieldCacheSource<ByteValues> {
};
}
public boolean equals(Object o) {
if (o.getClass() != ByteFieldSource.class) return false;
ByteFieldSource other = (ByteFieldSource) o;
return super.equals(other)
&& (this.parser == null ? other.parser == null :
this.parser.getClass() == other.parser.getClass());
}
public int hashCode() {
int h = parser == null ? Byte.class.hashCode() : parser.getClass().hashCode();
h += super.hashCode();
return h;
}
}

View File

@ -17,20 +17,19 @@
package org.apache.lucene.queries.function.valuesource;
import org.apache.lucene.index.IndexReader;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.cache.DoubleValuesCreator;
import org.apache.lucene.search.cache.CachedArray.DoubleValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDouble;
import java.io.IOException;
import java.util.Map;
/**
* Obtains double field values from the {@link org.apache.lucene.search.FieldCache}
* using <code>getDoubles()</code>
@ -39,23 +38,27 @@ import java.util.Map;
*
*/
public class DoubleFieldSource extends NumericFieldCacheSource<DoubleValues> {
public class DoubleFieldSource extends FieldCacheSource {
public DoubleFieldSource(DoubleValuesCreator creator) {
super(creator);
protected FieldCache.DoubleParser parser;
public DoubleFieldSource(String field) {
this(field, null);
}
public DoubleFieldSource(String field, FieldCache.DoubleParser parser) {
super(field);
this.parser = parser;
}
@Override
public String description() {
return "double(" + field + ')';
}
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final DoubleValues vals = cache.getDoubles(readerContext.reader, field, creator);
final double[] arr = vals.values;
final Bits valid = vals.valid;
final double[] arr = cache.getDoubles(readerContext.reader, field, parser, true);
final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new DoubleDocValues(this) {
@Override
public double doubleVal(int doc) {
@ -148,4 +151,18 @@ public class DoubleFieldSource extends NumericFieldCacheSource<DoubleValues> {
};
}
public boolean equals(Object o) {
if (o.getClass() != DoubleFieldSource.class) return false;
DoubleFieldSource other = (DoubleFieldSource) o;
return super.equals(other)
&& (this.parser == null ? other.parser == null :
this.parser.getClass() == other.parser.getClass());
}
public int hashCode() {
int h = parser == null ? Double.class.hashCode() : parser.getClass().hashCode();
h += super.hashCode();
return h;
}
}

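The pattern introduced here, fetching the value array with setDocsWithField=true and the matching Bits in parallel, is what lets the returned DocValues distinguish a stored 0.0 from an absent field. Roughly (a sketch of the shape, not the full anonymous class above):

final double[] arr = cache.getDoubles(readerContext.reader, field, parser, true);
final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new DoubleDocValues(this) {
  @Override
  public double doubleVal(int doc) {
    return arr[doc];
  }
  @Override
  public boolean exists(int doc) {
    return valid.get(doc); // false when the doc never had the field
  }
};
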
View File

@ -23,9 +23,8 @@ import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.docvalues.FloatDocValues;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.cache.FloatValuesCreator;
import org.apache.lucene.search.cache.CachedArray.FloatValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueFloat;
@ -37,23 +36,28 @@ import org.apache.lucene.util.mutable.MutableValueFloat;
*
*/
public class FloatFieldSource extends NumericFieldCacheSource<FloatValues> {
public class FloatFieldSource extends FieldCacheSource {
public FloatFieldSource(FloatValuesCreator creator) {
super(creator);
protected FieldCache.FloatParser parser;
public FloatFieldSource(String field) {
this(field, null);
}
public FloatFieldSource(String field, FieldCache.FloatParser parser) {
super(field);
this.parser = parser;
}
@Override
public String description() {
return "float(" + field + ')';
}
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final FloatValues vals = cache.getFloats(readerContext.reader, field, creator);
final float[] arr = vals.values;
final Bits valid = vals.valid;
final float[] arr = cache.getFloats(readerContext.reader, field, parser, true);
final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new FloatDocValues(this) {
@Override
public float floatVal(int doc) {
@ -91,4 +95,18 @@ public class FloatFieldSource extends NumericFieldCacheSource<FloatValues> {
};
}
public boolean equals(Object o) {
if (o.getClass() != FloatFieldSource.class) return false;
FloatFieldSource other = (FloatFieldSource)o;
return super.equals(other)
&& (this.parser==null ? other.parser==null :
this.parser.getClass() == other.parser.getClass());
}
public int hashCode() {
int h = parser==null ? Float.class.hashCode() : parser.getClass().hashCode();
h += super.hashCode();
return h;
}
}

View File

@ -17,20 +17,19 @@
package org.apache.lucene.queries.function.valuesource;
import org.apache.lucene.index.IndexReader;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.IntDocValues;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.cache.IntValuesCreator;
import org.apache.lucene.search.cache.CachedArray.IntValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueInt;
import java.io.IOException;
import java.util.Map;
/**
* Obtains int field values from the {@link org.apache.lucene.search.FieldCache}
* using <code>getInts()</code>
@ -38,10 +37,16 @@ import java.util.Map;
*
*/
public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
public class IntFieldSource extends FieldCacheSource {
final FieldCache.IntParser parser;
public IntFieldSource(IntValuesCreator creator) {
super(creator);
public IntFieldSource(String field) {
this(field, null);
}
public IntFieldSource(String field, FieldCache.IntParser parser) {
super(field);
this.parser = parser;
}
@Override
@ -52,9 +57,8 @@ public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final IntValues vals = cache.getInts(readerContext.reader, field, creator);
final int[] arr = vals.values;
final Bits valid = vals.valid;
final int[] arr = cache.getInts(readerContext.reader, field, parser, true);
final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new IntDocValues(this) {
final MutableValueInt val = new MutableValueInt();
@ -155,4 +159,18 @@ public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
};
}
public boolean equals(Object o) {
if (o.getClass() != IntFieldSource.class) return false;
IntFieldSource other = (IntFieldSource)o;
return super.equals(other)
&& (this.parser==null ? other.parser==null :
this.parser.getClass() == other.parser.getClass());
}
public int hashCode() {
int h = parser==null ? Integer.class.hashCode() : parser.getClass().hashCode();
h += super.hashCode();
return h;
}
}

View File

@ -17,21 +17,19 @@
package org.apache.lucene.queries.function.valuesource;
import org.apache.lucene.index.IndexReader;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.LongDocValues;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.cache.LongValuesCreator;
import org.apache.lucene.search.cache.CachedArray.LongValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueLong;
import java.io.IOException;
import java.util.Map;
/**
* Obtains long field values from the {@link org.apache.lucene.search.FieldCache}
* using <code>getLongs()</code>
@ -40,10 +38,17 @@ import java.util.Map;
*
*/
public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
public class LongFieldSource extends FieldCacheSource {
public LongFieldSource(LongValuesCreator creator) {
super(creator);
protected FieldCache.LongParser parser;
public LongFieldSource(String field) {
this(field, null);
}
public LongFieldSource(String field, FieldCache.LongParser parser) {
super(field);
this.parser = parser;
}
@Override
@ -61,9 +66,8 @@ public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final LongValues vals = cache.getLongs(readerContext.reader, field, creator);
final long[] arr = vals.values;
final Bits valid = vals.valid;
final long[] arr = cache.getLongs(readerContext.reader, field, parser, true);
final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new LongDocValues(this) {
@Override
@ -141,4 +145,17 @@ public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
return new MutableValueLong();
}
public boolean equals(Object o) {
if (o.getClass() != this.getClass()) return false;
LongFieldSource other = (LongFieldSource) o;
return super.equals(other)
&& (this.parser == null ? other.parser == null :
this.parser.getClass() == other.parser.getClass());
}
public int hashCode() {
int h = parser == null ? this.getClass().hashCode() : parser.getClass().hashCode();
h += super.hashCode();
return h;
}
}

View File

@ -1,50 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.queries.function.valuesource;
import org.apache.lucene.search.cache.CachedArray;
import org.apache.lucene.search.cache.CachedArrayCreator;
/**
*
*
*/
public abstract class NumericFieldCacheSource<T extends CachedArray> extends FieldCacheSource {
protected final CachedArrayCreator<T> creator;
public NumericFieldCacheSource( CachedArrayCreator<T> creator ) {
super( creator.field );
this.creator = creator;
}
@Override
public final boolean equals(Object o) {
if (o.getClass() != this.getClass()) return false;
NumericFieldCacheSource other = (NumericFieldCacheSource) o;
return super.equals(other)
&& (this.creator == null ? other.creator == null :
this.creator.getClass() == other.creator.getClass());
}
@Override
public final int hashCode() {
int h = creator == null ? this.getClass().hashCode() : creator.getClass().hashCode();
h += super.hashCode();
return h;
}
}

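With this base class removed, each field source carries the equals/hashCode shown above, comparing the parser's class instead of a creator instance, so two sources over the same field and parser type act as the same cache key. For instance ("date" is a hypothetical field):

ValueSource a = new LongFieldSource("date", FieldCache.NUMERIC_UTILS_LONG_PARSER);
ValueSource b = new LongFieldSource("date", FieldCache.NUMERIC_UTILS_LONG_PARSER);
assert a.equals(b) && a.hashCode() == b.hashCode();
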
View File

@ -16,25 +16,30 @@ package org.apache.lucene.queries.function.valuesource;
* limitations under the License.
*/
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.search.cache.ShortValuesCreator;
import org.apache.lucene.search.cache.CachedArray.ShortValues;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.search.FieldCache;
/**
*
*
**/
public class ShortFieldSource extends NumericFieldCacheSource<ShortValues> {
public class ShortFieldSource extends FieldCacheSource {
public ShortFieldSource(ShortValuesCreator creator) {
super(creator);
final FieldCache.ShortParser parser;
public ShortFieldSource(String field) {
this(field, null);
}
public ShortFieldSource(String field, FieldCache.ShortParser parser) {
super(field);
this.parser = parser;
}
@Override
public String description() {
@ -43,8 +48,7 @@ public class ShortFieldSource extends NumericFieldCacheSource<ShortValues> {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final ShortValues vals = cache.getShorts(readerContext.reader, field, creator);
final short[] arr = vals.values;
final short[] arr = cache.getShorts(readerContext.reader, field, parser, false);
return new DocValues() {
@Override
@ -89,4 +93,19 @@ public class ShortFieldSource extends NumericFieldCacheSource<ShortValues> {
};
}
public boolean equals(Object o) {
if (o.getClass() != ShortFieldSource.class) return false;
ShortFieldSource other = (ShortFieldSource) o;
return super.equals(other)
&& (this.parser == null ? other.parser == null :
this.parser.getClass() == other.parser.getClass());
}
public int hashCode() {
int h = parser == null ? Short.class.hashCode() : parser.getClass().hashCode();
h += super.hashCode();
return h;
}
}

View File

@ -22,7 +22,6 @@ import org.apache.lucene.queries.function.FunctionTestSetup;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.search.*;
import org.apache.lucene.search.cache.*;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
@ -77,9 +76,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
@Test
public void testCustomScoreFloat() throws Exception {
// INT field can be parsed as float
FloatValuesCreator valuesCreator = new FloatValuesCreator(INT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
FloatFieldSource fieldSource = new FloatFieldSource(valuesCreator);
doTestCustomScore(INT_AS_FLOAT_VALUESOURCE, 1.0);
doTestCustomScore(INT_AS_FLOAT_VALUESOURCE, 5.0);
@ -177,7 +173,7 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) throws IOException {
final int[] values = FieldCache.DEFAULT.getInts(context.reader, INT_FIELD);
final int[] values = FieldCache.DEFAULT.getInts(context.reader, INT_FIELD, false);
return new CustomScoreProvider(context) {
@Override
public float customScore(int doc, float subScore, float valSrcScore) throws IOException {
@ -237,8 +233,8 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
// Test that FieldScoreQuery returns docs with expected score.
private void doTestCustomScore(ValueSource valueSource, double dboost) throws Exception {
FunctionQuery functionQuery = new FunctionQuery(valueSource);
float boost = (float) dboost;
FunctionQuery functionQuery = new FunctionQuery(valueSource);
IndexSearcher s = new IndexSearcher(dir, true);
// regular (boolean) query.

View File

@ -12,7 +12,6 @@ import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
import org.apache.lucene.search.cache.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@ -54,13 +53,11 @@ public abstract class FunctionTestSetup extends LuceneTestCase {
protected static final String INT_FIELD = "iii";
protected static final String FLOAT_FIELD = "fff";
private static final int CREATOR_FLAGS = CachedArrayCreator.CACHE_VALUES_AND_BITS;
protected ValueSource BYTE_VALUESOURCE = new ByteFieldSource(new ByteValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
protected ValueSource SHORT_VALUESOURCE = new ShortFieldSource(new ShortValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
protected ValueSource INT_VALUESOURCE = new IntFieldSource(new IntValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
protected ValueSource INT_AS_FLOAT_VALUESOURCE = new FloatFieldSource(new FloatValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
protected ValueSource FLOAT_VALUESOURCE = new FloatFieldSource(new FloatValuesCreator(FLOAT_FIELD, null, CREATOR_FLAGS));
protected ValueSource BYTE_VALUESOURCE = new ByteFieldSource(INT_FIELD);
protected ValueSource SHORT_VALUESOURCE = new ShortFieldSource(INT_FIELD);
protected ValueSource INT_VALUESOURCE = new IntFieldSource(INT_FIELD);
protected ValueSource INT_AS_FLOAT_VALUESOURCE = new FloatFieldSource(INT_FIELD);
protected ValueSource FLOAT_VALUESOURCE = new FloatFieldSource(FLOAT_FIELD);
private static final String DOC_TEXT_LINES[] = {
"Well, this is just some plain text we use for creating the ",

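These constants show the simplified construction path: a field source now takes just the field name, with an overload accepting a parser when a specific numeric encoding is needed. For example:

ValueSource plain  = new IntFieldSource("iii");                                    // default parser
ValueSource parsed = new FloatFieldSource("fff", FieldCache.DEFAULT_FLOAT_PARSER); // explicit parser
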
View File

@ -27,7 +27,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.cache.*;
import org.junit.BeforeClass;
import org.junit.Test;

View File

@ -20,8 +20,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.cache.ByteValuesCreator;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@ -48,7 +46,7 @@ public class ByteField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new ByteFieldSource( new ByteValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
return new ByteFieldSource(field.name);
}
@Override

View File

@ -21,8 +21,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.DoubleValuesCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@ -48,7 +46,7 @@ public class DoubleField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new DoubleFieldSource( new DoubleValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
return new DoubleFieldSource(field.name);
}
@Override

View File

@ -20,8 +20,6 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.FloatValuesCreator;
import org.apache.solr.search.QParser;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.response.TextResponseWriter;
@ -46,7 +44,7 @@ public class FloatField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new FloatFieldSource( new FloatValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
return new FloatFieldSource(field.name);
}
@Override

View File

@ -20,8 +20,6 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.IntValuesCreator;
import org.apache.solr.search.QParser;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.response.TextResponseWriter;
@ -46,7 +44,7 @@ public class IntField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new IntFieldSource(new IntValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
return new IntFieldSource(field.name);
}
@Override

View File

@ -21,8 +21,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.LongValuesCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@ -48,7 +46,7 @@ public class LongField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new LongFieldSource( new LongValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
return new LongFieldSource(field.name);
}
@Override

View File

@ -20,8 +20,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.ShortValuesCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@ -51,7 +49,7 @@ public class ShortField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
return new ShortFieldSource(new ShortValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
return new ShortFieldSource(field.name);
}
@Override

View File

@ -20,16 +20,12 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.*;
import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.lucene.search.cache.DoubleValuesCreator;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.search.cache.FloatValuesCreator;
import org.apache.lucene.search.cache.IntValuesCreator;
import org.apache.lucene.search.cache.LongValuesCreator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.NumericUtils;
@ -137,7 +133,6 @@ public class TrieField extends org.apache.solr.schema.FieldType {
public SortField getSortField(SchemaField field, boolean top) {
field.checkSortability();
int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS;
Object missingValue = null;
boolean sortMissingLast = field.sortMissingLast();
boolean sortMissingFirst = field.sortMissingFirst();
@ -150,8 +145,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Integer.MAX_VALUE : Integer.MIN_VALUE;
}
return new SortField( new IntValuesCreator( field.getName(),
FieldCache.NUMERIC_UTILS_INT_PARSER, flags ), top).setMissingValue( missingValue );
return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER, top).setMissingValue(missingValue);
case FLOAT:
if( sortMissingLast ) {
@ -160,8 +154,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Float.POSITIVE_INFINITY : Float.NEGATIVE_INFINITY;
}
return new SortField( new FloatValuesCreator( field.getName(),
FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ), top).setMissingValue( missingValue );
return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER, top).setMissingValue(missingValue);
case DATE: // fallthrough
case LONG:
@ -171,8 +164,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Long.MAX_VALUE : Long.MIN_VALUE;
}
return new SortField( new LongValuesCreator( field.getName(),
FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ), top).setMissingValue( missingValue );
return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, top).setMissingValue(missingValue);
case DOUBLE:
if( sortMissingLast ) {
@ -181,8 +173,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
}
return new SortField( new DoubleValuesCreator( field.getName(),
FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ), top).setMissingValue( missingValue );
return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, top).setMissingValue(missingValue);
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
@ -192,18 +183,17 @@ public class TrieField extends org.apache.solr.schema.FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS;
switch (type) {
case INTEGER:
return new IntFieldSource( new IntValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER, flags ) );
return new IntFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER );
case FLOAT:
return new FloatFieldSource( new FloatValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ));
return new FloatFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER );
case DATE:
return new TrieDateFieldSource( new LongValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ));
return new TrieDateFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
case LONG:
return new LongFieldSource( new LongValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ) );
return new LongFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
case DOUBLE:
return new DoubleFieldSource( new DoubleValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ));
return new DoubleFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER );
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
}
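
For the trie cases the SortField constructor taking a parser replaces the ValuesCreator wrapper, and missing-value handling chains on as before. A sketch for the LONG case (sortMissingLast, ascending):

SortField sf = new SortField(field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, top)
    .setMissingValue(Long.MAX_VALUE); // assumes top == false, i.e. missing docs sort last
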
@ -573,8 +563,8 @@ public class TrieField extends org.apache.solr.schema.FieldType {
class TrieDateFieldSource extends LongFieldSource {
public TrieDateFieldSource(LongValuesCreator creator) {
super(creator);
public TrieDateFieldSource(String field, FieldCache.LongParser parser) {
super(field, parser);
}
@Override