creator ) throws IOException;
-
-
/** Returned by {@link #getTerms} */
public abstract static class DocTerms {
/** The BytesRef argument must not be null; the method
@@ -644,7 +592,6 @@ public interface FieldCache {
public DocTermsIndex getTermsIndex (IndexReader reader, String field)
throws IOException;
-
/** Expert: just like {@link
* #getTermsIndex(IndexReader,String)}, but you can specify
* whether more RAM should be consumed in exchange for
diff --git a/lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java b/lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java
index 9332b03caba..c49d061fbac 100644
--- a/lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java
+++ b/lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java
@@ -17,28 +17,42 @@ package org.apache.lucene.search;
* limitations under the License.
*/
-import org.apache.lucene.index.DocTermOrds;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.cache.*;
-import org.apache.lucene.search.cache.CachedArray.*;
-import org.apache.lucene.util.FieldCacheSanityChecker;
-
import java.io.IOException;
import java.io.PrintStream;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.WeakHashMap;
+
+import org.apache.lucene.index.DocTermOrds;
+import org.apache.lucene.index.DocsAndPositionsEnum;
+import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.OrdTermState;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.FieldCacheSanityChecker;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.PagedBytes;
+import org.apache.lucene.util.packed.GrowableWriter;
+import org.apache.lucene.util.packed.PackedInts;
/**
* Expert: The default cache implementation, storing all values in memory.
* A WeakHashMap is used for storage.
*
* Created: May 19, 2004 4:40:36 PM
- *
- * @lucene.internal -- this is now public so that the tests can use reflection
- * to call methods. It will likely be removed without (much) notice.
- *
+ *
* @since lucene 1.4
*/
-public class FieldCacheImpl implements FieldCache { // Made Public so that
+class FieldCacheImpl implements FieldCache {
private Map<Class<?>,Cache> caches;
FieldCacheImpl() {
@@ -46,17 +60,18 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
}
private synchronized void init() {
caches = new HashMap<Class<?>,Cache>(9);
- caches.put(Byte.TYPE, new Cache(this));
- caches.put(Short.TYPE, new Cache(this));
- caches.put(Integer.TYPE, new Cache(this));
- caches.put(Float.TYPE, new Cache(this));
- caches.put(Long.TYPE, new Cache(this));
- caches.put(Double.TYPE, new Cache(this));
- caches.put(DocTermsIndex.class, new Cache(this));
- caches.put(DocTerms.class, new Cache(this));
- caches.put(DocTermOrds.class, new Cache(this));
+ caches.put(Byte.TYPE, new ByteCache(this));
+ caches.put(Short.TYPE, new ShortCache(this));
+ caches.put(Integer.TYPE, new IntCache(this));
+ caches.put(Float.TYPE, new FloatCache(this));
+ caches.put(Long.TYPE, new LongCache(this));
+ caches.put(Double.TYPE, new DoubleCache(this));
+ caches.put(DocTerms.class, new DocTermsCache(this));
+ caches.put(DocTermsIndex.class, new DocTermsIndexCache(this));
+ caches.put(DocTermOrds.class, new DocTermOrdsCache(this));
+ caches.put(DocsWithFieldCache.class, new DocsWithFieldCache(this));
}
-
+
public synchronized void purgeAllCaches() {
init();
}
@@ -70,15 +85,17 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
public synchronized CacheEntry[] getCacheEntries() {
List<CacheEntry> result = new ArrayList<CacheEntry>(17);
for(final Map.Entry<Class<?>,Cache> cacheEntry: caches.entrySet()) {
- final Cache<?> cache = cacheEntry.getValue();
+ final Cache cache = cacheEntry.getValue();
final Class<?> cacheType = cacheEntry.getKey();
synchronized(cache.readerCache) {
- for( Object readerKey : cache.readerCache.keySet() ) {
- Map<Entry<?>, Object> innerCache = cache.readerCache.get(readerKey);
- for (final Map.Entry<Entry<?>, Object> mapEntry : innerCache.entrySet()) {
- Entry entry = (Entry)mapEntry.getKey();
+ for (final Map.Entry<Object,Map<Entry, Object>> readerCacheEntry : cache.readerCache.entrySet()) {
+ final Object readerKey = readerCacheEntry.getKey();
+ if (readerKey == null) continue;
+ final Map<Entry, Object> innerCache = readerCacheEntry.getValue();
+ for (final Map.Entry<Entry, Object> mapEntry : innerCache.entrySet()) {
+ Entry entry = mapEntry.getKey();
result.add(new CacheEntryImpl(readerKey, entry.field,
- cacheType, entry.creator,
+ cacheType, entry.custom,
mapEntry.getValue()));
}
}
@@ -91,11 +108,11 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
private final Object readerKey;
private final String fieldName;
private final Class<?> cacheType;
- private final EntryCreator custom;
+ private final Object custom;
private final Object value;
CacheEntryImpl(Object readerKey, String fieldName,
Class<?> cacheType,
- EntryCreator custom,
+ Object custom,
Object value) {
this.readerKey = readerKey;
this.fieldName = fieldName;
@@ -121,30 +138,37 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
public Object getValue() { return value; }
}
+ /**
+ * Hack: When thrown from a Parser (NUMERIC_UTILS_* ones), this stops
+ * processing terms and returns the current FieldCache
+ * array.
+ */
+ static final class StopFillCacheException extends RuntimeException {
+ }
+
final static IndexReader.ReaderFinishedListener purgeReader = new IndexReader.ReaderFinishedListener() {
- @Override
+ // @Override -- not until Java 1.6
public void finished(IndexReader reader) {
FieldCache.DEFAULT.purge(reader);
}
};
/** Expert: Internal cache. */
- final static class Cache<T> {
+ abstract static class Cache {
Cache() {
this.wrapper = null;
}
- Cache(FieldCache wrapper) {
+ Cache(FieldCacheImpl wrapper) {
this.wrapper = wrapper;
}
- final FieldCache wrapper;
+ final FieldCacheImpl wrapper;
- final Map<Object,Map<Entry<T>,Object>> readerCache = new WeakHashMap<Object,Map<Entry<T>,Object>>();
-
- protected Object createValue(IndexReader reader, Entry entryKey) throws IOException {
- return entryKey.creator.create( reader );
- }
+ final Map<Object,Map<Entry,Object>> readerCache = new WeakHashMap<Object,Map<Entry,Object>>();
+
+ protected abstract Object createValue(IndexReader reader, Entry key, boolean setDocsWithField)
+ throws IOException;
/** Remove this reader from the cache, if present. */
public void purge(IndexReader r) {
@@ -154,16 +178,36 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
}
}
- @SuppressWarnings("unchecked")
- public Object get(IndexReader reader, Entry key) throws IOException {
- Map<Entry<T>,Object> innerCache;
+ /** Sets the key to the value for the provided reader;
+ * if the key is already set then this doesn't change it. */
+ public void put(IndexReader reader, Entry key, Object value) {
+ final Object readerKey = reader.getCoreCacheKey();
+ synchronized (readerCache) {
+ Map<Entry,Object> innerCache = readerCache.get(readerKey);
+ if (innerCache == null) {
+ // First time this reader is using FieldCache
+ innerCache = new HashMap<Entry,Object>();
+ readerCache.put(readerKey, innerCache);
+ reader.addReaderFinishedListener(purgeReader);
+ }
+ if (innerCache.get(key) == null) {
+ innerCache.put(key, value);
+ } else {
+ // Another thread beat us to it; leave the current
+ // value
+ }
+ }
+ }
+
+ public Object get(IndexReader reader, Entry key, boolean setDocsWithField) throws IOException {
+ Map<Entry,Object> innerCache;
Object value;
final Object readerKey = reader.getCoreCacheKey();
synchronized (readerCache) {
innerCache = readerCache.get(readerKey);
if (innerCache == null) {
// First time this reader is using FieldCache
- innerCache = new HashMap<Entry<T>,Object>();
+ innerCache = new HashMap<Entry,Object>();
readerCache.put(readerKey, innerCache);
reader.addReaderFinishedListener(purgeReader);
value = null;
@@ -179,7 +223,7 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
synchronized (value) {
CreationPlaceholder progress = (CreationPlaceholder) value;
if (progress.value == null) {
- progress.value = createValue(reader, key);
+ progress.value = createValue(reader, key, setDocsWithField);
synchronized (readerCache) {
innerCache.put(key, progress.value);
}
@@ -187,7 +231,7 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
// Only check if key.custom (the parser) is
// non-null; else, we check twice for a single
// call to FieldCache.getXXX
- if (key.creator != null && wrapper != null) {
+ if (key.custom != null && wrapper != null) {
final PrintStream infoStream = wrapper.getInfoStream();
if (infoStream != null) {
printNewInsanity(infoStream, progress.value);
@@ -197,11 +241,6 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
return progress.value;
}
}
-
- // Validate new entries
- if( key.creator.shouldValidate() ) {
- key.creator.validate( (T)value, reader);
- }
return value;
}
@@ -224,14 +263,14 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
}
/** Expert: Every composite-key in the internal cache is of this type. */
- static class Entry<T> {
+ static class Entry {
final String field; // which Fieldable
- final EntryCreator<T> creator; // which custom comparator or parser
+ final Object custom; // which custom comparator or parser
/** Creates one of these objects for a custom comparator/parser. */
- Entry (String field, EntryCreator<T> custom) {
+ Entry (String field, Object custom) {
this.field = field;
- this.creator = custom;
+ this.custom = custom;
}
/** Two of these are equal iff they reference the same field and type. */
@@ -240,9 +279,9 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
if (o instanceof Entry) {
Entry other = (Entry) o;
if (other.field.equals(field)) {
- if (other.creator == null) {
- if (creator == null) return true;
- } else if (other.creator.equals (creator)) {
+ if (other.custom == null) {
+ if (custom == null) return true;
+ } else if (other.custom.equals (custom)) {
return true;
}
}
@@ -253,134 +292,1012 @@ public class FieldCacheImpl implements FieldCache { // Made Public so that
/** Composes a hashcode based on the field and type. */
@Override
public int hashCode() {
- return field.hashCode() ^ (creator==null ? 0 : creator.hashCode());
+ return field.hashCode() ^ (custom==null ? 0 : custom.hashCode());
}
}
// inherit javadocs
- public byte[] getBytes (IndexReader reader, String field) throws IOException {
- return getBytes(reader, field, new ByteValuesCreator(field, null)).values;
+ public byte[] getBytes (IndexReader reader, String field, boolean setDocsWithField) throws IOException {
+ return getBytes(reader, field, null, setDocsWithField);
}
// inherit javadocs
- public byte[] getBytes(IndexReader reader, String field, ByteParser parser) throws IOException {
- return getBytes(reader, field, new ByteValuesCreator(field, parser)).values;
+ public byte[] getBytes(IndexReader reader, String field, ByteParser parser, boolean setDocsWithField)
+ throws IOException {
+ return (byte[]) caches.get(Byte.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
}
- @SuppressWarnings("unchecked")
- public ByteValues getBytes(IndexReader reader, String field, EntryCreator<ByteValues> creator ) throws IOException
- {
- return (ByteValues)caches.get(Byte.TYPE).get(reader, new Entry(field, creator));
+ static final class ByteCache extends Cache {
+ ByteCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ String field = entryKey.field;
+ ByteParser parser = (ByteParser) entryKey.custom;
+ if (parser == null) {
+ return wrapper.getBytes(reader, field, FieldCache.DEFAULT_BYTE_PARSER, setDocsWithField);
+ }
+ final int maxDoc = reader.maxDoc();
+ final byte[] retArray = new byte[maxDoc];
+ Terms terms = MultiFields.getTerms(reader, field);
+ FixedBitSet docsWithField = null;
+ if (terms != null) {
+ if (setDocsWithField) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ wrapper.setDocsWithField(reader, field, new Bits.MatchAllBits(maxDoc));
+ setDocsWithField = false;
+ }
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ try {
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final byte termval = parser.parseByte(term);
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ retArray[docID] = termval;
+ if (setDocsWithField) {
+ if (docsWithField == null) {
+ // Lazy init
+ docsWithField = new FixedBitSet(maxDoc);
+ }
+ docsWithField.set(docID);
+ }
+ }
+ }
+ } catch (StopFillCacheException stop) {
+ }
+ }
+ if (setDocsWithField) {
+ wrapper.setDocsWithField(reader, field, docsWithField);
+ }
+ return retArray;
+ }
}
// inherit javadocs
- public short[] getShorts (IndexReader reader, String field) throws IOException {
- return getShorts(reader, field, new ShortValuesCreator(field,null)).values;
+ public short[] getShorts (IndexReader reader, String field, boolean setDocsWithField) throws IOException {
+ return getShorts(reader, field, null, setDocsWithField);
}
// inherit javadocs
- public short[] getShorts(IndexReader reader, String field, ShortParser parser) throws IOException {
- return getShorts(reader, field, new ShortValuesCreator(field,parser)).values;
+ public short[] getShorts(IndexReader reader, String field, ShortParser parser, boolean setDocsWithField)
+ throws IOException {
+ return (short[]) caches.get(Short.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
}
- @SuppressWarnings("unchecked")
- public ShortValues getShorts(IndexReader reader, String field, EntryCreator<ShortValues> creator ) throws IOException
- {
- return (ShortValues)caches.get(Short.TYPE).get(reader, new Entry(field, creator));
+ static final class ShortCache extends Cache {
+ ShortCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ String field = entryKey.field;
+ ShortParser parser = (ShortParser) entryKey.custom;
+ if (parser == null) {
+ return wrapper.getShorts(reader, field, FieldCache.DEFAULT_SHORT_PARSER, setDocsWithField);
+ }
+ final int maxDoc = reader.maxDoc();
+ final short[] retArray = new short[maxDoc];
+ Terms terms = MultiFields.getTerms(reader, field);
+ FixedBitSet docsWithField = null;
+ if (terms != null) {
+ if (setDocsWithField) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ wrapper.setDocsWithField(reader, field, new Bits.MatchAllBits(maxDoc));
+ setDocsWithField = false;
+ }
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ try {
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final short termval = parser.parseShort(term);
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ retArray[docID] = termval;
+ if (setDocsWithField) {
+ if (docsWithField == null) {
+ // Lazy init
+ docsWithField = new FixedBitSet(maxDoc);
+ }
+ docsWithField.set(docID);
+ }
+ }
+ }
+ } catch (StopFillCacheException stop) {
+ }
+ }
+ if (setDocsWithField) {
+ wrapper.setDocsWithField(reader, field, docsWithField);
+ }
+ return retArray;
+ }
+ }
+
+ // null Bits means no docs matched
+ void setDocsWithField(IndexReader reader, String field, Bits docsWithField) {
+ final int maxDoc = reader.maxDoc();
+ final Bits bits;
+ if (docsWithField == null) {
+ bits = new Bits.MatchNoBits(maxDoc);
+ } else if (docsWithField instanceof FixedBitSet) {
+ final int numSet = ((FixedBitSet) docsWithField).cardinality();
+ if (numSet >= maxDoc) {
+ // The cardinality of the BitSet is maxDoc if all documents have a value.
+ assert numSet == maxDoc;
+ bits = new Bits.MatchAllBits(maxDoc);
+ } else {
+ bits = docsWithField;
+ }
+ } else {
+ bits = docsWithField;
+ }
+ caches.get(DocsWithFieldCache.class).put(reader, new Entry(field, null), bits);
}
// inherit javadocs
- public int[] getInts (IndexReader reader, String field) throws IOException {
- return getInts(reader, field, new IntValuesCreator( field, null )).values;
+ public int[] getInts (IndexReader reader, String field, boolean setDocsWithField) throws IOException {
+ return getInts(reader, field, null, setDocsWithField);
}
// inherit javadocs
- public int[] getInts(IndexReader reader, String field, IntParser parser) throws IOException {
- return getInts(reader, field, new IntValuesCreator( field, parser )).values;
+ public int[] getInts(IndexReader reader, String field, IntParser parser, boolean setDocsWithField)
+ throws IOException {
+ return (int[]) caches.get(Integer.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
}
- @SuppressWarnings("unchecked")
- public IntValues getInts(IndexReader reader, String field, EntryCreator<IntValues> creator ) throws IOException {
- return (IntValues)caches.get(Integer.TYPE).get(reader, new Entry(field, creator));
+ static final class IntCache extends Cache {
+ IntCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ String field = entryKey.field;
+ IntParser parser = (IntParser) entryKey.custom;
+ if (parser == null) {
+ try {
+ return wrapper.getInts(reader, field, DEFAULT_INT_PARSER, setDocsWithField);
+ } catch (NumberFormatException ne) {
+ return wrapper.getInts(reader, field, NUMERIC_UTILS_INT_PARSER, setDocsWithField);
+ }
+ }
+ final int maxDoc = reader.maxDoc();
+ int[] retArray = null;
+
+ Terms terms = MultiFields.getTerms(reader, field);
+ FixedBitSet docsWithField = null;
+ if (terms != null) {
+ if (setDocsWithField) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ wrapper.setDocsWithField(reader, field, new Bits.MatchAllBits(maxDoc));
+ setDocsWithField = false;
+ }
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ try {
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final int termval = parser.parseInt(term);
+ if (retArray == null) {
+ // late init so numeric fields don't double allocate
+ retArray = new int[maxDoc];
+ }
+
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ retArray[docID] = termval;
+ if (setDocsWithField) {
+ if (docsWithField == null) {
+ // Lazy init
+ docsWithField = new FixedBitSet(maxDoc);
+ }
+ docsWithField.set(docID);
+ }
+ }
+ }
+ } catch (StopFillCacheException stop) {
+ }
+ }
+
+ if (retArray == null) {
+ // no values
+ retArray = new int[maxDoc];
+ }
+ if (setDocsWithField) {
+ wrapper.setDocsWithField(reader, field, docsWithField);
+ }
+ return retArray;
+ }
+ }
+
+ public Bits getDocsWithField(IndexReader reader, String field)
+ throws IOException {
+ return (Bits) caches.get(DocsWithFieldCache.class).get(reader, new Entry(field, null), false);
+ }
+
+ static final class DocsWithFieldCache extends Cache {
+ DocsWithFieldCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ final String field = entryKey.field;
+ FixedBitSet res = null;
+ Terms terms = MultiFields.getTerms(reader, field);
+ final int maxDoc = reader.maxDoc();
+ if (terms != null) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ return new Bits.MatchAllBits(maxDoc);
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ if (res == null) {
+ // lazy init
+ res = new FixedBitSet(maxDoc);
+ }
+
+ docs = termsEnum.docs(null, docs);
+ // TODO: use bulk API
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ res.set(docID);
+ }
+ }
+ }
+ if (res == null) {
+ return new Bits.MatchNoBits(maxDoc);
+ }
+ final int numSet = res.cardinality();
+ if (numSet >= maxDoc) {
+ // The cardinality of the BitSet is maxDoc if all documents have a value.
+ assert numSet == maxDoc;
+ return new Bits.MatchAllBits(maxDoc);
+ }
+ return res;
+ }
+ }
+
+ // inherit javadocs
+ public float[] getFloats (IndexReader reader, String field, boolean setDocsWithField)
+ throws IOException {
+ return getFloats(reader, field, null, setDocsWithField);
+ }
+
+ // inherit javadocs
+ public float[] getFloats(IndexReader reader, String field, FloatParser parser, boolean setDocsWithField)
+ throws IOException {
+
+ return (float[]) caches.get(Float.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
+ }
+
+ static final class FloatCache extends Cache {
+ FloatCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ String field = entryKey.field;
+ FloatParser parser = (FloatParser) entryKey.custom;
+ if (parser == null) {
+ try {
+ return wrapper.getFloats(reader, field, DEFAULT_FLOAT_PARSER, setDocsWithField);
+ } catch (NumberFormatException ne) {
+ return wrapper.getFloats(reader, field, NUMERIC_UTILS_FLOAT_PARSER, setDocsWithField);
+ }
+ }
+ final int maxDoc = reader.maxDoc();
+ float[] retArray = null;
+
+ Terms terms = MultiFields.getTerms(reader, field);
+ FixedBitSet docsWithField = null;
+ if (terms != null) {
+ if (setDocsWithField) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ wrapper.setDocsWithField(reader, field, new Bits.MatchAllBits(maxDoc));
+ setDocsWithField = false;
+ }
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ try {
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final float termval = parser.parseFloat(term);
+ if (retArray == null) {
+ // late init so numeric fields don't double allocate
+ retArray = new float[maxDoc];
+ }
+
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ retArray[docID] = termval;
+ if (setDocsWithField) {
+ if (docsWithField == null) {
+ // Lazy init
+ docsWithField = new FixedBitSet(maxDoc);
+ }
+ docsWithField.set(docID);
+ }
+ }
+ }
+ } catch (StopFillCacheException stop) {
+ }
+ }
+
+ if (retArray == null) {
+ // no values
+ retArray = new float[maxDoc];
+ }
+ if (setDocsWithField) {
+ wrapper.setDocsWithField(reader, field, docsWithField);
+ }
+ return retArray;
+ }
+ }
+
+
+ public long[] getLongs(IndexReader reader, String field, boolean setDocsWithField) throws IOException {
+ return getLongs(reader, field, null, setDocsWithField);
}
// inherit javadocs
- public float[] getFloats (IndexReader reader, String field) throws IOException {
- return getFloats(reader, field, new FloatValuesCreator( field, null ) ).values;
+ public long[] getLongs(IndexReader reader, String field, FieldCache.LongParser parser, boolean setDocsWithField)
+ throws IOException {
+ return (long[]) caches.get(Long.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
+ }
+
+ static final class LongCache extends Cache {
+ LongCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ String field = entryKey.field;
+ FieldCache.LongParser parser = (FieldCache.LongParser) entryKey.custom;
+ if (parser == null) {
+ try {
+ return wrapper.getLongs(reader, field, DEFAULT_LONG_PARSER, setDocsWithField);
+ } catch (NumberFormatException ne) {
+ return wrapper.getLongs(reader, field, NUMERIC_UTILS_LONG_PARSER, setDocsWithField);
+ }
+ }
+ final int maxDoc = reader.maxDoc();
+ long[] retArray = null;
+
+ Terms terms = MultiFields.getTerms(reader, field);
+ FixedBitSet docsWithField = null;
+ if (terms != null) {
+ if (setDocsWithField) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ wrapper.setDocsWithField(reader, field, new Bits.MatchAllBits(maxDoc));
+ setDocsWithField = false;
+ }
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ try {
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final long termval = parser.parseLong(term);
+ if (retArray == null) {
+ // late init so numeric fields don't double allocate
+ retArray = new long[maxDoc];
+ }
+
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ retArray[docID] = termval;
+ if (setDocsWithField) {
+ if (docsWithField == null) {
+ // Lazy init
+ docsWithField = new FixedBitSet(maxDoc);
+ }
+ docsWithField.set(docID);
+ }
+ }
+ }
+ } catch (StopFillCacheException stop) {
+ }
+ }
+
+ if (retArray == null) {
+ // no values
+ retArray = new long[maxDoc];
+ }
+ if (setDocsWithField) {
+ wrapper.setDocsWithField(reader, field, docsWithField);
+ }
+ return retArray;
+ }
}
// inherit javadocs
- public float[] getFloats(IndexReader reader, String field, FloatParser parser) throws IOException {
- return getFloats(reader, field, new FloatValuesCreator( field, parser ) ).values;
- }
-
- @SuppressWarnings("unchecked")
- public FloatValues getFloats(IndexReader reader, String field, EntryCreator<FloatValues> creator ) throws IOException {
- return (FloatValues)caches.get(Float.TYPE).get(reader, new Entry(field, creator));
- }
-
- public long[] getLongs(IndexReader reader, String field) throws IOException {
- return getLongs(reader, field, new LongValuesCreator( field, null ) ).values;
+ public double[] getDoubles(IndexReader reader, String field, boolean setDocsWithField)
+ throws IOException {
+ return getDoubles(reader, field, null, setDocsWithField);
}
// inherit javadocs
- public long[] getLongs(IndexReader reader, String field, FieldCache.LongParser parser) throws IOException {
- return getLongs(reader, field, new LongValuesCreator( field, parser ) ).values;
+ public double[] getDoubles(IndexReader reader, String field, FieldCache.DoubleParser parser, boolean setDocsWithField)
+ throws IOException {
+ return (double[]) caches.get(Double.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
}
- @SuppressWarnings("unchecked")
- public LongValues getLongs(IndexReader reader, String field, EntryCreator<LongValues> creator ) throws IOException {
- return (LongValues)caches.get(Long.TYPE).get(reader, new Entry(field, creator));
- }
-
- // inherit javadocs
- public double[] getDoubles(IndexReader reader, String field) throws IOException {
- return getDoubles(reader, field, new DoubleValuesCreator( field, null ) ).values;
+ static final class DoubleCache extends Cache {
+ DoubleCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ String field = entryKey.field;
+ FieldCache.DoubleParser parser = (FieldCache.DoubleParser) entryKey.custom;
+ if (parser == null) {
+ try {
+ return wrapper.getDoubles(reader, field, DEFAULT_DOUBLE_PARSER, setDocsWithField);
+ } catch (NumberFormatException ne) {
+ return wrapper.getDoubles(reader, field, NUMERIC_UTILS_DOUBLE_PARSER, setDocsWithField);
+ }
+ }
+ final int maxDoc = reader.maxDoc();
+ double[] retArray = null;
+
+ Terms terms = MultiFields.getTerms(reader, field);
+ FixedBitSet docsWithField = null;
+ if (terms != null) {
+ if (setDocsWithField) {
+ final int termsDocCount = terms.getDocCount();
+ assert termsDocCount <= maxDoc;
+ if (termsDocCount == maxDoc) {
+ // Fast case: all docs have this field:
+ wrapper.setDocsWithField(reader, field, new Bits.MatchAllBits(maxDoc));
+ setDocsWithField = false;
+ }
+ }
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ try {
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final double termval = parser.parseDouble(term);
+ if (retArray == null) {
+ // late init so numeric fields don't double allocate
+ retArray = new double[maxDoc];
+ }
+
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ retArray[docID] = termval;
+ if (setDocsWithField) {
+ if (docsWithField == null) {
+ // Lazy init
+ docsWithField = new FixedBitSet(maxDoc);
+ }
+ docsWithField.set(docID);
+ }
+ }
+ }
+ } catch (StopFillCacheException stop) {
+ }
+ }
+ if (retArray == null) { // no values
+ retArray = new double[maxDoc];
+ }
+ if (setDocsWithField) {
+ wrapper.setDocsWithField(reader, field, docsWithField);
+ }
+ return retArray;
+ }
}
- // inherit javadocs
- public double[] getDoubles(IndexReader reader, String field, FieldCache.DoubleParser parser) throws IOException {
- return getDoubles(reader, field, new DoubleValuesCreator( field, parser ) ).values;
+ public static class DocTermsIndexImpl extends DocTermsIndex {
+ private final PagedBytes.Reader bytes;
+ private final PackedInts.Reader termOrdToBytesOffset;
+ private final PackedInts.Reader docToTermOrd;
+ private final int numOrd;
+
+ public DocTermsIndexImpl(PagedBytes.Reader bytes, PackedInts.Reader termOrdToBytesOffset, PackedInts.Reader docToTermOrd, int numOrd) {
+ this.bytes = bytes;
+ this.docToTermOrd = docToTermOrd;
+ this.termOrdToBytesOffset = termOrdToBytesOffset;
+ this.numOrd = numOrd;
+ }
+
+ @Override
+ public PackedInts.Reader getDocToOrd() {
+ return docToTermOrd;
+ }
+
+ @Override
+ public int numOrd() {
+ return numOrd;
+ }
+
+ @Override
+ public int getOrd(int docID) {
+ return (int) docToTermOrd.get(docID);
+ }
+
+ @Override
+ public int size() {
+ return docToTermOrd.size();
+ }
+
+ @Override
+ public BytesRef lookup(int ord, BytesRef ret) {
+ return bytes.fill(ret, termOrdToBytesOffset.get(ord));
+ }
+
+ @Override
+ public TermsEnum getTermsEnum() {
+ return this.new DocTermsIndexEnum();
+ }
+
+ class DocTermsIndexEnum extends TermsEnum {
+ int currentOrd;
+ int currentBlockNumber;
+ int end; // end position in the current block
+ final byte[][] blocks;
+ final int[] blockEnds;
+
+ final BytesRef term = new BytesRef();
+
+ public DocTermsIndexEnum() {
+ currentOrd = 0;
+ currentBlockNumber = 0;
+ blocks = bytes.getBlocks();
+ blockEnds = bytes.getBlockEnds();
+ currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get(0));
+ end = blockEnds[currentBlockNumber];
+ }
+
+ @Override
+ public SeekStatus seekCeil(BytesRef text, boolean useCache /* ignored */) throws IOException {
+ int low = 1;
+ int high = numOrd-1;
+
+ while (low <= high) {
+ int mid = (low + high) >>> 1;
+ seekExact(mid);
+ int cmp = term.compareTo(text);
+
+ if (cmp < 0)
+ low = mid + 1;
+ else if (cmp > 0)
+ high = mid - 1;
+ else
+ return SeekStatus.FOUND; // key found
+ }
+
+ if (low == numOrd) {
+ return SeekStatus.END;
+ } else {
+ seekExact(low);
+ return SeekStatus.NOT_FOUND;
+ }
+ }
+
+ public void seekExact(long ord) throws IOException {
+ assert(ord >= 0 && ord <= numOrd);
+ // TODO: if gap is small, could iterate from current position? Or let user decide that?
+ currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get((int)ord));
+ end = blockEnds[currentBlockNumber];
+ currentOrd = (int)ord;
+ }
+
+ @Override
+ public BytesRef next() throws IOException {
+ int start = term.offset + term.length;
+ if (start >= end) {
+ // switch byte blocks
+ if (currentBlockNumber +1 >= blocks.length) {
+ return null;
+ }
+ currentBlockNumber++;
+ term.bytes = blocks[currentBlockNumber];
+ end = blockEnds[currentBlockNumber];
+ start = 0;
+ if (end<=0) return null; // special case of empty last array
+ }
+
+ currentOrd++;
+
+ byte[] block = term.bytes;
+ if ((block[start] & 128) == 0) {
+ term.length = block[start];
+ term.offset = start+1;
+ } else {
+ term.length = (((block[start] & 0x7f)) << 8) | (block[1+start] & 0xff);
+ term.offset = start+2;
+ }
+
+ return term;
+ }
+
+ @Override
+ public BytesRef term() throws IOException {
+ return term;
+ }
+
+ @Override
+ public long ord() throws IOException {
+ return currentOrd;
+ }
+
+ @Override
+ public int docFreq() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public long totalTermFreq() {
+ return -1;
+ }
+
+ @Override
+ public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+    public Comparator<BytesRef> getComparator() throws IOException {
+ return BytesRef.getUTF8SortedAsUnicodeComparator();
+ }
+
+ @Override
+ public void seekExact(BytesRef term, TermState state) throws IOException {
+ assert state != null && state instanceof OrdTermState;
+ this.seekExact(((OrdTermState)state).ord);
+ }
+
+ @Override
+ public TermState termState() throws IOException {
+ OrdTermState state = new OrdTermState();
+ state.ord = currentOrd;
+ return state;
+ }
+ }
}
- @SuppressWarnings("unchecked")
- public DoubleValues getDoubles(IndexReader reader, String field, EntryCreator creator ) throws IOException {
- return (DoubleValues)caches.get(Double.TYPE).get(reader, new Entry(field, creator));
+ private static boolean DEFAULT_FASTER_BUT_MORE_RAM = true;
+
+ public DocTermsIndex getTermsIndex(IndexReader reader, String field) throws IOException {
+ return getTermsIndex(reader, field, DEFAULT_FASTER_BUT_MORE_RAM);
}
- public DocTermsIndex getTermsIndex(IndexReader reader, String field) throws IOException {
- return getTermsIndex(reader, field, new DocTermsIndexCreator(field));
+ public DocTermsIndex getTermsIndex(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
+ return (DocTermsIndex) caches.get(DocTermsIndex.class).get(reader, new Entry(field, Boolean.valueOf(fasterButMoreRAM)), false);
}
- public DocTermsIndex getTermsIndex(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
- return getTermsIndex(reader, field, new DocTermsIndexCreator(field,
- fasterButMoreRAM ? DocTermsIndexCreator.FASTER_BUT_MORE_RAM : 0));
+ static class DocTermsIndexCache extends Cache {
+ DocTermsIndexCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+
+ Terms terms = MultiFields.getTerms(reader, entryKey.field);
+
+ final boolean fasterButMoreRAM = ((Boolean) entryKey.custom).booleanValue();
+
+ final PagedBytes bytes = new PagedBytes(15);
+
+ int startBytesBPV;
+ int startTermsBPV;
+ int startNumUniqueTerms;
+
+ int maxDoc = reader.maxDoc();
+ final int termCountHardLimit;
+ if (maxDoc == Integer.MAX_VALUE) {
+ termCountHardLimit = Integer.MAX_VALUE;
+ } else {
+ termCountHardLimit = maxDoc+1;
+ }
+
+ if (terms != null) {
+ // Try for coarse estimate for number of bits; this
+ // should be an underestimate most of the time, which
+ // is fine -- GrowableWriter will reallocate as needed
+ long numUniqueTerms = 0;
+ try {
+ numUniqueTerms = terms.getUniqueTermCount();
+ } catch (UnsupportedOperationException uoe) {
+ numUniqueTerms = -1;
+ }
+ if (numUniqueTerms != -1) {
+
+ if (numUniqueTerms > termCountHardLimit) {
+ // app is misusing the API (there is more than
+ // one term per doc); in this case we make best
+ // effort to load what we can (see LUCENE-2142)
+ numUniqueTerms = termCountHardLimit;
+ }
+
+ startBytesBPV = PackedInts.bitsRequired(numUniqueTerms*4);
+ startTermsBPV = PackedInts.bitsRequired(numUniqueTerms);
+
+ startNumUniqueTerms = (int) numUniqueTerms;
+ } else {
+ startBytesBPV = 1;
+ startTermsBPV = 1;
+ startNumUniqueTerms = 1;
+ }
+ } else {
+ startBytesBPV = 1;
+ startTermsBPV = 1;
+ startNumUniqueTerms = 1;
+ }
+
+ GrowableWriter termOrdToBytesOffset = new GrowableWriter(startBytesBPV, 1+startNumUniqueTerms, fasterButMoreRAM);
+ final GrowableWriter docToTermOrd = new GrowableWriter(startTermsBPV, maxDoc, fasterButMoreRAM);
+
+ // 0 is reserved for "unset"
+ bytes.copyUsingLengthPrefix(new BytesRef());
+ int termOrd = 1;
+
+ if (terms != null) {
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+
+ while(true) {
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ if (termOrd >= termCountHardLimit) {
+ break;
+ }
+
+ if (termOrd == termOrdToBytesOffset.size()) {
+ // NOTE: this code only runs if the incoming
+ // reader impl doesn't implement
+ // getUniqueTermCount (which should be uncommon)
+ termOrdToBytesOffset = termOrdToBytesOffset.resize(ArrayUtil.oversize(1+termOrd, 1));
+ }
+ termOrdToBytesOffset.set(termOrd, bytes.copyUsingLengthPrefix(term));
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ docToTermOrd.set(docID, termOrd);
+ }
+ termOrd++;
+ }
+
+ if (termOrdToBytesOffset.size() > termOrd) {
+ termOrdToBytesOffset = termOrdToBytesOffset.resize(termOrd);
+ }
+ }
+
+ // maybe an int-only impl?
+ return new DocTermsIndexImpl(bytes.freeze(true), termOrdToBytesOffset.getMutable(), docToTermOrd.getMutable(), termOrd);
+ }
}
- @SuppressWarnings("unchecked")
- public DocTermsIndex getTermsIndex(IndexReader reader, String field, EntryCreator creator) throws IOException {
- return (DocTermsIndex)caches.get(DocTermsIndex.class).get(reader, new Entry(field, creator));
+ private static class DocTermsImpl extends DocTerms {
+ private final PagedBytes.Reader bytes;
+ private final PackedInts.Reader docToOffset;
+
+ public DocTermsImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset) {
+ this.bytes = bytes;
+ this.docToOffset = docToOffset;
+ }
+
+ @Override
+ public int size() {
+ return docToOffset.size();
+ }
+
+ @Override
+ public boolean exists(int docID) {
+      return docToOffset.get(docID) != 0;
+ }
+
+ @Override
+ public BytesRef getTerm(int docID, BytesRef ret) {
+ final int pointer = (int) docToOffset.get(docID);
+ return bytes.fill(ret, pointer);
+ }
}
// TODO: this if DocTermsIndex was already created, we
// should share it...
public DocTerms getTerms(IndexReader reader, String field) throws IOException {
- return getTerms(reader, field, new DocTermsCreator(field));
+ return getTerms(reader, field, DEFAULT_FASTER_BUT_MORE_RAM);
}
public DocTerms getTerms(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
- return getTerms(reader, field, new DocTermsCreator(field,
- fasterButMoreRAM ? DocTermsCreator.FASTER_BUT_MORE_RAM : 0));
+ return (DocTerms) caches.get(DocTerms.class).get(reader, new Entry(field, Boolean.valueOf(fasterButMoreRAM)), false);
}
- @SuppressWarnings("unchecked")
- public DocTerms getTerms(IndexReader reader, String field, EntryCreator creator) throws IOException {
- return (DocTerms)caches.get(DocTerms.class).get(reader, new Entry(field, creator));
+ static final class DocTermsCache extends Cache {
+ DocTermsCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+
+ Terms terms = MultiFields.getTerms(reader, entryKey.field);
+
+ final boolean fasterButMoreRAM = ((Boolean) entryKey.custom).booleanValue();
+
+ final int termCountHardLimit = reader.maxDoc();
+
+ // Holds the actual term data, expanded.
+ final PagedBytes bytes = new PagedBytes(15);
+
+ int startBPV;
+
+ if (terms != null) {
+ // Try for coarse estimate for number of bits; this
+ // should be an underestimate most of the time, which
+ // is fine -- GrowableWriter will reallocate as needed
+ long numUniqueTerms = 0;
+ try {
+ numUniqueTerms = terms.getUniqueTermCount();
+ } catch (UnsupportedOperationException uoe) {
+ numUniqueTerms = -1;
+ }
+ if (numUniqueTerms != -1) {
+ if (numUniqueTerms > termCountHardLimit) {
+ numUniqueTerms = termCountHardLimit;
+ }
+ startBPV = PackedInts.bitsRequired(numUniqueTerms*4);
+ } else {
+ startBPV = 1;
+ }
+ } else {
+ startBPV = 1;
+ }
+
+ final GrowableWriter docToOffset = new GrowableWriter(startBPV, reader.maxDoc(), fasterButMoreRAM);
+
+ // pointer==0 means not set
+ bytes.copyUsingLengthPrefix(new BytesRef());
+
+ if (terms != null) {
+ int termCount = 0;
+ final TermsEnum termsEnum = terms.iterator();
+ DocsEnum docs = null;
+ while(true) {
+ if (termCount++ == termCountHardLimit) {
+ // app is misusing the API (there is more than
+ // one term per doc); in this case we make best
+ // effort to load what we can (see LUCENE-2142)
+ break;
+ }
+
+ final BytesRef term = termsEnum.next();
+ if (term == null) {
+ break;
+ }
+ final long pointer = bytes.copyUsingLengthPrefix(term);
+ docs = termsEnum.docs(null, docs);
+ while (true) {
+ final int docID = docs.nextDoc();
+ if (docID == DocsEnum.NO_MORE_DOCS) {
+ break;
+ }
+ docToOffset.set(docID, pointer);
+ }
+ }
+ }
+
+ // maybe an int-only impl?
+ return new DocTermsImpl(bytes.freeze(true), docToOffset.getMutable());
+ }
}
- @SuppressWarnings("unchecked")
public DocTermOrds getDocTermOrds(IndexReader reader, String field) throws IOException {
- return (DocTermOrds) caches.get(DocTermOrds.class).get(reader, new Entry(field, new DocTermOrdsCreator(field, 0)));
+ return (DocTermOrds) caches.get(DocTermOrds.class).get(reader, new Entry(field, null), false);
+ }
+
+ static final class DocTermOrdsCache extends Cache {
+ DocTermOrdsCache(FieldCacheImpl wrapper) {
+ super(wrapper);
+ }
+
+ @Override
+ protected Object createValue(IndexReader reader, Entry entryKey, boolean setDocsWithField)
+ throws IOException {
+ return new DocTermOrds(reader, entryKey.field);
+ }
}
private volatile PrintStream infoStream;
diff --git a/lucene/src/java/org/apache/lucene/search/FieldCacheRangeFilter.java b/lucene/src/java/org/apache/lucene/search/FieldCacheRangeFilter.java
index 5755c9eaff1..e51ef272554 100644
--- a/lucene/src/java/org/apache/lucene/search/FieldCacheRangeFilter.java
+++ b/lucene/src/java/org/apache/lucene/search/FieldCacheRangeFilter.java
@@ -134,7 +134,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String,boolean)}. This works with all
* byte fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -143,7 +143,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String,FieldCache.ByteParser)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getBytes(IndexReader,String,FieldCache.ByteParser,boolean)}. This works with all
* byte fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -172,7 +172,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
- final byte[] values = FieldCache.DEFAULT.getBytes(context.reader, field, (FieldCache.ByteParser) parser);
+ final byte[] values = FieldCache.DEFAULT.getBytes(context.reader, field, (FieldCache.ByteParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -184,7 +184,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String,boolean)}. This works with all
* short fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -193,7 +193,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String,FieldCache.ShortParser)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getShorts(IndexReader,String,FieldCache.ShortParser,boolean)}. This works with all
* short fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -222,7 +222,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
- final short[] values = FieldCache.DEFAULT.getShorts(context.reader, field, (FieldCache.ShortParser) parser);
+ final short[] values = FieldCache.DEFAULT.getShorts(context.reader, field, (FieldCache.ShortParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -234,7 +234,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String,boolean)}. This works with all
* int fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -243,7 +243,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String,FieldCache.IntParser)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getInts(IndexReader,String,FieldCache.IntParser,boolean)}. This works with all
* int fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -272,7 +272,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
- final int[] values = FieldCache.DEFAULT.getInts(context.reader, field, (FieldCache.IntParser) parser);
+ final int[] values = FieldCache.DEFAULT.getInts(context.reader, field, (FieldCache.IntParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -284,7 +284,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String,boolean)}. This works with all
* long fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -293,7 +293,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String,FieldCache.LongParser)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getLongs(IndexReader,String,FieldCache.LongParser,boolean)}. This works with all
* long fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -322,7 +322,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
- final long[] values = FieldCache.DEFAULT.getLongs(context.reader, field, (FieldCache.LongParser) parser);
+ final long[] values = FieldCache.DEFAULT.getLongs(context.reader, field, (FieldCache.LongParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -334,7 +334,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String,boolean)}. This works with all
* float fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -343,7 +343,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String,FieldCache.FloatParser)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getFloats(IndexReader,String,FieldCache.FloatParser,boolean)}. This works with all
* float fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -376,7 +376,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
- final float[] values = FieldCache.DEFAULT.getFloats(context.reader, field, (FieldCache.FloatParser) parser);
+ final float[] values = FieldCache.DEFAULT.getFloats(context.reader, field, (FieldCache.FloatParser) parser, false);
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
boolean matchDoc(int doc) {
@@ -388,7 +388,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String,boolean)}. This works with all
* double fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -397,7 +397,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
}
/**
- * Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String,FieldCache.DoubleParser)}. This works with all
+ * Creates a numeric range filter using {@link FieldCache#getDoubles(IndexReader,String,FieldCache.DoubleParser,boolean)}. This works with all
* double fields containing exactly one numeric term in the field. The range can be half-open by setting one
 * of the values to null.
*/
@@ -430,7 +430,7 @@ public abstract class FieldCacheRangeFilter extends Filter {
if (inclusiveLowerPoint > inclusiveUpperPoint)
return DocIdSet.EMPTY_DOCIDSET;
- final double[] values = FieldCache.DEFAULT.getDoubles(context.reader, field, (FieldCache.DoubleParser) parser);
+ final double[] values = FieldCache.DEFAULT.getDoubles(context.reader, field, (FieldCache.DoubleParser) parser, false);
// ignore deleted docs if range doesn't contain 0
return new FieldCacheDocIdSet(context.reader.maxDoc(), acceptDocs) {
@Override
diff --git a/lucene/src/java/org/apache/lucene/search/FieldComparator.java b/lucene/src/java/org/apache/lucene/search/FieldComparator.java
index fed7e9940e7..3e50ee82cf9 100644
--- a/lucene/src/java/org/apache/lucene/search/FieldComparator.java
+++ b/lucene/src/java/org/apache/lucene/search/FieldComparator.java
@@ -17,19 +17,23 @@ package org.apache.lucene.search;
* limitations under the License.
*/
+import java.io.IOException;
+
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
-import org.apache.lucene.index.values.IndexDocValues;
import org.apache.lucene.index.values.IndexDocValues.Source;
+import org.apache.lucene.index.values.IndexDocValues;
+import org.apache.lucene.search.FieldCache.ByteParser;
import org.apache.lucene.search.FieldCache.DocTerms;
import org.apache.lucene.search.FieldCache.DocTermsIndex;
-import org.apache.lucene.search.cache.*;
-import org.apache.lucene.search.cache.CachedArray.*;
+import org.apache.lucene.search.FieldCache.DoubleParser;
+import org.apache.lucene.search.FieldCache.FloatParser;
+import org.apache.lucene.search.FieldCache.IntParser;
+import org.apache.lucene.search.FieldCache.LongParser;
+import org.apache.lucene.search.FieldCache.ShortParser;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.packed.PackedInts;
-import java.io.IOException;
-
/**
* Expert: a FieldComparator compares hits so as to determine their
* sort order when collecting the top results with {@link
@@ -185,38 +189,43 @@ public abstract class FieldComparator {
}
}
- public static abstract class NumericComparator extends FieldComparator {
- protected final CachedArrayCreator creator;
- protected T cached;
- protected final boolean checkMissing;
- protected Bits valid;
+ public static abstract class NumericComparator extends FieldComparator {
+ protected final T missingValue;
+ protected final String field;
+ protected Bits docsWithField;
- public NumericComparator( CachedArrayCreator c, boolean checkMissing ) {
- this.creator = c;
- this.checkMissing = checkMissing;
+ public NumericComparator(String field, T missingValue) {
+ this.field = field;
+ this.missingValue = missingValue;
}
- protected FieldComparator setup(T cached) {
- this.cached = cached;
- if (checkMissing)
- valid = cached.valid;
+ @Override
+ public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
+ if (missingValue != null) {
+ docsWithField = FieldCache.DEFAULT.getDocsWithField(context.reader, field);
+ // optimization to remove unneeded checks on the bit interface:
+ if (docsWithField instanceof Bits.MatchAllBits) {
+ docsWithField = null;
+ }
+ } else {
+ docsWithField = null;
+ }
return this;
}
}
/** Parses field's values as byte (using {@link
* FieldCache#getBytes} and sorts by ascending value */
- public static final class ByteComparator extends NumericComparator {
- private byte[] docValues;
+ public static final class ByteComparator extends NumericComparator {
private final byte[] values;
- private final byte missingValue;
+ private final ByteParser parser;
+ private byte[] currentReaderValues;
private byte bottom;
- ByteComparator(int numHits, ByteValuesCreator creator, Byte missingValue ) {
- super( creator, missingValue!=null );
+ ByteComparator(int numHits, String field, FieldCache.Parser parser, Byte missingValue) {
+ super(field, missingValue);
values = new byte[numHits];
- this.missingValue = checkMissing
- ? missingValue.byteValue() : 0;
+ this.parser = (ByteParser) parser;
}
@Override
@@ -226,27 +235,31 @@ public abstract class FieldComparator {
@Override
public int compareBottom(int doc) {
- byte v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ byte v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
return bottom - v2;
}
@Override
public void copy(int slot, int doc) {
- byte v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ byte v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
-
+ }
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- setup(FieldCache.DEFAULT.getBytes(context.reader, creator.field, creator));
- docValues = cached.values;
- return this;
+ currentReaderValues = FieldCache.DEFAULT.getBytes(context.reader, field, parser, missingValue != null);
+ return super.setNextReader(context);
}
@Override
@@ -263,17 +276,16 @@ public abstract class FieldComparator {
/** Parses field's values as double (using {@link
* FieldCache#getDoubles} and sorts by ascending value */
- public static final class DoubleComparator extends NumericComparator {
- private double[] docValues;
+ public static final class DoubleComparator extends NumericComparator {
private final double[] values;
- private final double missingValue;
+ private final DoubleParser parser;
+ private double[] currentReaderValues;
private double bottom;
- DoubleComparator(int numHits, DoubleValuesCreator creator, Double missingValue ) {
- super( creator, missingValue != null );
+ DoubleComparator(int numHits, String field, FieldCache.Parser parser, Double missingValue) {
+ super(field, missingValue);
values = new double[numHits];
- this.missingValue = checkMissing
- ? missingValue.doubleValue() : 0;
+ this.parser = (DoubleParser) parser;
}
@Override
@@ -291,9 +303,12 @@ public abstract class FieldComparator {
@Override
public int compareBottom(int doc) {
- double v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ double v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
if (bottom > v2) {
return 1;
@@ -306,18 +321,20 @@ public abstract class FieldComparator {
@Override
public void copy(int slot, int doc) {
- double v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ double v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- setup(FieldCache.DEFAULT.getDoubles(context.reader, creator.field, creator));
- docValues = cached.values;
- return this;
+ currentReaderValues = FieldCache.DEFAULT.getDoubles(context.reader, field, parser, missingValue != null);
+ return super.setNextReader(context);
}
@Override
@@ -334,8 +351,8 @@ public abstract class FieldComparator {
/** Uses float index values to sort by ascending value */
public static final class FloatDocValuesComparator extends FieldComparator {
private final double[] values;
- private Source currentReaderValues;
private final String field;
+ private Source currentReaderValues;
private double bottom;
FloatDocValuesComparator(int numHits, String field) {
@@ -395,17 +412,16 @@ public abstract class FieldComparator {
/** Parses field's values as float (using {@link
* FieldCache#getFloats} and sorts by ascending value */
- public static final class FloatComparator extends NumericComparator {
- private float[] docValues;
+ public static final class FloatComparator extends NumericComparator {
private final float[] values;
- private final float missingValue;
+ private final FloatParser parser;
+ private float[] currentReaderValues;
private float bottom;
- FloatComparator(int numHits, FloatValuesCreator creator, Float missingValue ) {
- super( creator, missingValue != null );
+ FloatComparator(int numHits, String field, FieldCache.Parser parser, Float missingValue) {
+ super(field, missingValue);
values = new float[numHits];
- this.missingValue = checkMissing
- ? missingValue.floatValue() : 0;
+ this.parser = (FloatParser) parser;
}
@Override
@@ -426,10 +442,12 @@ public abstract class FieldComparator {
@Override
public int compareBottom(int doc) {
// TODO: are there sneaky non-branch ways to compute sign of float?
- float v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ float v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
-
+ }
if (bottom > v2) {
return 1;
@@ -442,18 +460,20 @@ public abstract class FieldComparator {
@Override
public void copy(int slot, int doc) {
- float v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ float v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- setup(FieldCache.DEFAULT.getFloats(context.reader, creator.field, creator));
- docValues = cached.values;
- return this;
+ currentReaderValues = FieldCache.DEFAULT.getFloats(context.reader, field, parser, missingValue != null);
+ return super.setNextReader(context);
}
@Override
@@ -469,17 +489,16 @@ public abstract class FieldComparator {
/** Parses field's values as short (using {@link
* FieldCache#getShorts} and sorts by ascending value */
- public static final class ShortComparator extends NumericComparator {
- private short[] docValues;
+ public static final class ShortComparator extends NumericComparator {
private final short[] values;
+ private final ShortParser parser;
+ private short[] currentReaderValues;
private short bottom;
- private final short missingValue;
- ShortComparator(int numHits, ShortValuesCreator creator, Short missingValue ) {
- super( creator, missingValue != null );
+ ShortComparator(int numHits, String field, FieldCache.Parser parser, Short missingValue) {
+ super(field, missingValue);
values = new short[numHits];
- this.missingValue = checkMissing
- ? missingValue.shortValue() : 0;
+ this.parser = (ShortParser) parser;
}
@Override
@@ -489,27 +508,32 @@ public abstract class FieldComparator {
@Override
public int compareBottom(int doc) {
- short v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ short v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
return bottom - v2;
}
@Override
public void copy(int slot, int doc) {
- short v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ short v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- setup( FieldCache.DEFAULT.getShorts(context.reader, creator.field, creator));
- docValues = cached.values;
- return this;
+ currentReaderValues = FieldCache.DEFAULT.getShorts(context.reader, field, parser, missingValue != null);
+ return super.setNextReader(context);
}
@Override
@@ -525,17 +549,16 @@ public abstract class FieldComparator {
/** Parses field's values as int (using {@link
* FieldCache#getInts} and sorts by ascending value */
- public static final class IntComparator extends NumericComparator {
- private int[] docValues;
+ public static final class IntComparator extends NumericComparator {
private final int[] values;
+ private final IntParser parser;
+ private int[] currentReaderValues;
private int bottom; // Value of bottom of queue
- final int missingValue;
-
- IntComparator(int numHits, IntValuesCreator creator, Integer missingValue ) {
- super( creator, missingValue != null );
+
+ IntComparator(int numHits, String field, FieldCache.Parser parser, Integer missingValue) {
+ super(field, missingValue);
values = new int[numHits];
- this.missingValue = checkMissing
- ? missingValue.intValue() : 0;
+ this.parser = (IntParser) parser;
}
@Override
@@ -561,9 +584,12 @@ public abstract class FieldComparator {
// -1/+1/0 sign
// Cannot return bottom - values[slot2] because that
// may overflow
- int v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ int v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
if (bottom > v2) {
return 1;
@@ -576,18 +602,20 @@ public abstract class FieldComparator {
@Override
public void copy(int slot, int doc) {
- int v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ int v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- setup(FieldCache.DEFAULT.getInts(context.reader, creator.field, creator));
- docValues = cached.values;
- return this;
+ currentReaderValues = FieldCache.DEFAULT.getInts(context.reader, field, parser, missingValue != null);
+ return super.setNextReader(context);
}
@Override
@@ -669,19 +697,18 @@ public abstract class FieldComparator {
/** Parses field's values as long (using {@link
* FieldCache#getLongs} and sorts by ascending value */
- public static final class LongComparator extends NumericComparator {
- private long[] docValues;
+ public static final class LongComparator extends NumericComparator {
private final long[] values;
+ private final LongParser parser;
+ private long[] currentReaderValues;
private long bottom;
- private final long missingValue;
- LongComparator(int numHits, LongValuesCreator creator, Long missingValue ) {
- super( creator, missingValue != null );
+ LongComparator(int numHits, String field, FieldCache.Parser parser, Long missingValue) {
+ super(field, missingValue);
values = new long[numHits];
- this.missingValue = checkMissing
- ? missingValue.longValue() : 0;
+ this.parser = (LongParser) parser;
}
-
+
@Override
public int compare(int slot1, int slot2) {
// TODO: there are sneaky non-branch ways to compute
@@ -701,11 +728,13 @@ public abstract class FieldComparator {
public int compareBottom(int doc) {
// TODO: there are sneaky non-branch ways to compute
// -1/+1/0 sign
- long v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ long v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
-
if (bottom > v2) {
return 1;
} else if (bottom < v2) {
@@ -717,18 +746,20 @@ public abstract class FieldComparator {
@Override
public void copy(int slot, int doc) {
- long v2 = docValues[doc];
- if (valid != null && v2==0 && !valid.get(doc))
+ long v2 = currentReaderValues[doc];
+ // Test for v2 == 0 to save Bits.get method call for
+ // the common case (doc has value and value is non-zero):
+ if (docsWithField != null && v2 == 0 && !docsWithField.get(doc)) {
v2 = missingValue;
+ }
values[slot] = v2;
}
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- setup(FieldCache.DEFAULT.getLongs(context.reader, creator.field, creator));
- docValues = cached.values;
- return this;
+ currentReaderValues = FieldCache.DEFAULT.getLongs(context.reader, field, parser, missingValue != null);
+ return super.setNextReader(context);
}
@Override
diff --git a/lucene/src/java/org/apache/lucene/search/SortField.java b/lucene/src/java/org/apache/lucene/search/SortField.java
index 4267480c60a..42cf6aaa974 100644
--- a/lucene/src/java/org/apache/lucene/search/SortField.java
+++ b/lucene/src/java/org/apache/lucene/search/SortField.java
@@ -20,7 +20,6 @@ package org.apache.lucene.search;
import java.io.IOException;
import java.util.Comparator;
-import org.apache.lucene.search.cache.*;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.StringHelper;
@@ -104,12 +103,14 @@ public class SortField {
private String field;
private Type type; // defaults to determining type dynamically
boolean reverse = false; // defaults to natural order
- private CachedArrayCreator<?> creator;
- public Object missingValue = null; // used for 'sortMissingFirst/Last'
+ private FieldCache.Parser parser;
// Used for CUSTOM sort
private FieldComparatorSource comparatorSource;
+ // Used for 'sortMissingFirst/Last'
+ public Object missingValue = null;
+
/** Creates a sort by terms in the given field with the type of term
* values explicitly given.
* @param field Name of field to sort by. Can be <code>null</code>
* if <code>type</code> is SCORE or DOC.
@@ -141,10 +142,7 @@ public class SortField {
* by testing which numeric parser the parser subclasses.
* @throws IllegalArgumentException if the parser fails to
* subclass an existing numeric parser, or field is null
- *
- * @deprecated (4.0) use EntryCreator version
*/
- @Deprecated
public SortField(String field, FieldCache.Parser parser) {
this(field, parser, false);
}
@@ -159,65 +157,27 @@ public class SortField {
* @param reverse True if natural order should be reversed.
* @throws IllegalArgumentException if the parser fails to
* subclass an existing numeric parser, or field is null
- *
- * @deprecated (4.0) use EntryCreator version
*/
- @Deprecated
public SortField(String field, FieldCache.Parser parser, boolean reverse) {
- if (field == null) {
- throw new IllegalArgumentException("field can only be null when type is SCORE or DOC");
- }
- this.field = field;
- this.reverse = reverse;
-
- if (parser instanceof FieldCache.IntParser) {
- this.creator = new IntValuesCreator( field, (FieldCache.IntParser)parser );
- }
- else if (parser instanceof FieldCache.FloatParser) {
- this.creator = new FloatValuesCreator( field, (FieldCache.FloatParser)parser );
- }
- else if (parser instanceof FieldCache.ShortParser) {
- this.creator = new ShortValuesCreator( field, (FieldCache.ShortParser)parser );
- }
- else if (parser instanceof FieldCache.ByteParser) {
- this.creator = new ByteValuesCreator( field, (FieldCache.ByteParser)parser );
- }
- else if (parser instanceof FieldCache.LongParser) {
- this.creator = new LongValuesCreator( field, (FieldCache.LongParser)parser );
- }
- else if (parser instanceof FieldCache.DoubleParser) {
- this.creator = new DoubleValuesCreator( field, (FieldCache.DoubleParser)parser );
- }
- else
+ if (parser instanceof FieldCache.IntParser) initFieldType(field, Type.INT);
+ else if (parser instanceof FieldCache.FloatParser) initFieldType(field, Type.FLOAT);
+ else if (parser instanceof FieldCache.ShortParser) initFieldType(field, Type.SHORT);
+ else if (parser instanceof FieldCache.ByteParser) initFieldType(field, Type.BYTE);
+ else if (parser instanceof FieldCache.LongParser) initFieldType(field, Type.LONG);
+ else if (parser instanceof FieldCache.DoubleParser) initFieldType(field, Type.DOUBLE);
+ else {
throw new IllegalArgumentException("Parser instance does not subclass existing numeric parser from FieldCache (got " + parser + ")");
-
- this.type = this.creator.getSortType();
- }
-
- /**
- * Sort by a cached entry value
- * @param creator
- * @param reverse
- */
- public SortField( CachedArrayCreator<?> creator, boolean reverse )
- {
- this.field = creator.field;
- this.reverse = reverse;
- this.creator = creator;
- this.type = creator.getSortType();
- }
-
- public SortField setMissingValue( Object v )
- {
- missingValue = v;
- if( missingValue != null ) {
- if( this.creator == null ) {
- throw new IllegalArgumentException( "Missing value only works for sort fields with a CachedArray" );
- }
-
- // Set the flag to get bits
- creator.setFlag( CachedArrayCreator.OPTION_CACHE_BITS );
}
+
+ this.reverse = reverse;
+ this.parser = parser;
+ }
+
+ public SortField setMissingValue(Object missingValue) {
+ if (type != Type.BYTE && type != Type.SHORT && type != Type.INT && type != Type.FLOAT && type != Type.LONG && type != Type.DOUBLE) {
+ throw new IllegalArgumentException( "Missing value only works for numeric types" );
+ }
+ this.missingValue = missingValue;
return this;
}
@@ -246,23 +206,12 @@ public class SortField {
private void initFieldType(String field, Type type) {
this.type = type;
if (field == null) {
- if (type != Type.SCORE && type != Type.DOC)
+ if (type != Type.SCORE && type != Type.DOC) {
throw new IllegalArgumentException("field can only be null when type is SCORE or DOC");
+ }
} else {
this.field = field;
}
-
- if( creator != null ) {
- throw new IllegalStateException( "creator already exists: "+creator );
- }
- switch( type ) {
- case BYTE: creator = new ByteValuesCreator( field, null ); break;
- case SHORT: creator = new ShortValuesCreator( field, null ); break;
- case INT: creator = new IntValuesCreator( field, null ); break;
- case LONG: creator = new LongValuesCreator( field, null ); break;
- case FLOAT: creator = new FloatValuesCreator( field, null ); break;
- case DOUBLE: creator = new DoubleValuesCreator( field, null ); break;
- }
}
/** Returns the name of the field. Could return null
@@ -283,15 +232,9 @@ public class SortField {
/** Returns the instance of a {@link FieldCache} parser that fits to the given sort type.
* May return <code>null</code> if no parser was specified. Sorting is using the default parser then.
* @return An instance of a {@link FieldCache} parser, or <code>null</code>.
- * @deprecated (4.0) use getEntryCreator()
*/
- @Deprecated
public FieldCache.Parser getParser() {
- return (creator==null) ? null : creator.getParser();
- }
-
- public CachedArrayCreator<?> getEntryCreator() {
- return creator;
+ return parser;
}
/** Returns whether the sort should be reversed.
@@ -365,7 +308,6 @@ public class SortField {
break;
}
- if (creator != null) buffer.append('(').append(creator).append(')');
if (reverse) buffer.append('!');
return buffer.toString();
@@ -385,7 +327,6 @@ public class SortField {
&& other.type == this.type
&& other.reverse == this.reverse
&& (other.comparatorSource == null ? this.comparatorSource == null : other.comparatorSource.equals(this.comparatorSource))
- && (other.creator == null ? this.creator == null : other.creator.equals(this.creator))
);
}
@@ -399,7 +340,6 @@ public class SortField {
int hash = type.hashCode() ^ 0x346565dd + Boolean.valueOf(reverse).hashCode() ^ 0xaf5998bb;
if (field != null) hash += field.hashCode()^0xff5685dd;
if (comparatorSource != null) hash += comparatorSource.hashCode();
- if (creator != null) hash += creator.hashCode()^0x3aaf56ff;
return hash;
}
@@ -448,27 +388,27 @@ public class SortField {
if (useIndexValues) {
return new FieldComparator.IntDocValuesComparator(numHits, field);
} else {
- return new FieldComparator.IntComparator(numHits, (IntValuesCreator)creator, (Integer) missingValue);
+ return new FieldComparator.IntComparator(numHits, field, parser, (Integer) missingValue);
}
case FLOAT:
if (useIndexValues) {
return new FieldComparator.FloatDocValuesComparator(numHits, field);
} else {
- return new FieldComparator.FloatComparator(numHits, (FloatValuesCreator) creator, (Float) missingValue);
+ return new FieldComparator.FloatComparator(numHits, field, parser, (Float) missingValue);
}
case LONG:
- return new FieldComparator.LongComparator(numHits, (LongValuesCreator)creator, (Long)missingValue );
+ return new FieldComparator.LongComparator(numHits, field, parser, (Long) missingValue);
case DOUBLE:
- return new FieldComparator.DoubleComparator(numHits, (DoubleValuesCreator)creator, (Double)missingValue );
+ return new FieldComparator.DoubleComparator(numHits, field, parser, (Double) missingValue);
case BYTE:
- return new FieldComparator.ByteComparator(numHits, (ByteValuesCreator)creator, (Byte)missingValue );
+ return new FieldComparator.ByteComparator(numHits, field, parser, (Byte) missingValue);
case SHORT:
- return new FieldComparator.ShortComparator(numHits, (ShortValuesCreator)creator, (Short)missingValue );
+ return new FieldComparator.ShortComparator(numHits, field, parser, (Short) missingValue);
case CUSTOM:
assert comparatorSource != null;
diff --git a/lucene/src/java/org/apache/lucene/search/cache/ByteValuesCreator.java b/lucene/src/java/org/apache/lucene/search/cache/ByteValuesCreator.java
deleted file mode 100644
index 52a132642fb..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/ByteValuesCreator.java
+++ /dev/null
@@ -1,146 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.FieldCache.ByteParser;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.cache.CachedArray.ByteValues;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public class ByteValuesCreator extends CachedArrayCreator<ByteValues>
-{
- protected ByteParser parser;
-
- public ByteValuesCreator( String field, ByteParser parser, int options )
- {
- super( field, options );
- this.parser = parser;
- }
-
- public ByteValuesCreator( String field, ByteParser parser )
- {
- super( field );
- this.parser = parser;
- }
-
- @Override
- public Class getArrayType() {
- return Byte.class;
- }
-
- @Override
- public Parser getParser() {
- return parser;
- }
-
- @Override
- public SortField.Type getSortType() {
- return SortField.Type.BYTE;
- }
-
- //--------------------------------------------------------------------------------
- //--------------------------------------------------------------------------------
-
- @Override
- public ByteValues create(IndexReader reader) throws IOException {
- return validate( new ByteValues(), reader );
- }
-
- @Override
- public synchronized ByteValues validate(ByteValues entry, IndexReader reader) throws IOException {
- boolean ok = false;
-
- if( hasOption(OPTION_CACHE_VALUES) ) {
- ok = true;
- if( entry.values == null ) {
- fillByteValues(entry, reader, field);
- }
- else {
- assertSameParser( entry, parser );
- }
- }
- if( hasOption(OPTION_CACHE_BITS) ) {
- ok = true;
- if( entry.valid == null ) {
- fillValidBits(entry, reader, field);
- }
- }
- if( !ok ) {
- throw new RuntimeException( "the config must cache values and/or bits" );
- }
- return entry;
- }
-
- protected void fillByteValues( ByteValues vals, IndexReader reader, String field ) throws IOException
- {
- if( parser == null ) {
- parser = FieldCache.DEFAULT_BYTE_PARSER;
- }
- setParserAndResetCounts(vals, parser);
-
- Terms terms = MultiFields.getTerms(reader, field);
- int maxDoc = reader.maxDoc();
- vals.values = new byte[maxDoc];
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
- DocsEnum docs = null;
- try {
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final byte termval = parser.parseByte(term);
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- vals.values[docID] = termval;
- vals.numDocs++;
- if( validBits != null ) {
- validBits.set( docID );
- }
- }
- vals.numTerms++;
- }
- } catch (FieldCache.StopFillCacheException stop) {}
-
- if( vals.valid == null ) {
- vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
- }
- }
- if( vals.valid == null && vals.numDocs < 1 ) {
- vals.valid = new Bits.MatchNoBits( maxDoc );
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/CachedArray.java b/lucene/src/java/org/apache/lucene/search/cache/CachedArray.java
deleted file mode 100644
index 34a299c4c27..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/CachedArray.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.util.Bits;
-
-public abstract class CachedArray
-{
- public Integer parserHashCode; // a flag to make sure you don't change what you are asking for in subsequent requests
- public int numDocs;
- public int numTerms;
-
- /**
- * NOTE: these Bits may have false positives for deleted documents. That is,
- * Documents that are deleted may be marked as valid but the array value is not.
- */
- public Bits valid;
-
- public CachedArray() {
- this.parserHashCode = null;
- this.numDocs = 0;
- this.numTerms = 0;
- }
-
- /**
- * @return the native array
- */
- public abstract Object getRawArray();
-
- //-------------------------------------------------------------
- // Concrete Values
- //-------------------------------------------------------------
-
- public static class ByteValues extends CachedArray {
- public byte[] values = null;
- @Override public byte[] getRawArray() { return values; }
- };
-
- public static class ShortValues extends CachedArray {
- public short[] values = null;
- @Override public short[] getRawArray() { return values; }
- };
-
- public static class IntValues extends CachedArray {
- public int[] values = null;
- @Override public int[] getRawArray() { return values; }
- };
-
- public static class FloatValues extends CachedArray {
- public float[] values = null;
- @Override public float[] getRawArray() { return values; }
- };
-
- public static class LongValues extends CachedArray {
- public long[] values = null;
- @Override public long[] getRawArray() { return values; }
- };
-
- public static class DoubleValues extends CachedArray {
- public double[] values = null;
- @Override public double[] getRawArray() { return values; }
- };
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/CachedArrayCreator.java b/lucene/src/java/org/apache/lucene/search/cache/CachedArrayCreator.java
deleted file mode 100644
index 80cd1570d28..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/CachedArrayCreator.java
+++ /dev/null
@@ -1,152 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public abstract class CachedArrayCreator<T extends CachedArray> extends EntryCreatorWithOptions<T>
-{
- public static final int OPTION_VALIDATE = 1;
- public static final int OPTION_CACHE_VALUES = 2;
- public static final int OPTION_CACHE_BITS = 4;
-
- // Composite Options Fields
- public static final int CACHE_VALUES_AND_BITS = OPTION_CACHE_VALUES ^ OPTION_CACHE_BITS;
- public static final int CACHE_VALUES_AND_BITS_VALIDATE = OPTION_CACHE_VALUES ^ OPTION_CACHE_BITS ^ OPTION_VALIDATE;
-
- public final String field;
-
- public CachedArrayCreator( String field )
- {
- super( OPTION_CACHE_VALUES ^ OPTION_VALIDATE );
- if( field == null ) {
- throw new IllegalArgumentException( "field can not be null" );
- }
- this.field = field;
- }
-
- public CachedArrayCreator( String field, int flags )
- {
- super( flags );
- if( field == null ) {
- throw new IllegalArgumentException( "field can not be null" );
- }
- this.field = field;
- }
-
- /**
- * Note that the 'flags' are not part of the key -- subsequent calls to the cache
- * with different options will use the same cache entry.
- */
- @Override
- public EntryKey getCacheKey() {
- return new SimpleEntryKey( CachedArray.class, getArrayType(), field );
- //return new Integer( CachedArrayCreator.class.hashCode() ^ getArrayType().hashCode() ^ field.hashCode() );
- }
-
- /** Return the type that the array will hold */
- public abstract Class getArrayType();
- public abstract Parser getParser();
- public abstract SortField.Type getSortType();
-
- protected void setParserAndResetCounts(T value, Parser parser)
- {
- int parserHashCode = parser.hashCode();
- if( value.parserHashCode != null && value.parserHashCode != parserHashCode ) {
- throw new RuntimeException( "Parser changed in subsequent call. "
- +value.parserHashCode+" != "+parserHashCode + " :: " + parser );
- }
- value.parserHashCode = parserHashCode;
- value.numDocs = value.numTerms = 0;
- }
-
- protected void assertSameParser(T value, Parser parser)
- {
- if( parser != null && value.parserHashCode != null ) {
- int parserHashCode = parser.hashCode();
- if( value.parserHashCode != parserHashCode ) {
- throw new RuntimeException( "Parser changed in subsequent call. "
- +value.parserHashCode+" != "+parserHashCode + " :: " + parser );
- }
- }
- }
-
- /**
- * Utility function to help check what bits are valid
- */
- protected Bits checkMatchAllBits( FixedBitSet valid, int numDocs, int maxDocs )
- {
- if( numDocs != maxDocs ) {
- if( hasOption( OPTION_CACHE_BITS ) ) {
-      for( int i=0; i<maxDocs; i++ ) {
[NOTE(review): a span of this patch was lost to angle-bracket stripping at this point — the remainder of CachedArrayCreator.java (rest of checkMatchAllBits, fillValidBits, closing braces), plus the "diff --git … deleted file mode" header, license header, and imports of the deleted DocTermOrdsCreator.java. The class declaration below is a best-effort reconstruction — verify against the original commit.]
-public class DocTermOrdsCreator extends EntryCreator<DocTermOrds> {
-
-  private final String field;
-
- public DocTermOrdsCreator(String field, int flag) {
- super(flag);
- this.field = field;
- }
-
- @Override
- public DocTermOrds create(IndexReader reader) throws IOException {
- return new DocTermOrds(reader, field);
- }
-
- @Override
- public DocTermOrds validate(DocTermOrds entry, IndexReader reader) throws IOException {
- return entry;
- }
-
- @Override
- public EntryKey getCacheKey() {
- return new SimpleEntryKey(DocTermOrdsCreator.class, field);
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/DocTermsCreator.java b/lucene/src/java/org/apache/lucene/search/cache/DocTermsCreator.java
deleted file mode 100644
index 7505ea6b858..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/DocTermsCreator.java
+++ /dev/null
@@ -1,169 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache.DocTerms;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.PagedBytes;
-import org.apache.lucene.util.packed.GrowableWriter;
-import org.apache.lucene.util.packed.PackedInts;
-
-// TODO: this if DocTermsIndex was already created, we should share it...
-public class DocTermsCreator extends EntryCreatorWithOptions<DocTerms>
-{
- public static final int FASTER_BUT_MORE_RAM = 2;
-
- public String field;
-
- public DocTermsCreator( String field )
- {
- super( FASTER_BUT_MORE_RAM ); // By default turn on FASTER_BUT_MORE_RAM
- if( field == null ) {
- throw new IllegalArgumentException( "field can not be null" );
- }
- this.field = field;
- }
-
- public DocTermsCreator( String field, int flags )
- {
- super( flags );
- if( field == null ) {
- throw new IllegalArgumentException( "field can not be null" );
- }
- this.field = field;
- }
-
- @Override
- public SimpleEntryKey getCacheKey() {
- return new SimpleEntryKey( DocTermsCreator.class, field );
- }
-
- @Override
- public DocTerms create(IndexReader reader) throws IOException {
-
- Terms terms = MultiFields.getTerms(reader, field);
-
- final boolean fasterButMoreRAM = hasOption( FASTER_BUT_MORE_RAM );
- final int termCountHardLimit = reader.maxDoc();
-
- // Holds the actual term data, expanded.
- final PagedBytes bytes = new PagedBytes(15);
-
- int startBPV;
-
- if (terms != null) {
- // Try for coarse estimate for number of bits; this
- // should be an underestimate most of the time, which
- // is fine -- GrowableWriter will reallocate as needed
- long numUniqueTerms = 0;
- try {
- numUniqueTerms = terms.getUniqueTermCount();
- } catch (UnsupportedOperationException uoe) {
- numUniqueTerms = -1;
- }
- if (numUniqueTerms != -1) {
- if (numUniqueTerms > termCountHardLimit) {
- numUniqueTerms = termCountHardLimit;
- }
- startBPV = PackedInts.bitsRequired(numUniqueTerms*4);
- } else {
- startBPV = 1;
- }
- } else {
- startBPV = 1;
- }
-
- final GrowableWriter docToOffset = new GrowableWriter(startBPV, reader.maxDoc(), fasterButMoreRAM);
-
- // pointer==0 means not set
- bytes.copyUsingLengthPrefix(new BytesRef());
-
- if (terms != null) {
- int termCount = 0;
- final TermsEnum termsEnum = terms.iterator();
- final Bits liveDocs = MultiFields.getLiveDocs(reader);
- DocsEnum docs = null;
- while(true) {
- if (termCount++ == termCountHardLimit) {
- // app is misusing the API (there is more than
- // one term per doc); in this case we make best
- // effort to load what we can (see LUCENE-2142)
- break;
- }
-
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final long pointer = bytes.copyUsingLengthPrefix(term);
- docs = termsEnum.docs(liveDocs, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- docToOffset.set(docID, pointer);
- }
- }
- }
-
- // maybe an int-only impl?
- return new DocTermsImpl(bytes.freeze(true), docToOffset.getMutable());
- }
-
- @Override
- public DocTerms validate(DocTerms entry, IndexReader reader) throws IOException {
- // TODO? nothing? perhaps subsequent call with FASTER_BUT_MORE_RAM?
- return entry;
- }
-
- private static class DocTermsImpl extends DocTerms {
- private final PagedBytes.Reader bytes;
- private final PackedInts.Reader docToOffset;
-
- public DocTermsImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset) {
- this.bytes = bytes;
- this.docToOffset = docToOffset;
- }
-
- @Override
- public int size() {
- return docToOffset.size();
- }
-
- @Override
- public boolean exists(int docID) {
- return docToOffset.get(docID) == 0;
- }
-
- @Override
- public BytesRef getTerm(int docID, BytesRef ret) {
- final long pointer = docToOffset.get(docID);
- return bytes.fill(ret, pointer);
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/DocTermsIndexCreator.java b/lucene/src/java/org/apache/lucene/search/cache/DocTermsIndexCreator.java
deleted file mode 100644
index dc35006bffc..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/DocTermsIndexCreator.java
+++ /dev/null
@@ -1,353 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.util.Comparator;
-
-import org.apache.lucene.index.DocsAndPositionsEnum;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.OrdTermState;
-import org.apache.lucene.index.TermState;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache.DocTermsIndex;
-import org.apache.lucene.util.ArrayUtil;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.PagedBytes;
-import org.apache.lucene.util.packed.GrowableWriter;
-import org.apache.lucene.util.packed.PackedInts;
-
-public class DocTermsIndexCreator extends EntryCreatorWithOptions
-{
- public static final int FASTER_BUT_MORE_RAM = 2;
-
- public String field;
-
- public DocTermsIndexCreator( String field )
- {
- super( FASTER_BUT_MORE_RAM ); // By default turn on FASTER_BUT_MORE_RAM
- if( field == null ) {
- throw new IllegalArgumentException( "field can not be null" );
- }
- this.field = field;
- }
-
- public DocTermsIndexCreator( String field, int flags )
- {
- super( flags );
- if( field == null ) {
- throw new IllegalArgumentException( "field can not be null" );
- }
- this.field = field;
- }
-
- @Override
- public EntryKey getCacheKey() {
- return new SimpleEntryKey( DocTermsIndexCreator.class, field );
- }
-
- @Override
- public DocTermsIndex create(IndexReader reader) throws IOException
- {
- Terms terms = MultiFields.getTerms(reader, field);
-
- final boolean fasterButMoreRAM = hasOption(FASTER_BUT_MORE_RAM);
-
- final PagedBytes bytes = new PagedBytes(15);
-
- int startBytesBPV;
- int startTermsBPV;
- int startNumUniqueTerms;
-
- int maxDoc = reader.maxDoc();
- final int termCountHardLimit;
- if (maxDoc == Integer.MAX_VALUE) {
- termCountHardLimit = Integer.MAX_VALUE;
- } else {
- termCountHardLimit = maxDoc+1;
- }
-
- if (terms != null) {
- // Try for coarse estimate for number of bits; this
- // should be an underestimate most of the time, which
- // is fine -- GrowableWriter will reallocate as needed
- long numUniqueTerms = 0;
- try {
- numUniqueTerms = terms.getUniqueTermCount();
- } catch (UnsupportedOperationException uoe) {
- numUniqueTerms = -1;
- }
- if (numUniqueTerms != -1) {
-
- if (numUniqueTerms > termCountHardLimit) {
- // app is misusing the API (there is more than
- // one term per doc); in this case we make best
- // effort to load what we can (see LUCENE-2142)
- numUniqueTerms = termCountHardLimit;
- }
-
- startBytesBPV = PackedInts.bitsRequired(numUniqueTerms*4);
- startTermsBPV = PackedInts.bitsRequired(numUniqueTerms);
-
- startNumUniqueTerms = (int) numUniqueTerms;
- } else {
- startBytesBPV = 1;
- startTermsBPV = 1;
- startNumUniqueTerms = 1;
- }
- } else {
- startBytesBPV = 1;
- startTermsBPV = 1;
- startNumUniqueTerms = 1;
- }
-
- GrowableWriter termOrdToBytesOffset = new GrowableWriter(startBytesBPV, 1+startNumUniqueTerms, fasterButMoreRAM);
- final GrowableWriter docToTermOrd = new GrowableWriter(startTermsBPV, reader.maxDoc(), fasterButMoreRAM);
-
- // 0 is reserved for "unset"
- bytes.copyUsingLengthPrefix(new BytesRef());
- int termOrd = 1;
-
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- DocsEnum docs = null;
-
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- if (termOrd >= termCountHardLimit) {
- break;
- }
-
- if (termOrd == termOrdToBytesOffset.size()) {
- // NOTE: this code only runs if the incoming
- // reader impl doesn't implement
- // getUniqueTermCount (which should be uncommon)
- termOrdToBytesOffset = termOrdToBytesOffset.resize(ArrayUtil.oversize(1+termOrd, 1));
- }
- termOrdToBytesOffset.set(termOrd, bytes.copyUsingLengthPrefix(term));
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- docToTermOrd.set(docID, termOrd);
- }
- termOrd++;
- }
-
- if (termOrdToBytesOffset.size() > termOrd) {
- termOrdToBytesOffset = termOrdToBytesOffset.resize(termOrd);
- }
- }
-
- // maybe an int-only impl?
- return new DocTermsIndexImpl(bytes.freeze(true), termOrdToBytesOffset.getMutable(), docToTermOrd.getMutable(), termOrd);
- }
-
- @Override
- public DocTermsIndex validate(DocTermsIndex entry, IndexReader reader) throws IOException {
- // TODO? nothing? perhaps subsequent call with FASTER_BUT_MORE_RAM?
- return entry;
- }
-
- //-----------------------------------------------------------------------------
- //-----------------------------------------------------------------------------
-
- public static class DocTermsIndexImpl extends DocTermsIndex {
- private final PagedBytes.Reader bytes;
- private final PackedInts.Reader termOrdToBytesOffset;
- private final PackedInts.Reader docToTermOrd;
- private final int numOrd;
-
- public DocTermsIndexImpl(PagedBytes.Reader bytes, PackedInts.Reader termOrdToBytesOffset, PackedInts.Reader docToTermOrd, int numOrd) {
- this.bytes = bytes;
- this.docToTermOrd = docToTermOrd;
- this.termOrdToBytesOffset = termOrdToBytesOffset;
- this.numOrd = numOrd;
- }
-
- @Override
- public PackedInts.Reader getDocToOrd() {
- return docToTermOrd;
- }
-
- @Override
- public int numOrd() {
- return numOrd;
- }
-
- @Override
- public int getOrd(int docID) {
- return (int) docToTermOrd.get(docID);
- }
-
- @Override
- public int size() {
- return docToTermOrd.size();
- }
-
- @Override
- public BytesRef lookup(int ord, BytesRef ret) {
- return bytes.fill(ret, termOrdToBytesOffset.get(ord));
- }
-
- @Override
- public TermsEnum getTermsEnum() {
- return this.new DocTermsIndexEnum();
- }
-
- class DocTermsIndexEnum extends TermsEnum {
- int currentOrd;
- int currentBlockNumber;
- int end; // end position in the current block
- final byte[][] blocks;
- final int[] blockEnds;
-
- final BytesRef term = new BytesRef();
-
- public DocTermsIndexEnum() {
- currentOrd = 0;
- currentBlockNumber = 0;
- blocks = bytes.getBlocks();
- blockEnds = bytes.getBlockEnds();
- currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get(0));
- end = blockEnds[currentBlockNumber];
- }
-
- @Override
- public SeekStatus seekCeil(BytesRef text, boolean useCache /* ignored */) throws IOException {
- int low = 1;
- int high = numOrd-1;
-
- while (low <= high) {
- int mid = (low + high) >>> 1;
- seekExact(mid);
- int cmp = term.compareTo(text);
-
- if (cmp < 0)
- low = mid + 1;
- else if (cmp > 0)
- high = mid - 1;
- else
- return SeekStatus.FOUND; // key found
- }
-
- if (low == numOrd) {
- return SeekStatus.END;
- } else {
- seekExact(low);
- return SeekStatus.NOT_FOUND;
- }
- }
-
- public void seekExact(long ord) throws IOException {
- assert(ord >= 0 && ord <= numOrd);
- // TODO: if gap is small, could iterate from current position? Or let user decide that?
- currentBlockNumber = bytes.fillAndGetIndex(term, termOrdToBytesOffset.get((int)ord));
- end = blockEnds[currentBlockNumber];
- currentOrd = (int)ord;
- }
-
- @Override
- public BytesRef next() throws IOException {
- int start = term.offset + term.length;
- if (start >= end) {
- // switch byte blocks
- if (currentBlockNumber +1 >= blocks.length) {
- return null;
- }
- currentBlockNumber++;
- term.bytes = blocks[currentBlockNumber];
- end = blockEnds[currentBlockNumber];
- start = 0;
- if (end<=0) return null; // special case of empty last array
- }
-
- currentOrd++;
-
- byte[] block = term.bytes;
- if ((block[start] & 128) == 0) {
- term.length = block[start];
- term.offset = start+1;
- } else {
- term.length = (((block[start] & 0x7f)) << 8) | (block[1+start] & 0xff);
- term.offset = start+2;
- }
-
- return term;
- }
-
- @Override
- public BytesRef term() throws IOException {
- return term;
- }
-
- @Override
- public long ord() throws IOException {
- return currentOrd;
- }
-
- @Override
- public int docFreq() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public long totalTermFreq() {
- return -1;
- }
-
- @Override
- public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public Comparator getComparator() throws IOException {
- return BytesRef.getUTF8SortedAsUnicodeComparator();
- }
-
- @Override
- public void seekExact(BytesRef term, TermState state) throws IOException {
- assert state != null && state instanceof OrdTermState;
- this.seekExact(((OrdTermState)state).ord);
- }
-
- @Override
- public TermState termState() throws IOException {
- OrdTermState state = new OrdTermState();
- state.ord = currentOrd;
- return state;
- }
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/DoubleValuesCreator.java b/lucene/src/java/org/apache/lucene/search/cache/DoubleValuesCreator.java
deleted file mode 100644
index 207da54e6d1..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/DoubleValuesCreator.java
+++ /dev/null
@@ -1,164 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.FieldCache.DoubleParser;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.cache.CachedArray.DoubleValues;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public class DoubleValuesCreator extends CachedArrayCreator
-{
- protected DoubleParser parser;
-
- public DoubleValuesCreator( String field, DoubleParser parser, int options )
- {
- super( field, options );
- this.parser = parser;
- }
-
- public DoubleValuesCreator( String field, DoubleParser parser )
- {
- super( field );
- this.parser = parser;
- }
-
- @Override
- public Class getArrayType() {
- return Double.class;
- }
-
- @Override
- public Parser getParser() {
- return parser;
- }
-
- @Override
- public SortField.Type getSortType() {
- return SortField.Type.DOUBLE;
- }
-
- //--------------------------------------------------------------------------------
- //--------------------------------------------------------------------------------
-
- @Override
- public DoubleValues create(IndexReader reader) throws IOException {
- return validate( new DoubleValues(), reader );
- }
-
- @Override
- public synchronized DoubleValues validate(DoubleValues entry, IndexReader reader) throws IOException {
- boolean ok = false;
-
- if( hasOption(OPTION_CACHE_VALUES) ) {
- ok = true;
- if( entry.values == null ) {
- fillDoubleValues(entry, reader, field);
- }
- else {
- assertSameParser( entry, parser );
- }
- }
- if( hasOption(OPTION_CACHE_BITS) ) {
- ok = true;
- if( entry.valid == null ) {
- fillValidBits(entry, reader, field);
- }
- }
- if( !ok ) {
- throw new RuntimeException( "the config must cache values and/or bits" );
- }
- return entry;
- }
-
- protected void fillDoubleValues( DoubleValues vals, IndexReader reader, String field ) throws IOException
- {
- if( parser == null ) {
- try {
- parser = FieldCache.DEFAULT_DOUBLE_PARSER;
- fillDoubleValues( vals, reader, field );
- return;
- }
- catch (NumberFormatException ne) {
- vals.parserHashCode = null; // wipe the previous one
- parser = FieldCache.NUMERIC_UTILS_DOUBLE_PARSER;
- fillDoubleValues( vals, reader, field );
- return;
- }
- }
- setParserAndResetCounts(vals, parser);
-
- Terms terms = MultiFields.getTerms(reader, field);
- int maxDoc = reader.maxDoc();
- vals.values = null;
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
- DocsEnum docs = null;
- try {
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final double termval = parser.parseDouble(term);
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- if(vals.values == null) {
- vals.values = new double[maxDoc];
- }
- vals.values[docID] = termval;
- vals.numDocs++;
- if( validBits != null ) {
- validBits.set( docID );
- }
- }
- vals.numTerms++;
- }
- } catch (FieldCache.StopFillCacheException stop) {}
-
- if( vals.valid == null ) {
- vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
- }
- }
-
- if(vals.values == null) {
- vals.values = new double[maxDoc];
- }
-
- if( vals.valid == null && vals.numDocs < 1 ) {
- vals.valid = new Bits.MatchNoBits( maxDoc );
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/EntryCreator.java b/lucene/src/java/org/apache/lucene/search/cache/EntryCreator.java
deleted file mode 100644
index 43e42f2ff06..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/EntryCreator.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.IndexReader;
-
-/**
- * Create Cached Values for a given key
- *
- * @lucene.experimental
- */
-public abstract class EntryCreator
-{
- public abstract T create( IndexReader reader ) throws IOException;
- public abstract T validate( T entry, IndexReader reader ) throws IOException;
-
- /**
- * Indicate if a cached cached value should be checked before usage.
- * This is useful if an application wants to support subsequent calls
- * to the same cached object that may alter the cached object. If
- * an application wants to avoid this (synchronized) check, it should
- * return 'false'
- *
- * @return 'true' if the Cache should call 'validate' before returning a cached object
- */
- public boolean shouldValidate() {
- return true;
- }
-
- /**
- * @return A key to identify valid cache entries for subsequent requests
- */
- public abstract EntryKey getCacheKey();
-
-
- //------------------------------------------------------------------------
- // The Following code is a hack to make things work while the
- // EntryCreator is stored in in the FieldCache.
- // When the FieldCache is replaced with a simpler map LUCENE-2665
- // This can be removed
- //------------------------------------------------------------------------
-
- @Override
- public boolean equals(Object obj) {
- if( obj instanceof EntryCreator ) {
- return getCacheKey().equals( ((EntryCreator)obj).getCacheKey() );
- }
- return false;
- }
-
- @Override
- public int hashCode() {
- return getCacheKey().hashCode();
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/EntryCreatorWithOptions.java b/lucene/src/java/org/apache/lucene/search/cache/EntryCreatorWithOptions.java
deleted file mode 100644
index 53ffcb53fcb..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/EntryCreatorWithOptions.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package org.apache.lucene.search.cache;
-
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-public abstract class EntryCreatorWithOptions extends EntryCreator
-{
- public static final int OPTION_VALIDATE = 1;
-
- private int flags;
-
- public EntryCreatorWithOptions( int flag ) {
- this.flags = flag;
- }
-
- @Override
- public boolean shouldValidate() {
- return hasOption( OPTION_VALIDATE );
- }
-
- public boolean hasOption( int key )
- {
- return (flags & key) == key;
- }
-
- public void setFlag(int flag) {
- this.flags |= flag;
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/EntryKey.java b/lucene/src/java/org/apache/lucene/search/cache/EntryKey.java
deleted file mode 100644
index 93c0c1a38e4..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/EntryKey.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * A Simple marker class -- Perhaps it could/should just be an Object
- */
-public abstract class EntryKey {
-
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/FloatValuesCreator.java b/lucene/src/java/org/apache/lucene/search/cache/FloatValuesCreator.java
deleted file mode 100644
index 369081cf3fe..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/FloatValuesCreator.java
+++ /dev/null
@@ -1,165 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.FieldCache.FloatParser;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.cache.CachedArray.FloatValues;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public class FloatValuesCreator extends CachedArrayCreator
-{
- protected FloatParser parser;
-
- public FloatValuesCreator( String field, FloatParser parser, int options )
- {
- super( field, options );
- this.parser = parser;
- }
-
- public FloatValuesCreator( String field, FloatParser parser )
- {
- super( field );
- this.parser = parser;
- }
-
- @Override
- public Class getArrayType() {
- return Float.class;
- }
-
- @Override
- public Parser getParser() {
- return parser;
- }
-
- @Override
- public SortField.Type getSortType() {
- return SortField.Type.FLOAT;
- }
-
-
- //--------------------------------------------------------------------------------
- //--------------------------------------------------------------------------------
-
- @Override
- public FloatValues create(IndexReader reader) throws IOException {
- return validate( new FloatValues(), reader );
- }
-
- @Override
- public synchronized FloatValues validate(FloatValues entry, IndexReader reader) throws IOException {
- boolean ok = false;
-
- if( hasOption(OPTION_CACHE_VALUES) ) {
- ok = true;
- if( entry.values == null ) {
- fillFloatValues(entry, reader, field);
- }
- else {
- assertSameParser( entry, parser );
- }
- }
- if( hasOption(OPTION_CACHE_BITS) ) {
- ok = true;
- if( entry.valid == null ) {
- fillValidBits(entry, reader, field);
- }
- }
- if( !ok ) {
- throw new RuntimeException( "the config must cache values and/or bits" );
- }
- return entry;
- }
-
- protected void fillFloatValues( FloatValues vals, IndexReader reader, String field ) throws IOException
- {
- if( parser == null ) {
- try {
- parser = FieldCache.DEFAULT_FLOAT_PARSER;
- fillFloatValues( vals, reader, field );
- return;
- }
- catch (NumberFormatException ne) {
- vals.parserHashCode = null; // wipe the previous one
- parser = FieldCache.NUMERIC_UTILS_FLOAT_PARSER;
- fillFloatValues( vals, reader, field );
- return;
- }
- }
- setParserAndResetCounts(vals, parser);
-
- Terms terms = MultiFields.getTerms(reader, field);
- int maxDoc = reader.maxDoc();
- vals.values = null;
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
- DocsEnum docs = null;
- try {
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final float termval = parser.parseFloat(term);
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- if(vals.values == null) {
- vals.values = new float[maxDoc];
- }
- vals.values[docID] = termval;
- vals.numDocs++;
- if( validBits != null ) {
- validBits.set( docID );
- }
- }
- vals.numTerms++;
- }
- } catch (FieldCache.StopFillCacheException stop) {}
-
- if( vals.valid == null ) {
- vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
- }
- }
-
- if(vals.values == null) {
- vals.values = new float[maxDoc];
- }
-
- if( vals.valid == null && vals.numDocs < 1 ) {
- vals.valid = new Bits.MatchNoBits( maxDoc );
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/IntValuesCreator.java b/lucene/src/java/org/apache/lucene/search/cache/IntValuesCreator.java
deleted file mode 100644
index e39ce77cf1f..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/IntValuesCreator.java
+++ /dev/null
@@ -1,165 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.FieldCache.IntParser;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.cache.CachedArray.IntValues;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public class IntValuesCreator extends CachedArrayCreator
-{
- protected IntParser parser;
-
- public IntValuesCreator( String field, IntParser parser, int options )
- {
- super( field, options );
- this.parser = parser;
- }
-
- public IntValuesCreator( String field, IntParser parser )
- {
- super( field );
- this.parser = parser;
- }
-
- @Override
- public Class getArrayType() {
- return Integer.class;
- }
-
- @Override
- public Parser getParser() {
- return parser;
- }
-
- @Override
- public SortField.Type getSortType() {
- return SortField.Type.INT;
- }
-
-
- //--------------------------------------------------------------------------------
- //--------------------------------------------------------------------------------
-
- @Override
- public IntValues create(IndexReader reader) throws IOException {
- return validate( new IntValues(), reader );
- }
-
- @Override
- public synchronized IntValues validate(IntValues entry, IndexReader reader) throws IOException {
- boolean ok = false;
-
- if( hasOption(OPTION_CACHE_VALUES) ) {
- ok = true;
- if( entry.values == null ) {
- fillIntValues(entry, reader, field);
- }
- else {
- assertSameParser( entry, parser );
- }
- }
- if( hasOption(OPTION_CACHE_BITS) ) {
- ok = true;
- if( entry.valid == null ) {
- fillValidBits(entry, reader, field);
- }
- }
- if( !ok ) {
- throw new RuntimeException( "the config must cache values and/or bits" );
- }
- return entry;
- }
-
- protected void fillIntValues( IntValues vals, IndexReader reader, String field ) throws IOException
- {
- if( parser == null ) {
- try {
- parser = FieldCache.DEFAULT_INT_PARSER;
- fillIntValues( vals, reader, field );
- return;
- }
- catch (NumberFormatException ne) {
- vals.parserHashCode = null;
- parser = FieldCache.NUMERIC_UTILS_INT_PARSER;
- fillIntValues( vals, reader, field );
- return;
- }
- }
- setParserAndResetCounts(vals, parser);
-
- Terms terms = MultiFields.getTerms(reader, field);
- int maxDoc = reader.maxDoc();
- vals.values = null;
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
- DocsEnum docs = null;
- try {
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final int termval = parser.parseInt(term);
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- if(vals.values == null) {
- vals.values = new int[maxDoc];
- }
- vals.values[docID] = termval;
- vals.numDocs++;
- if( validBits != null ) {
- validBits.set( docID );
- }
- }
- vals.numTerms++;
- }
- } catch (FieldCache.StopFillCacheException stop) {}
-
- if( vals.valid == null ) {
- vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
- }
- }
-
- if(vals.values == null) {
- vals.values = new int[maxDoc];
- }
-
- if( vals.valid == null && vals.numDocs < 1 ) {
- vals.valid = new Bits.MatchNoBits( maxDoc );
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/LongValuesCreator.java b/lucene/src/java/org/apache/lucene/search/cache/LongValuesCreator.java
deleted file mode 100644
index 761b45730d3..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/LongValuesCreator.java
+++ /dev/null
@@ -1,165 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.FieldCache.LongParser;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.cache.CachedArray.LongValues;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public class LongValuesCreator extends CachedArrayCreator
-{
- protected LongParser parser;
-
- public LongValuesCreator( String field, LongParser parser, int options )
- {
- super( field, options );
- this.parser = parser;
- }
-
- public LongValuesCreator( String field, LongParser parser )
- {
- super( field );
- this.parser = parser;
- }
-
- @Override
- public Class getArrayType() {
- return Long.class;
- }
-
- @Override
- public Parser getParser() {
- return parser;
- }
-
- @Override
- public SortField.Type getSortType() {
- return SortField.Type.LONG;
- }
-
-
- //--------------------------------------------------------------------------------
- //--------------------------------------------------------------------------------
-
- @Override
- public LongValues create(IndexReader reader) throws IOException {
- return validate( new LongValues(), reader );
- }
-
- @Override
- public synchronized LongValues validate(LongValues entry, IndexReader reader) throws IOException {
- boolean ok = false;
-
- if( hasOption(OPTION_CACHE_VALUES) ) {
- ok = true;
- if( entry.values == null ) {
- fillLongValues(entry, reader, field);
- }
- else {
- assertSameParser( entry, parser );
- }
- }
- if( hasOption(OPTION_CACHE_BITS) ) {
- ok = true;
- if( entry.valid == null ) {
- fillValidBits(entry, reader, field);
- }
- }
- if( !ok ) {
- throw new RuntimeException( "the config must cache values and/or bits" );
- }
- return entry;
- }
-
- protected void fillLongValues( LongValues vals, IndexReader reader, String field ) throws IOException
- {
- if( parser == null ) {
- try {
- parser = FieldCache.DEFAULT_LONG_PARSER;
- fillLongValues( vals, reader, field );
- return;
- }
- catch (NumberFormatException ne) {
- vals.parserHashCode = null; // wipe the previous one
- parser = FieldCache.NUMERIC_UTILS_LONG_PARSER;
- fillLongValues( vals, reader, field );
- return;
- }
- }
- setParserAndResetCounts(vals, parser);
-
- Terms terms = MultiFields.getTerms(reader, field);
- int maxDoc = reader.maxDoc();
- vals.values = null;
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
- DocsEnum docs = null;
- try {
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final long termval = parser.parseLong(term);
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- if(vals.values == null) {
- vals.values = new long[maxDoc];
- }
- vals.values[docID] = termval;
- vals.numDocs++;
- if( validBits != null ) {
- validBits.set( docID );
- }
- }
- vals.numTerms++;
- }
- } catch (FieldCache.StopFillCacheException stop) {}
-
- if( vals.valid == null ) {
- vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
- }
- }
-
- if(vals.values == null) {
- vals.values = new long[maxDoc];
- }
-
- if( vals.valid == null && vals.numDocs < 1 ) {
- vals.valid = new Bits.MatchNoBits( maxDoc );
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/ShortValuesCreator.java b/lucene/src/java/org/apache/lucene/search/cache/ShortValuesCreator.java
deleted file mode 100644
index a628c536596..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/ShortValuesCreator.java
+++ /dev/null
@@ -1,147 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.FieldCache.Parser;
-import org.apache.lucene.search.FieldCache.ShortParser;
-import org.apache.lucene.search.cache.CachedArray.ShortValues;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-
-public class ShortValuesCreator extends CachedArrayCreator
-{
- protected ShortParser parser;
-
- public ShortValuesCreator( String field, ShortParser parser, int options )
- {
- super( field, options );
- this.parser = parser;
- }
-
- public ShortValuesCreator( String field, ShortParser parser )
- {
- super( field );
- this.parser = parser;
- }
-
- @Override
- public Class getArrayType() {
- return Short.class;
- }
-
- @Override
- public Parser getParser() {
- return parser;
- }
-
- @Override
- public SortField.Type getSortType() {
- return SortField.Type.SHORT;
- }
-
-
- //--------------------------------------------------------------------------------
- //--------------------------------------------------------------------------------
-
- @Override
- public ShortValues create(IndexReader reader) throws IOException {
- return validate( new ShortValues(), reader );
- }
-
- @Override
- public synchronized ShortValues validate(ShortValues entry, IndexReader reader) throws IOException {
- boolean ok = false;
-
- if( hasOption(OPTION_CACHE_VALUES) ) {
- ok = true;
- if( entry.values == null ) {
- fillShortValues(entry, reader, field);
- }
- else {
- assertSameParser( entry, parser );
- }
- }
- if( hasOption(OPTION_CACHE_BITS) ) {
- ok = true;
- if( entry.valid == null ) {
- fillValidBits(entry, reader, field);
- }
- }
- if( !ok ) {
- throw new RuntimeException( "the config must cache values and/or bits" );
- }
- return entry;
- }
-
- protected void fillShortValues( ShortValues vals, IndexReader reader, String field ) throws IOException
- {
- if( parser == null ) {
- parser = FieldCache.DEFAULT_SHORT_PARSER;
- }
- setParserAndResetCounts(vals, parser);
-
- Terms terms = MultiFields.getTerms(reader, field);
- int maxDoc = reader.maxDoc();
- vals.values = new short[maxDoc];
- if (terms != null) {
- final TermsEnum termsEnum = terms.iterator();
- FixedBitSet validBits = (hasOption(OPTION_CACHE_BITS)) ? new FixedBitSet( maxDoc ) : null;
- DocsEnum docs = null;
- try {
- while(true) {
- final BytesRef term = termsEnum.next();
- if (term == null) {
- break;
- }
- final short termval = parser.parseShort(term);
- docs = termsEnum.docs(null, docs);
- while (true) {
- final int docID = docs.nextDoc();
- if (docID == DocIdSetIterator.NO_MORE_DOCS) {
- break;
- }
- vals.values[docID] = termval;
- vals.numDocs++;
- if( validBits != null ) {
- validBits.set( docID );
- }
- }
- vals.numTerms++;
- }
- } catch (FieldCache.StopFillCacheException stop) {}
-
- if( vals.valid == null ) {
- vals.valid = checkMatchAllBits( validBits, vals.numDocs, maxDoc );
- }
- }
- if( vals.valid == null && vals.numDocs < 1 ) {
- vals.valid = new Bits.MatchNoBits( maxDoc );
- }
- }
-}
diff --git a/lucene/src/java/org/apache/lucene/search/cache/SimpleEntryKey.java b/lucene/src/java/org/apache/lucene/search/cache/SimpleEntryKey.java
deleted file mode 100644
index 69e2f515d05..00000000000
--- a/lucene/src/java/org/apache/lucene/search/cache/SimpleEntryKey.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-public class SimpleEntryKey extends EntryKey
-{
- public final Class clazz;
- public final Object[] args;
- public final int hash;
-
- public SimpleEntryKey( Class clazz, Object ... args ) {
- this.clazz = clazz;
- this.args = args;
-
- int hash = clazz.hashCode();
- if( args != null ) {
- for( Object obj : args ) {
- hash ^= obj.hashCode();
- }
- }
- this.hash = hash;
- }
-
- @Override
- public boolean equals(Object obj) {
- if( obj instanceof SimpleEntryKey ) {
- SimpleEntryKey key = (SimpleEntryKey)obj;
- if( key.hash != hash ||
- key.clazz != clazz ||
- key.args.length != args.length ) {
- return false;
- }
-
- // In the off chance that the hash etc is all the same
- // we should actually check the values
- for( int i=0; i
-
-
-
-
-
-
-Fieldcache
-
-
diff --git a/lucene/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java b/lucene/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java
index f48c305958a..399697771ed 100644
--- a/lucene/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java
+++ b/lucene/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java
@@ -119,6 +119,13 @@ public final class FieldCacheSanityChecker {
final CacheEntry item = cacheEntries[i];
final Object val = item.getValue();
+ // It's OK to have dup entries, where one is eg
+ // float[] and the other is the Bits (from
+ // getDocWithField())
+ if (val instanceof Bits) {
+ continue;
+ }
+
if (val instanceof FieldCache.CreationPlaceholder)
continue;
diff --git a/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 2fb56da7781..e0c65ccc17f 100644
--- a/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -708,12 +708,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
assertEquals("wrong number of hits", 34, hits.length);
// check decoding into field cache
- int[] fci = FieldCache.DEFAULT.getInts(searcher.getIndexReader(), "trieInt");
+ int[] fci = FieldCache.DEFAULT.getInts(searcher.getIndexReader(), "trieInt", false);
for (int val : fci) {
assertTrue("value in id bounds", val >= 0 && val < 35);
}
- long[] fcl = FieldCache.DEFAULT.getLongs(searcher.getIndexReader(), "trieLong");
+ long[] fcl = FieldCache.DEFAULT.getLongs(searcher.getIndexReader(), "trieLong", false);
for (long val : fcl) {
assertTrue("value in id bounds", val >= 0L && val < 35L);
}
diff --git a/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java b/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
index aef44abdc6f..89ffc8267a2 100644
--- a/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
+++ b/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
@@ -17,7 +17,6 @@ package org.apache.lucene.index;
* limitations under the License.
*/
-import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.ArrayList;
@@ -31,20 +30,8 @@ import org.apache.lucene.document.NumericField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DocTermOrds.TermOrdsIterator;
-import org.apache.lucene.index.codecs.BlockTermsReader;
-import org.apache.lucene.index.codecs.BlockTermsWriter;
import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.codecs.PostingsFormat;
-import org.apache.lucene.index.codecs.FieldsConsumer;
-import org.apache.lucene.index.codecs.FieldsProducer;
-import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
-import org.apache.lucene.index.codecs.FixedGapTermsIndexWriter;
-import org.apache.lucene.index.codecs.PostingsReaderBase;
-import org.apache.lucene.index.codecs.PostingsWriterBase;
-import org.apache.lucene.index.codecs.TermsIndexReaderBase;
-import org.apache.lucene.index.codecs.TermsIndexWriterBase;
-import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsReader;
-import org.apache.lucene.index.codecs.lucene40.Lucene40PostingsWriter;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
@@ -317,7 +304,7 @@ public class TestDocTermOrds extends LuceneTestCase {
_TestUtil.nextInt(random, 2, 10));
- final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id");
+ final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);
/*
for(int docID=0;docID= NUM_ITER) {
+ break;
+ }
+ } else if (op == 1) {
+ Bits docsWithField = cache.getDocsWithField(reader, "sparse");
+ for (int i = 0; i < docsWithField.length(); i++) {
+ assertEquals(i%2 == 0, docsWithField.get(i));
+ }
+ } else {
+ int[] ints = cache.getInts(reader, "sparse", true);
+ Bits docsWithField = cache.getDocsWithField(reader, "sparse");
+ for (int i = 0; i < docsWithField.length(); i++) {
+ if (i%2 == 0) {
+ assertTrue(docsWithField.get(i));
+ assertEquals(i, ints[i]);
+ } else {
+ assertFalse(docsWithField.get(i));
+ }
+ }
+ }
+ }
+ } catch (Throwable t) {
+ failed.set(true);
+ restart.reset();
+ throw new RuntimeException(t);
+ }
+ }
+ };
+ threads[threadIDX].start();
+ }
+
+ for(int threadIDX=0;threadIDX creator;
- Object min;
- Object max;
+ final SortField sortField;
+ final Object min;
+ final Object max;
- SortMissingLastTestHelper( CachedArrayCreator> c, Object min, Object max ) {
- creator = c;
+ SortMissingLastTestHelper(SortField sortField, Object min, Object max) {
+ this.sortField = sortField;
this.min = min;
this.max = max;
}
@@ -364,27 +357,51 @@ public class TestSort extends LuceneTestCase {
// test sorts where the type of field is specified
public void testSortMissingLast() throws Exception {
- SortMissingLastTestHelper[] testers = new SortMissingLastTestHelper[] {
- new SortMissingLastTestHelper( new ByteValuesCreator( "byte", null ), Byte.MIN_VALUE, Byte.MAX_VALUE ),
- new SortMissingLastTestHelper( new ShortValuesCreator( "short", null ), Short.MIN_VALUE, Short.MAX_VALUE ),
- new SortMissingLastTestHelper( new IntValuesCreator( "int", null ), Integer.MIN_VALUE, Integer.MAX_VALUE ),
- new SortMissingLastTestHelper( new LongValuesCreator( "long", null ), Long.MIN_VALUE, Long.MAX_VALUE ),
- new SortMissingLastTestHelper( new FloatValuesCreator( "float", null ), Float.MIN_VALUE, Float.MAX_VALUE ),
- new SortMissingLastTestHelper( new DoubleValuesCreator( "double", null ), Double.MIN_VALUE, Double.MAX_VALUE ),
+ @SuppressWarnings("boxing")
+ SortMissingLastTestHelper[] ascendTesters = new SortMissingLastTestHelper[] {
+ new SortMissingLastTestHelper( new SortField( "byte", SortField.Type.BYTE ), Byte.MIN_VALUE, Byte.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "short", SortField.Type.SHORT ), Short.MIN_VALUE, Short.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "int", SortField.Type.INT ), Integer.MIN_VALUE, Integer.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "long", SortField.Type.LONG ), Long.MIN_VALUE, Long.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "float", SortField.Type.FLOAT ), Float.MIN_VALUE, Float.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "double", SortField.Type.DOUBLE ), Double.MIN_VALUE, Double.MAX_VALUE ),
};
- for( SortMissingLastTestHelper t : testers ) {
- sort.setSort (new SortField( t.creator, false ), SortField.FIELD_DOC );
- assertMatches("creator:"+t.creator, full, queryM, sort, "adbc" );
+ @SuppressWarnings("boxing")
+ SortMissingLastTestHelper[] descendTesters = new SortMissingLastTestHelper[] {
+ new SortMissingLastTestHelper( new SortField( "byte", SortField.Type.BYTE, true ), Byte.MIN_VALUE, Byte.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "short", SortField.Type.SHORT, true ), Short.MIN_VALUE, Short.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "int", SortField.Type.INT, true ), Integer.MIN_VALUE, Integer.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "long", SortField.Type.LONG, true ), Long.MIN_VALUE, Long.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "float", SortField.Type.FLOAT, true ), Float.MIN_VALUE, Float.MAX_VALUE ),
+ new SortMissingLastTestHelper( new SortField( "double", SortField.Type.DOUBLE, true ), Double.MIN_VALUE, Double.MAX_VALUE ),
+ };
+
+ // Default order: ascending
+ for(SortMissingLastTestHelper t : ascendTesters) {
+ sort.setSort(t.sortField, SortField.FIELD_DOC);
+ assertMatches("sortField:"+t.sortField, full, queryM, sort, "adbc");
- sort.setSort (new SortField( t.creator, false ).setMissingValue( t.max ), SortField.FIELD_DOC );
- assertMatches("creator:"+t.creator, full, queryM, sort, "bcad" );
+ sort.setSort(t.sortField.setMissingValue(t.max), SortField.FIELD_DOC);
+ assertMatches("sortField:"+t.sortField, full, queryM, sort, "bcad");
- sort.setSort (new SortField( t.creator, false ).setMissingValue( t.min ), SortField.FIELD_DOC );
- assertMatches("creator:"+t.creator, full, queryM, sort, "adbc" );
+ sort.setSort(t.sortField.setMissingValue(t.min), SortField.FIELD_DOC);
+ assertMatches("sortField:"+t.sortField, full, queryM, sort, "adbc");
+ }
+
+ // Reverse order: descending (Note: Order for un-valued documents remains the same due to tie breaker: a,d)
+ for(SortMissingLastTestHelper t : descendTesters) {
+ sort.setSort(t.sortField, SortField.FIELD_DOC);
+ assertMatches("sortField:"+t.sortField, full, queryM, sort, "cbad");
+
+ sort.setSort(t.sortField.setMissingValue( t.max ), SortField.FIELD_DOC);
+ assertMatches("sortField:"+t.sortField, full, queryM, sort, "adcb");
+
+ sort.setSort(t.sortField.setMissingValue( t.min ), SortField.FIELD_DOC);
+ assertMatches("sortField:"+t.sortField, full, queryM, sort, "cbad");
}
}
-
+
/**
* Test String sorting: small queue to many matches, multi field sort, reverse sort
*/
@@ -572,7 +589,7 @@ public class TestSort extends LuceneTestCase {
@Override
public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
- docValues = FieldCache.DEFAULT.getInts(context.reader, "parser", testIntParser);
+ docValues = FieldCache.DEFAULT.getInts(context.reader, "parser", testIntParser, false);
return this;
}
@@ -1064,7 +1081,7 @@ public class TestSort extends LuceneTestCase {
private void assertMatches(String msg, IndexSearcher searcher, Query query, Sort sort,
String expectedResult) throws IOException {
//ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
- TopDocs hits = searcher.search (query, null, Math.max(1, expectedResult.length()), sort);
+ TopDocs hits = searcher.search(query, null, Math.max(1, expectedResult.length()), sort);
ScoreDoc[] result = hits.scoreDocs;
assertEquals(expectedResult.length(),hits.totalHits);
StringBuilder buff = new StringBuilder(10);
@@ -1076,7 +1093,7 @@ public class TestSort extends LuceneTestCase {
buff.append (v[j].stringValue());
}
}
- assertEquals (msg, expectedResult, buff.toString());
+ assertEquals(msg, expectedResult, buff.toString());
}
public void testEmptyStringVsNullStringSort() throws Exception {
diff --git a/lucene/src/test/org/apache/lucene/search/cache/TestEntryCreators.java b/lucene/src/test/org/apache/lucene/search/cache/TestEntryCreators.java
deleted file mode 100644
index 91690077b5f..00000000000
--- a/lucene/src/test/org/apache/lucene/search/cache/TestEntryCreators.java
+++ /dev/null
@@ -1,234 +0,0 @@
-package org.apache.lucene.search.cache;
-
-/**
- * Copyright 2004 The Apache Software Foundation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Method;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.search.FieldCache.*;
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.FixedBitSet;
-import org.junit.BeforeClass;
-
-import static org.hamcrest.CoreMatchers.*;
-
-public class TestEntryCreators extends LuceneTestCase {
- protected IndexReader reader;
- private static int NUM_DOCS;
- private Directory directory;
-
- @BeforeClass
- public static void beforeClass() throws Exception {
- NUM_DOCS = atLeast(500);
- }
-
- static class NumberTypeTester {
- String funcName;
- Class extends CachedArrayCreator> creator;
- Class extends Parser> parser;
- String field;
- Number[] values;
-
- public NumberTypeTester( String f, String func, Class extends CachedArrayCreator> creator, Class extends Parser> parser ) {
- field = f;
- funcName = func;
- this.creator = creator;
- this.parser = parser;
- values = new Number[NUM_DOCS];
- }
- @Override
- public String toString()
- {
- return field;
- }
- }
- private NumberTypeTester[] typeTests;
-
-
- @Override
- public void setUp() throws Exception {
- super.setUp();
- directory = newDirectory();
- RandomIndexWriter writer= new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
-
- typeTests = new NumberTypeTester[] {
- new NumberTypeTester( "theRandomByte", "getBytes", ByteValuesCreator.class, ByteParser.class ),
- new NumberTypeTester( "theRandomShort", "getShorts", ShortValuesCreator.class, ShortParser.class ),
- new NumberTypeTester( "theRandomInt", "getInts", IntValuesCreator.class, IntParser.class ),
- new NumberTypeTester( "theRandomLong", "getLongs", LongValuesCreator.class, LongParser.class ),
- new NumberTypeTester( "theRandomFloat", "getFloats", FloatValuesCreator.class, FloatParser.class ),
- new NumberTypeTester( "theRandomDouble", "getDoubles", DoubleValuesCreator.class, DoubleParser.class ),
- };
-
- for (int i = 0; i < NUM_DOCS; i++){
- Document doc = new Document();
-
- // Test the valid bits
- for( NumberTypeTester tester : typeTests ) {
- if (random.nextInt(20) != 17 && i > 1) {
- tester.values[i] = 10 + random.nextInt( 20 ); // get some field overlap
- FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
- customType.setTokenized(false);
- doc.add(newField(tester.field, String.valueOf(tester.values[i]), customType));
- }
- }
- writer.addDocument(doc);
- }
-
- reader = writer.getReader();
- writer.close();
- }
-
- @Override
- public void tearDown() throws Exception {
- reader.close();
- directory.close();
- super.tearDown();
- }
-
- public void testKeys() throws IOException {
- // Check that the keys are unique for different fields
-
- EntryKey key_1 = new ByteValuesCreator( "field1", null ).getCacheKey();
- EntryKey key_2 = new ByteValuesCreator( "field2", null ).getCacheKey();
- assertThat("different fields should have a different key", key_1, not(key_2) );
-
- key_1 = new ByteValuesCreator( "field1", null ).getCacheKey();
- key_2 = new ShortValuesCreator( "field1", null ).getCacheKey();
- assertThat( "same field different type should have different key", key_1, not( key_2 ) );
-
- key_1 = new ByteValuesCreator( "ff", null ).getCacheKey();
- key_2 = new ByteValuesCreator( "ff", null ).getCacheKey();
- assertThat( "same args should have same key", key_1, is( key_2 ) );
-
- key_1 = new ByteValuesCreator( "ff", null, ByteValuesCreator.OPTION_CACHE_BITS ^ ByteValuesCreator.OPTION_CACHE_VALUES ).getCacheKey();
- key_2 = new ByteValuesCreator( "ff", null ).getCacheKey();
- assertThat( "different options should share same key", key_1, is( key_2 ) );
-
- key_1 = new IntValuesCreator( "ff", FieldCache.DEFAULT_INT_PARSER ).getCacheKey();
- key_2 = new IntValuesCreator( "ff", FieldCache.NUMERIC_UTILS_INT_PARSER ).getCacheKey();
- assertThat( "diferent parser should have same key", key_1, is( key_2 ) );
- }
-
- private CachedArray getWithReflection( FieldCache cache, NumberTypeTester tester, int flags ) throws IOException
- {
- try {
- Method getXXX = cache.getClass().getMethod( tester.funcName, IndexReader.class, String.class, EntryCreator.class );
- Constructor constructor = tester.creator.getConstructor( String.class, tester.parser, Integer.TYPE );
- CachedArrayCreator creator = (CachedArrayCreator)constructor.newInstance( tester.field, null, flags );
- return (CachedArray) getXXX.invoke(cache, reader, tester.field, creator );
- }
- catch( Exception ex ) {
- throw new RuntimeException( "Reflection failed", ex );
- }
- }
-
- public void testCachedArrays() throws IOException
- {
- FieldCache cache = FieldCache.DEFAULT;
-
- // Check the Different CachedArray Types
- CachedArray last = null;
- CachedArray justbits = null;
- String field;
-
- for( NumberTypeTester tester : typeTests ) {
- justbits = getWithReflection( cache, tester, CachedArrayCreator.OPTION_CACHE_BITS );
- assertNull( "should not get values : "+tester, justbits.getRawArray() );
- assertNotNull( "should get bits : "+tester, justbits.valid );
- last = getWithReflection( cache, tester, CachedArrayCreator.CACHE_VALUES_AND_BITS );
- assertEquals( "should use same cached object : "+tester, justbits, last );
- assertNull( "Validate=false shoudl not regenerate : "+tester, justbits.getRawArray() );
- last = getWithReflection( cache, tester, CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE );
- assertEquals( "should use same cached object : "+tester, justbits, last );
- assertNotNull( "Validate=true should add the Array : "+tester, justbits.getRawArray() );
- checkCachedArrayValuesAndBits( tester, last );
- }
-
- // Now switch the the parser (for the same type) and expect an error
- cache.purgeAllCaches();
- int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS_VALIDATE;
- field = "theRandomInt";
- last = cache.getInts(reader, field, new IntValuesCreator( field, FieldCache.DEFAULT_INT_PARSER, flags ) );
- checkCachedArrayValuesAndBits( typeTests[2], last );
- try {
- cache.getInts(reader, field, new IntValuesCreator( field, FieldCache.NUMERIC_UTILS_INT_PARSER, flags ) );
- fail( "Should fail if you ask for the same type with a different parser : " + field );
- } catch( Exception ex ) {} // expected
-
- field = "theRandomLong";
- last = cache.getLongs(reader, field, new LongValuesCreator( field, FieldCache.DEFAULT_LONG_PARSER, flags ) );
- checkCachedArrayValuesAndBits( typeTests[3], last );
- try {
- cache.getLongs(reader, field, new LongValuesCreator( field, FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ) );
- fail( "Should fail if you ask for the same type with a different parser : " + field );
- } catch( Exception ex ) {} // expected
-
- field = "theRandomFloat";
- last = cache.getFloats(reader, field, new FloatValuesCreator( field, FieldCache.DEFAULT_FLOAT_PARSER, flags ) );
- checkCachedArrayValuesAndBits( typeTests[4], last );
- try {
- cache.getFloats(reader, field, new FloatValuesCreator( field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ) );
- fail( "Should fail if you ask for the same type with a different parser : " + field );
- } catch( Exception ex ) {} // expected
-
- field = "theRandomDouble";
- last = cache.getDoubles(reader, field, new DoubleValuesCreator( field, FieldCache.DEFAULT_DOUBLE_PARSER, flags ) );
- checkCachedArrayValuesAndBits( typeTests[5], last );
- try {
- cache.getDoubles(reader, field, new DoubleValuesCreator( field, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ) );
- fail( "Should fail if you ask for the same type with a different parser : " + field );
- } catch( Exception ex ) {} // expected
- }
-
- private void checkCachedArrayValuesAndBits( NumberTypeTester tester, CachedArray cachedVals )
- {
-// for( int i=0; i distinctTerms = new HashSet();
- for( int i=0; igetInts()
@@ -32,10 +31,17 @@ import java.util.Map;
*
*/
-public class ByteFieldSource extends NumericFieldCacheSource {
+public class ByteFieldSource extends FieldCacheSource {
- public ByteFieldSource(ByteValuesCreator creator) {
- super(creator);
+ private FieldCache.ByteParser parser;
+
+ public ByteFieldSource(String field) {
+ this(field, null);
+ }
+
+ public ByteFieldSource(String field, FieldCache.ByteParser parser) {
+ super(field);
+ this.parser = parser;
}
@Override
@@ -45,8 +51,7 @@ public class ByteFieldSource extends NumericFieldCacheSource {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
- final ByteValues vals = cache.getBytes(readerContext.reader, field, creator);
- final byte[] arr = vals.values;
+ final byte[] arr = cache.getBytes(readerContext.reader, field, parser, false);
return new DocValues() {
@Override
@@ -96,4 +101,19 @@ public class ByteFieldSource extends NumericFieldCacheSource {
};
}
+
+ public boolean equals(Object o) {
+ if (o.getClass() != ByteFieldSource.class) return false;
+ ByteFieldSource
+ other = (ByteFieldSource) o;
+ return super.equals(other)
+ && (this.parser == null ? other.parser == null :
+ this.parser.getClass() == other.parser.getClass());
+ }
+
+ public int hashCode() {
+ int h = parser == null ? Byte.class.hashCode() : parser.getClass().hashCode();
+ h += super.hashCode();
+ return h;
+ }
}
diff --git a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java
index c819a5404bd..fd8826e76b7 100644
--- a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java
+++ b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/DoubleFieldSource.java
@@ -17,20 +17,19 @@
package org.apache.lucene.queries.function.valuesource;
-import org.apache.lucene.index.IndexReader;
+import java.io.IOException;
+import java.util.Map;
+
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
-import org.apache.lucene.search.cache.DoubleValuesCreator;
-import org.apache.lucene.search.cache.CachedArray.DoubleValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDouble;
-import java.io.IOException;
-import java.util.Map;
-
/**
* Obtains float field values from the {@link org.apache.lucene.search.FieldCache}
* using getFloats()
@@ -39,23 +38,27 @@ import java.util.Map;
*
*/
-public class DoubleFieldSource extends NumericFieldCacheSource {
+public class DoubleFieldSource extends FieldCacheSource {
- public DoubleFieldSource(DoubleValuesCreator creator) {
- super(creator);
+ protected FieldCache.DoubleParser parser;
+
+ public DoubleFieldSource(String field) {
+ this(field, null);
+ }
+
+ public DoubleFieldSource(String field, FieldCache.DoubleParser parser) {
+ super(field);
+ this.parser = parser;
}
- @Override
public String description() {
return "double(" + field + ')';
}
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
- final DoubleValues vals = cache.getDoubles(readerContext.reader, field, creator);
- final double[] arr = vals.values;
- final Bits valid = vals.valid;
-
+ final double[] arr = cache.getDoubles(readerContext.reader, field, parser, true);
+ final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new DoubleDocValues(this) {
@Override
public double doubleVal(int doc) {
@@ -148,4 +151,18 @@ public class DoubleFieldSource extends NumericFieldCacheSource {
};
}
+
+ public boolean equals(Object o) {
+    if (o == null || o.getClass() != DoubleFieldSource.class) return false;
+ DoubleFieldSource other = (DoubleFieldSource) o;
+ return super.equals(other)
+ && (this.parser == null ? other.parser == null :
+ this.parser.getClass() == other.parser.getClass());
+ }
+
+ public int hashCode() {
+ int h = parser == null ? Double.class.hashCode() : parser.getClass().hashCode();
+ h += super.hashCode();
+ return h;
+ }
}
diff --git a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java
index 87c2ef95d54..f43a7d2b616 100644
--- a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java
+++ b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/FloatFieldSource.java
@@ -23,9 +23,8 @@ import java.util.Map;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.docvalues.FloatDocValues;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
-import org.apache.lucene.search.cache.FloatValuesCreator;
-import org.apache.lucene.search.cache.CachedArray.FloatValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueFloat;
@@ -37,23 +36,28 @@ import org.apache.lucene.util.mutable.MutableValueFloat;
*
*/
-public class FloatFieldSource extends NumericFieldCacheSource {
+public class FloatFieldSource extends FieldCacheSource {
- public FloatFieldSource(FloatValuesCreator creator) {
- super(creator);
+ protected FieldCache.FloatParser parser;
+
+ public FloatFieldSource(String field) {
+ this(field, null);
+ }
+
+ public FloatFieldSource(String field, FieldCache.FloatParser parser) {
+ super(field);
+ this.parser = parser;
}
- @Override
public String description() {
return "float(" + field + ')';
}
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
- final FloatValues vals = cache.getFloats(readerContext.reader, field, creator);
- final float[] arr = vals.values;
- final Bits valid = vals.valid;
-
+ final float[] arr = cache.getFloats(readerContext.reader, field, parser, true);
+ final Bits valid = cache.getDocsWithField(readerContext.reader, field);
+
return new FloatDocValues(this) {
@Override
public float floatVal(int doc) {
@@ -91,4 +95,18 @@ public class FloatFieldSource extends NumericFieldCacheSource {
};
}
+
+ public boolean equals(Object o) {
+    if (o == null || o.getClass() != FloatFieldSource.class) return false;
+ FloatFieldSource other = (FloatFieldSource)o;
+ return super.equals(other)
+ && (this.parser==null ? other.parser==null :
+ this.parser.getClass() == other.parser.getClass());
+ }
+
+ public int hashCode() {
+ int h = parser==null ? Float.class.hashCode() : parser.getClass().hashCode();
+ h += super.hashCode();
+ return h;
+  }
}
diff --git a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java
index 33c8b559b62..ea547401cf5 100644
--- a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java
+++ b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/IntFieldSource.java
@@ -17,20 +17,19 @@
package org.apache.lucene.queries.function.valuesource;
-import org.apache.lucene.index.IndexReader;
+import java.io.IOException;
+import java.util.Map;
+
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.IntDocValues;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
-import org.apache.lucene.search.cache.IntValuesCreator;
-import org.apache.lucene.search.cache.CachedArray.IntValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueInt;
-import java.io.IOException;
-import java.util.Map;
-
/**
* Obtains int field values from the {@link org.apache.lucene.search.FieldCache}
* using getInts()
@@ -38,10 +37,16 @@ import java.util.Map;
*
*/
-public class IntFieldSource extends NumericFieldCacheSource {
+public class IntFieldSource extends FieldCacheSource {
+ final FieldCache.IntParser parser;
- public IntFieldSource(IntValuesCreator creator) {
- super(creator);
+ public IntFieldSource(String field) {
+ this(field, null);
+ }
+
+ public IntFieldSource(String field, FieldCache.IntParser parser) {
+ super(field);
+ this.parser = parser;
}
@Override
@@ -52,9 +57,8 @@ public class IntFieldSource extends NumericFieldCacheSource {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
- final IntValues vals = cache.getInts(readerContext.reader, field, creator);
- final int[] arr = vals.values;
- final Bits valid = vals.valid;
+ final int[] arr = cache.getInts(readerContext.reader, field, parser, true);
+ final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new IntDocValues(this) {
final MutableValueInt val = new MutableValueInt();
@@ -155,4 +159,18 @@ public class IntFieldSource extends NumericFieldCacheSource {
};
}
+
+ public boolean equals(Object o) {
+    if (o == null || o.getClass() != IntFieldSource.class) return false;
+ IntFieldSource other = (IntFieldSource)o;
+ return super.equals(other)
+ && (this.parser==null ? other.parser==null :
+ this.parser.getClass() == other.parser.getClass());
+ }
+
+ public int hashCode() {
+ int h = parser==null ? Integer.class.hashCode() : parser.getClass().hashCode();
+ h += super.hashCode();
+ return h;
+  }
}
diff --git a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java
index f3abd5f5c7a..a3968960c1d 100644
--- a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java
+++ b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/LongFieldSource.java
@@ -17,21 +17,19 @@
package org.apache.lucene.queries.function.valuesource;
-import org.apache.lucene.index.IndexReader;
+import java.io.IOException;
+import java.util.Map;
+
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queries.function.DocValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.LongDocValues;
+import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;
-import org.apache.lucene.search.cache.LongValuesCreator;
-import org.apache.lucene.search.cache.CachedArray.LongValues;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueLong;
-
-import java.io.IOException;
-import java.util.Map;
-
/**
* Obtains float field values from the {@link org.apache.lucene.search.FieldCache}
* using getFloats()
@@ -40,10 +38,17 @@ import java.util.Map;
*
*/
-public class LongFieldSource extends NumericFieldCacheSource {
+public class LongFieldSource extends FieldCacheSource {
- public LongFieldSource(LongValuesCreator creator) {
- super(creator);
+ protected FieldCache.LongParser parser;
+
+ public LongFieldSource(String field) {
+ this(field, null);
+ }
+
+ public LongFieldSource(String field, FieldCache.LongParser parser) {
+ super(field);
+ this.parser = parser;
}
@Override
@@ -61,9 +66,8 @@ public class LongFieldSource extends NumericFieldCacheSource {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
- final LongValues vals = cache.getLongs(readerContext.reader, field, creator);
- final long[] arr = vals.values;
- final Bits valid = vals.valid;
+ final long[] arr = cache.getLongs(readerContext.reader, field, parser, true);
+ final Bits valid = cache.getDocsWithField(readerContext.reader, field);
return new LongDocValues(this) {
@Override
@@ -141,4 +145,17 @@ public class LongFieldSource extends NumericFieldCacheSource {
return new MutableValueLong();
}
+ public boolean equals(Object o) {
+    if (o == null || o.getClass() != this.getClass()) return false;
+ LongFieldSource other = (LongFieldSource) o;
+ return super.equals(other)
+ && (this.parser == null ? other.parser == null :
+ this.parser.getClass() == other.parser.getClass());
+ }
+
+ public int hashCode() {
+ int h = parser == null ? this.getClass().hashCode() : parser.getClass().hashCode();
+ h += super.hashCode();
+ return h;
+ }
}
diff --git a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/NumericFieldCacheSource.java b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/NumericFieldCacheSource.java
deleted file mode 100644
index fee2f63ab27..00000000000
--- a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/NumericFieldCacheSource.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.lucene.queries.function.valuesource;
-
-import org.apache.lucene.search.cache.CachedArray;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-
-/**
- *
- *
- */
-public abstract class NumericFieldCacheSource extends FieldCacheSource {
- protected final CachedArrayCreator creator;
-
- public NumericFieldCacheSource( CachedArrayCreator creator ) {
- super( creator.field );
- this.creator = creator;
- }
-
- @Override
- public final boolean equals(Object o) {
- if (o.getClass() != this.getClass()) return false;
- NumericFieldCacheSource other = (NumericFieldCacheSource) o;
- return super.equals(other)
- && (this.creator == null ? other.creator == null :
- this.creator.getClass() == other.creator.getClass());
- }
-
- @Override
- public final int hashCode() {
- int h = creator == null ? this.getClass().hashCode() : creator.getClass().hashCode();
- h += super.hashCode();
- return h;
- }
-}
diff --git a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/ShortFieldSource.java b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/ShortFieldSource.java
index 2284175cf5a..46c99a9a5e7 100644
--- a/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/ShortFieldSource.java
+++ b/modules/queries/src/java/org/apache/lucene/queries/function/valuesource/ShortFieldSource.java
@@ -16,25 +16,30 @@ package org.apache.lucene.queries.function.valuesource;
* limitations under the License.
*/
-import org.apache.lucene.queries.function.DocValues;
-import org.apache.lucene.search.cache.ShortValuesCreator;
-import org.apache.lucene.search.cache.CachedArray.ShortValues;
-import org.apache.lucene.index.IndexReader.AtomicReaderContext;
-
import java.io.IOException;
import java.util.Map;
+import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.search.FieldCache;
+
/**
*
*
**/
-public class ShortFieldSource extends NumericFieldCacheSource {
+public class ShortFieldSource extends FieldCacheSource {
- public ShortFieldSource(ShortValuesCreator creator) {
- super(creator);
+ final FieldCache.ShortParser parser;
+
+ public ShortFieldSource(String field) {
+ this(field, null);
}
+ public ShortFieldSource(String field, FieldCache.ShortParser parser) {
+ super(field);
+ this.parser = parser;
+ }
@Override
public String description() {
@@ -43,8 +48,7 @@ public class ShortFieldSource extends NumericFieldCacheSource {
@Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
- final ShortValues vals = cache.getShorts(readerContext.reader, field, creator);
- final short[] arr = vals.values;
+ final short[] arr = cache.getShorts(readerContext.reader, field, parser, false);
return new DocValues() {
@Override
@@ -89,4 +93,19 @@ public class ShortFieldSource extends NumericFieldCacheSource {
};
}
+
+ public boolean equals(Object o) {
+    if (o == null || o.getClass() != ShortFieldSource.class) return false;
+ ShortFieldSource
+ other = (ShortFieldSource) o;
+ return super.equals(other)
+ && (this.parser == null ? other.parser == null :
+ this.parser.getClass() == other.parser.getClass());
+ }
+
+ public int hashCode() {
+ int h = parser == null ? Short.class.hashCode() : parser.getClass().hashCode();
+ h += super.hashCode();
+ return h;
+ }
}
diff --git a/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java b/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
index 779d296b532..ecaf9ce6d48 100755
--- a/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
+++ b/modules/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java
@@ -22,7 +22,6 @@ import org.apache.lucene.queries.function.FunctionTestSetup;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.search.*;
-import org.apache.lucene.search.cache.*;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
@@ -77,9 +76,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
@Test
public void testCustomScoreFloat() throws Exception {
// INT field can be parsed as float
- FloatValuesCreator valuesCreator = new FloatValuesCreator(INT_FIELD, null, CachedArrayCreator.CACHE_VALUES_AND_BITS);
- FloatFieldSource fieldSource = new FloatFieldSource(valuesCreator);
-
doTestCustomScore(INT_AS_FLOAT_VALUESOURCE, 1.0);
doTestCustomScore(INT_AS_FLOAT_VALUESOURCE, 5.0);
@@ -177,7 +173,7 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
@Override
protected CustomScoreProvider getCustomScoreProvider(AtomicReaderContext context) throws IOException {
- final int[] values = FieldCache.DEFAULT.getInts(context.reader, INT_FIELD);
+ final int[] values = FieldCache.DEFAULT.getInts(context.reader, INT_FIELD, false);
return new CustomScoreProvider(context) {
@Override
public float customScore(int doc, float subScore, float valSrcScore) throws IOException {
@@ -237,8 +233,8 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
// Test that FieldScoreQuery returns docs with expected score.
private void doTestCustomScore(ValueSource valueSource, double dboost) throws Exception {
- FunctionQuery functionQuery = new FunctionQuery(valueSource);
float boost = (float) dboost;
+ FunctionQuery functionQuery = new FunctionQuery(valueSource);
IndexSearcher s = new IndexSearcher(dir, true);
// regular (boolean) query.
diff --git a/modules/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java b/modules/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java
index c4aced439ab..8446321edef 100644
--- a/modules/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java
+++ b/modules/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java
@@ -12,7 +12,6 @@ import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
-import org.apache.lucene.search.cache.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@@ -54,13 +53,11 @@ public abstract class FunctionTestSetup extends LuceneTestCase {
protected static final String INT_FIELD = "iii";
protected static final String FLOAT_FIELD = "fff";
- private static final int CREATOR_FLAGS = CachedArrayCreator.CACHE_VALUES_AND_BITS;
-
- protected ValueSource BYTE_VALUESOURCE = new ByteFieldSource(new ByteValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
- protected ValueSource SHORT_VALUESOURCE = new ShortFieldSource(new ShortValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
- protected ValueSource INT_VALUESOURCE = new IntFieldSource(new IntValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
- protected ValueSource INT_AS_FLOAT_VALUESOURCE = new FloatFieldSource(new FloatValuesCreator(INT_FIELD, null, CREATOR_FLAGS));
- protected ValueSource FLOAT_VALUESOURCE = new FloatFieldSource(new FloatValuesCreator(FLOAT_FIELD, null, CREATOR_FLAGS));
+ protected ValueSource BYTE_VALUESOURCE = new ByteFieldSource(INT_FIELD);
+ protected ValueSource SHORT_VALUESOURCE = new ShortFieldSource(INT_FIELD);
+ protected ValueSource INT_VALUESOURCE = new IntFieldSource(INT_FIELD);
+ protected ValueSource INT_AS_FLOAT_VALUESOURCE = new FloatFieldSource(INT_FIELD);
+ protected ValueSource FLOAT_VALUESOURCE = new FloatFieldSource(FLOAT_FIELD);
private static final String DOC_TEXT_LINES[] = {
"Well, this is just some plain text we use for creating the ",
diff --git a/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java b/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java
index 9e2e1d2ddfc..9cc5e965a34 100755
--- a/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java
+++ b/modules/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java
@@ -27,7 +27,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.cache.*;
import org.junit.BeforeClass;
import org.junit.Test;
diff --git a/solr/core/src/java/org/apache/solr/schema/ByteField.java b/solr/core/src/java/org/apache/solr/schema/ByteField.java
index 37ad2796119..f584bf9528b 100644
--- a/solr/core/src/java/org/apache/solr/schema/ByteField.java
+++ b/solr/core/src/java/org/apache/solr/schema/ByteField.java
@@ -20,8 +20,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ByteFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.cache.ByteValuesCreator;
-import org.apache.lucene.search.cache.CachedArrayCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@@ -48,7 +46,7 @@ public class ByteField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new ByteFieldSource( new ByteValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
+ return new ByteFieldSource(field.name);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/schema/DoubleField.java b/solr/core/src/java/org/apache/solr/schema/DoubleField.java
index d7d7d0c1243..22e34c96407 100644
--- a/solr/core/src/java/org/apache/solr/schema/DoubleField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DoubleField.java
@@ -21,8 +21,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-import org.apache.lucene.search.cache.DoubleValuesCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@@ -48,7 +46,7 @@ public class DoubleField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new DoubleFieldSource( new DoubleValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
+ return new DoubleFieldSource(field.name);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/schema/FloatField.java b/solr/core/src/java/org/apache/solr/schema/FloatField.java
index 15ed002e644..e784d70d756 100644
--- a/solr/core/src/java/org/apache/solr/schema/FloatField.java
+++ b/solr/core/src/java/org/apache/solr/schema/FloatField.java
@@ -20,8 +20,6 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-import org.apache.lucene.search.cache.FloatValuesCreator;
import org.apache.solr.search.QParser;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.response.TextResponseWriter;
@@ -46,7 +44,7 @@ public class FloatField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new FloatFieldSource( new FloatValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
+ return new FloatFieldSource(field.name);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/schema/IntField.java b/solr/core/src/java/org/apache/solr/schema/IntField.java
index 72bc5971f58..3c1b1944cd8 100644
--- a/solr/core/src/java/org/apache/solr/schema/IntField.java
+++ b/solr/core/src/java/org/apache/solr/schema/IntField.java
@@ -20,8 +20,6 @@ package org.apache.solr.schema;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-import org.apache.lucene.search.cache.IntValuesCreator;
import org.apache.solr.search.QParser;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.response.TextResponseWriter;
@@ -46,7 +44,7 @@ public class IntField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new IntFieldSource(new IntValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
+ return new IntFieldSource(field.name);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/schema/LongField.java b/solr/core/src/java/org/apache/solr/schema/LongField.java
index 6c3a9dc7b1a..7c8e066ac45 100644
--- a/solr/core/src/java/org/apache/solr/schema/LongField.java
+++ b/solr/core/src/java/org/apache/solr/schema/LongField.java
@@ -21,8 +21,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-import org.apache.lucene.search.cache.LongValuesCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@@ -48,7 +46,7 @@ public class LongField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new LongFieldSource( new LongValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
+ return new LongFieldSource(field.name);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/schema/ShortField.java b/solr/core/src/java/org/apache/solr/schema/ShortField.java
index a68eb202215..7520d58d21c 100644
--- a/solr/core/src/java/org/apache/solr/schema/ShortField.java
+++ b/solr/core/src/java/org/apache/solr/schema/ShortField.java
@@ -20,8 +20,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.ShortFieldSource;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-import org.apache.lucene.search.cache.ShortValuesCreator;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
@@ -51,7 +49,7 @@ public class ShortField extends FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- return new ShortFieldSource(new ShortValuesCreator( field.name, null, CachedArrayCreator.CACHE_VALUES_AND_BITS ) );
+ return new ShortFieldSource(field.name);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/schema/TrieField.java b/solr/core/src/java/org/apache/solr/schema/TrieField.java
index b9b4f36594a..57246f27756 100644
--- a/solr/core/src/java/org/apache/solr/schema/TrieField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TrieField.java
@@ -20,16 +20,12 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.*;
-import org.apache.lucene.search.cache.CachedArrayCreator;
-import org.apache.lucene.search.cache.DoubleValuesCreator;
+
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
-import org.apache.lucene.search.cache.FloatValuesCreator;
-import org.apache.lucene.search.cache.IntValuesCreator;
-import org.apache.lucene.search.cache.LongValuesCreator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.NumericUtils;
@@ -137,7 +133,6 @@ public class TrieField extends org.apache.solr.schema.FieldType {
public SortField getSortField(SchemaField field, boolean top) {
field.checkSortability();
- int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS;
Object missingValue = null;
boolean sortMissingLast = field.sortMissingLast();
boolean sortMissingFirst = field.sortMissingFirst();
@@ -150,8 +145,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Integer.MAX_VALUE : Integer.MIN_VALUE;
}
- return new SortField( new IntValuesCreator( field.getName(),
- FieldCache.NUMERIC_UTILS_INT_PARSER, flags ), top).setMissingValue( missingValue );
+ return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER, top).setMissingValue(missingValue);
case FLOAT:
if( sortMissingLast ) {
@@ -160,8 +154,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Float.POSITIVE_INFINITY : Float.NEGATIVE_INFINITY;
}
- return new SortField( new FloatValuesCreator( field.getName(),
- FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ), top).setMissingValue( missingValue );
+ return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER, top).setMissingValue(missingValue);
case DATE: // fallthrough
case LONG:
@@ -171,8 +164,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Long.MAX_VALUE : Long.MIN_VALUE;
}
- return new SortField( new LongValuesCreator( field.getName(),
- FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ), top).setMissingValue( missingValue );
+ return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, top).setMissingValue(missingValue);
case DOUBLE:
if( sortMissingLast ) {
@@ -181,8 +173,7 @@ public class TrieField extends org.apache.solr.schema.FieldType {
else if( sortMissingFirst ) {
missingValue = top ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
}
- return new SortField( new DoubleValuesCreator( field.getName(),
- FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ), top).setMissingValue( missingValue );
+ return new SortField( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, top).setMissingValue(missingValue);
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
@@ -192,18 +183,17 @@ public class TrieField extends org.apache.solr.schema.FieldType {
@Override
public ValueSource getValueSource(SchemaField field, QParser qparser) {
field.checkFieldCacheSource(qparser);
- int flags = CachedArrayCreator.CACHE_VALUES_AND_BITS;
switch (type) {
case INTEGER:
- return new IntFieldSource( new IntValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER, flags ) );
+ return new IntFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER );
case FLOAT:
- return new FloatFieldSource( new FloatValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER, flags ));
+ return new FloatFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER );
case DATE:
- return new TrieDateFieldSource( new LongValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ));
+ return new TrieDateFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
case LONG:
- return new LongFieldSource( new LongValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER, flags ) );
+ return new LongFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
case DOUBLE:
- return new DoubleFieldSource( new DoubleValuesCreator( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, flags ));
+ return new DoubleFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER );
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
}
@@ -573,8 +563,8 @@ public class TrieField extends org.apache.solr.schema.FieldType {
class TrieDateFieldSource extends LongFieldSource {
- public TrieDateFieldSource(LongValuesCreator creator) {
- super(creator);
+ public TrieDateFieldSource(String field, FieldCache.LongParser parser) {
+ super(field, parser);
}
@Override