mirror of https://github.com/apache/lucene.git

commit 6735e95b1f
parent 160630e394

    fix compilation

    git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1410865 13f79535-47bb-0310-9956-ffa450edef68
FieldCache.java:

@@ -42,6 +42,7 @@ import org.apache.lucene.util.packed.PackedInts;
  * @since lucene 1.4
  * @see org.apache.lucene.util.FieldCacheSanityChecker
  */
+// nocommit abstract class...?
 public interface FieldCache {
 
   public static abstract class Bytes {
@@ -303,8 +304,7 @@ public interface FieldCache {
    * <code>reader.maxDoc()</code>, with turned on bits for each docid that
    * does have a value for this field.
    */
-  public Bits getDocsWithField(AtomicReader reader, String field)
-  throws IOException;
+  public Bits getDocsWithField(AtomicReader reader, String field) throws IOException;
 
   /** Checks the internal cache for an appropriate entry, and if none is
    * found, reads the terms in <code>field</code> as a single byte and returns an array
@@ -477,8 +477,7 @@ public interface FieldCache {
    * @return The values in the given field for each document.
    * @throws IOException If any error occurs.
    */
-  public Doubles getDoubles(AtomicReader reader, String field, DoubleParser parser, boolean setDocsWithField)
-  throws IOException;
+  public Doubles getDoubles(AtomicReader reader, String field, DoubleParser parser, boolean setDocsWithField) throws IOException;
 
   /** Returned by {@link #getTerms} */
   public abstract static class DocTerms {
@@ -513,8 +512,7 @@ public interface FieldCache {
    * faster lookups (default is "true"). Note that the
    * first call for a given reader and field "wins",
    * subsequent calls will share the same cache entry. */
-  public DocTerms getTerms (AtomicReader reader, String field, float acceptableOverheadRatio)
-  throws IOException;
+  public DocTerms getTerms (AtomicReader reader, String field, float acceptableOverheadRatio) throws IOException;
 
   /** Returned by {@link #getTermsIndex} */
   public abstract static class DocTermsIndex {
@@ -581,8 +579,7 @@ public interface FieldCache {
    * @return The values in the given field for each document.
    * @throws IOException If any error occurs.
    */
-  public DocTermsIndex getTermsIndex (AtomicReader reader, String field)
-  throws IOException;
+  public DocTermsIndex getTermsIndex (AtomicReader reader, String field) throws IOException;
 
   /** Expert: just like {@link
    *  #getTermsIndex(AtomicReader,String)}, but you can specify
@@ -590,8 +587,7 @@ public interface FieldCache {
    * faster lookups (default is "true"). Note that the
    * first call for a given reader and field "wins",
    * subsequent calls will share the same cache entry. */
-  public DocTermsIndex getTermsIndex (AtomicReader reader, String field, float acceptableOverheadRatio)
-  throws IOException;
+  public DocTermsIndex getTermsIndex (AtomicReader reader, String field, float acceptableOverheadRatio) throws IOException;
 
   /**
    * Checks the internal cache for an appropriate entry, and if none is found, reads the term values
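The collapsed declarations above read more naturally at call sites. A minimal caller-side sketch in Java, assuming the parser-less getDoubles overload that the spatial call sites later in this commit use; the field name and the summing loop are illustrative only:

import java.io.IOException;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;

class FieldCacheSumExample {
  // Sum a numeric field across one segment, skipping documents that have no value.
  static double sumField(AtomicReader reader, String field) throws IOException {
    FieldCache.Doubles values = FieldCache.DEFAULT.getDoubles(reader, field, true);
    Bits docsWithValue = FieldCache.DEFAULT.getDocsWithField(reader, field);
    double total = 0;
    for (int doc = 0; doc < reader.maxDoc(); doc++) {
      if (docsWithValue.get(doc)) {   // only count docs that actually have the field
        total += values.get(doc);     // per-document accessor replaces raw double[] indexing
      }
    }
    return total;
  }
}

Returning an accessor object instead of a bare double[] presumably leaves implementations free to back the values with something other than a plain array; the GrowableWriter/PackedInts imports in FieldCacheImpl below point in that direction.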
FieldCache.java (continued):

@@ -610,15 +606,44 @@ public interface FieldCache {
    * Can be useful for logging/debugging.
    * @lucene.experimental
    */
-  public static abstract class CacheEntry {
-    public abstract Object getReaderKey();
-    public abstract String getFieldName();
-    public abstract Class<?> getCacheType();
-    public abstract Object getCustom();
-    public abstract Object getValue();
-    private String size = null;
-    protected final void setEstimatedSize(String size) {
-      this.size = size;
+  public final class CacheEntry {
+
+    private final Object readerKey;
+    private final String fieldName;
+    private final Class<?> cacheType;
+    private final Object custom;
+    private final Object value;
+    private String size;
+
+    public CacheEntry(Object readerKey, String fieldName,
+                      Class<?> cacheType,
+                      Object custom,
+                      Object value) {
+      this.readerKey = readerKey;
+      this.fieldName = fieldName;
+      this.cacheType = cacheType;
+      this.custom = custom;
+      this.value = value;
+    }
+
+    public Object getReaderKey() {
+      return readerKey;
+    }
+
+    public String getFieldName() {
+      return fieldName;
+    }
+
+    public Class<?> getCacheType() {
+      return cacheType;
+    }
+
+    public Object getCustom() {
+      return custom;
+    }
+
+    public Object getValue() {
+      return value;
     }
 
     /**
@@ -626,8 +651,8 @@ public interface FieldCache {
      * @see #getEstimatedSize
      */
    public void estimateSize() {
-      long size = RamUsageEstimator.sizeOf(getValue());
-      setEstimatedSize(RamUsageEstimator.humanReadableUnits(size));
+      long bytesUsed = RamUsageEstimator.sizeOf(getValue());
+      size = RamUsageEstimator.humanReadableUnits(bytesUsed);
     }
 
     /**
@@ -638,7 +663,6 @@ public interface FieldCache {
       return size;
     }
 
-
    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
@@ -655,7 +679,6 @@ public interface FieldCache {
 
      return b.toString();
    }
-
  }
 
  /**
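Now that CacheEntry is a concrete value class, callers read it directly instead of subclassing it. A hedged sketch of dumping the current cache contents; getCacheEntries() and the estimateSize()/getEstimatedSize() pair come from this commit, while the printing itself is illustrative:

import org.apache.lucene.search.FieldCache;

class FieldCacheReport {
  public static void main(String[] args) {
    for (FieldCache.CacheEntry entry : FieldCache.DEFAULT.getCacheEntries()) {
      entry.estimateSize(); // fills in the human-readable size via RamUsageEstimator
      System.out.println(entry.getFieldName()
          + " [" + entry.getCacheType().getSimpleName() + "]: "
          + entry.getEstimatedSize());
    }
  }
}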
FieldCacheImpl.java:

@@ -45,7 +45,7 @@ import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.packed.GrowableWriter;
 import org.apache.lucene.util.packed.PackedInts;
 
-// nocommit rename to UninvertFieldCache or something ...
+// nocommit rename to UninvertFieldCacheImpl or something ...
 
 /**
  * Expert: The default cache implementation, storing all values in memory.
@@ -59,6 +59,7 @@ class FieldCacheImpl implements FieldCache {
   FieldCacheImpl() {
     init();
   }
+
   private synchronized void init() {
     caches = new HashMap<Class<?>,Cache>(9);
     caches.put(Byte.TYPE, new ByteCache(this));
@@ -73,16 +74,19 @@ class FieldCacheImpl implements FieldCache {
     caches.put(DocsWithFieldCache.class, new DocsWithFieldCache(this));
   }
 
+  @Override
   public synchronized void purgeAllCaches() {
     init();
   }
 
+  @Override
   public synchronized void purge(AtomicReader r) {
     for(Cache c : caches.values()) {
       c.purge(r);
     }
   }
 
+  @Override
   public synchronized CacheEntry[] getCacheEntries() {
     List<CacheEntry> result = new ArrayList<CacheEntry>(17);
     for(final Map.Entry<Class<?>,Cache> cacheEntry: caches.entrySet()) {
@@ -95,7 +99,7 @@ class FieldCacheImpl implements FieldCache {
         final Map<Entry, Object> innerCache = readerCacheEntry.getValue();
         for (final Map.Entry<Entry, Object> mapEntry : innerCache.entrySet()) {
           Entry entry = mapEntry.getKey();
-          result.add(new CacheEntryImpl(readerKey, entry.field,
+          result.add(new CacheEntry(readerKey, entry.field,
                                         cacheType, entry.custom,
                                         mapEntry.getValue()));
         }
@@ -105,40 +109,6 @@ class FieldCacheImpl implements FieldCache {
     return result.toArray(new CacheEntry[result.size()]);
   }
 
-  private static final class CacheEntryImpl extends CacheEntry {
-    private final Object readerKey;
-    private final String fieldName;
-    private final Class<?> cacheType;
-    private final Object custom;
-    private final Object value;
-    CacheEntryImpl(Object readerKey, String fieldName,
-                   Class<?> cacheType,
-                   Object custom,
-                   Object value) {
-      this.readerKey = readerKey;
-      this.fieldName = fieldName;
-      this.cacheType = cacheType;
-      this.custom = custom;
-      this.value = value;
-
-      // :HACK: for testing.
-      // if (null != locale || SortField.CUSTOM != sortFieldType) {
-      //   throw new RuntimeException("Locale/sortFieldType: " + this);
-      // }
-
-    }
-    @Override
-    public Object getReaderKey() { return readerKey; }
-    @Override
-    public String getFieldName() { return fieldName; }
-    @Override
-    public Class<?> getCacheType() { return cacheType; }
-    @Override
-    public Object getCustom() { return custom; }
-    @Override
-    public Object getValue() { return value; }
-  }
-
   // per-segment fieldcaches don't purge until the shared core closes.
   final SegmentReader.CoreClosedListener purgeCore = new SegmentReader.CoreClosedListener() {
     @Override
@@ -382,7 +352,8 @@ class FieldCacheImpl implements FieldCache {
     protected abstract void visitDoc(int docID);
   }
 
-  private static class BytesFromArray extends Bytes {
+  // nocommit move up?
+  static class BytesFromArray extends Bytes {
     private final byte[] values;
 
     public BytesFromArray(byte[] values) {
@@ -450,7 +421,8 @@ class FieldCacheImpl implements FieldCache {
     return (Shorts) caches.get(Short.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
   }
 
-  private static class ShortsFromArray extends Shorts {
+  // nocommit move up?
+  static class ShortsFromArray extends Shorts {
     private final short[] values;
 
     public ShortsFromArray(short[] values) {
@@ -539,7 +511,8 @@ class FieldCacheImpl implements FieldCache {
     return (Ints) caches.get(Integer.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
   }
 
-  private static class IntsFromArray extends Ints {
+  // nocommit move up?
+  static class IntsFromArray extends Ints {
     private final int[] values;
 
     public IntsFromArray(int[] values) {
@@ -608,6 +581,7 @@ class FieldCacheImpl implements FieldCache {
     return (Bits) caches.get(DocsWithFieldCache.class).get(reader, new Entry(field, null), false);
   }
 
+  // nocommit move up?
   static final class DocsWithFieldCache extends Cache {
     DocsWithFieldCache(FieldCacheImpl wrapper) {
       super(wrapper);
@@ -675,7 +649,8 @@ class FieldCacheImpl implements FieldCache {
     return (Floats) caches.get(Float.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
   }
 
-  private static class FloatsFromArray extends Floats {
+  // nocommit move up?
+  static class FloatsFromArray extends Floats {
     private final float[] values;
 
     public FloatsFromArray(float[] values) {
@@ -750,7 +725,8 @@ class FieldCacheImpl implements FieldCache {
     return (Longs) caches.get(Long.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
   }
 
-  private static class LongsFromArray extends Longs {
+  // nocommit move up?
+  static class LongsFromArray extends Longs {
     private final long[] values;
 
     public LongsFromArray(long[] values) {
@@ -826,7 +802,8 @@ class FieldCacheImpl implements FieldCache {
     return (Doubles) caches.get(Double.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
   }
 
-  private static class DoublesFromArray extends Doubles {
+  // nocommit move up?
+  static class DoublesFromArray extends Doubles {
     private final double[] values;
 
     public DoublesFromArray(double[] values) {
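The *FromArray classes tagged with "// nocommit move up?" all wrap a plain primitive array behind the new accessor types. A sketch of the double flavor, written outside the class for illustration and assuming FieldCache.Doubles can be subclassed directly with double get(int docID) as its only abstract method; the hunk above only shows the field and constructor, so the get() body here is a guess:

import org.apache.lucene.search.FieldCache;

// Hypothetical stand-in for FieldCacheImpl.DoublesFromArray.
class DoublesFromArraySketch extends FieldCache.Doubles {
  private final double[] values;

  public DoublesFromArraySketch(double[] values) {
    this.values = values;
  }

  @Override
  public double get(int docID) {
    return values[docID]; // one slot per document, filled by the uninverter
  }
}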
TestDemoDocValue.java:

@@ -49,7 +49,7 @@ import org.apache.lucene.util.LuceneTestCase;
  * to this class.
  */
 // nocommit don't suppress any:
-@SuppressCodecs({"Direct", "Memory", "Lucene41", "MockRandom"})
+@SuppressCodecs({"Direct", "Memory", "Lucene41", "MockRandom", "Lucene40", "Compressing"})
 public class TestDemoDocValue extends LuceneTestCase {
 
   public void testDemoNumber() throws IOException {
DoubleFieldSource.java:

@@ -57,12 +57,12 @@ public class DoubleFieldSource extends FieldCacheSource {
 
   @Override
   public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    final double[] arr = cache.getDoubles(readerContext.reader(), field, parser, true);
+    final FieldCache.Doubles arr = cache.getDoubles(readerContext.reader(), field, parser, true);
     final Bits valid = cache.getDocsWithField(readerContext.reader(), field);
     return new DoubleDocValues(this) {
       @Override
       public double doubleVal(int doc) {
-        return arr[doc];
+        return arr.get(doc);
       }
 
       @Override
@@ -131,7 +131,6 @@ public class DoubleFieldSource extends FieldCacheSource {
       @Override
       public ValueFiller getValueFiller() {
         return new ValueFiller() {
-          private final double[] doubleArr = arr;
           private final MutableValueDouble mval = new MutableValueDouble();
 
           @Override
@@ -141,7 +140,7 @@ public class DoubleFieldSource extends FieldCacheSource {
 
           @Override
           public void fillValue(int doc) {
-            mval.value = doubleArr[doc];
+            mval.value = arr.get(doc);
             mval.exists = valid.get(doc);
           }
         };
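A hedged sketch of exercising the updated source end to end; the single-argument DoubleFieldSource constructor, the field name, and the empty context map are assumptions rather than part of this patch:

import java.io.IOException;
import java.util.HashMap;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;

class DoubleFieldSourceDemo {
  static double readOne(AtomicReaderContext readerContext, int doc) throws IOException {
    DoubleFieldSource source = new DoubleFieldSource("price"); // hypothetical field name
    FunctionValues values = source.getValues(new HashMap<Object, Object>(), readerContext);
    return values.doubleVal(doc); // now backed by FieldCache.Doubles.get(doc) internally
  }
}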
BBoxSimilarityValueSource.java:

@@ -64,10 +64,10 @@ public class BBoxSimilarityValueSource extends ValueSource {
   @Override
   public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     AtomicReader reader = readerContext.reader();
-    final double[] minX = FieldCache.DEFAULT.getDoubles(reader, strategy.field_minX, true);
-    final double[] minY = FieldCache.DEFAULT.getDoubles(reader, strategy.field_minY, true);
-    final double[] maxX = FieldCache.DEFAULT.getDoubles(reader, strategy.field_maxX, true);
-    final double[] maxY = FieldCache.DEFAULT.getDoubles(reader, strategy.field_maxY, true);
+    final FieldCache.Doubles minX = FieldCache.DEFAULT.getDoubles(reader, strategy.field_minX, true);
+    final FieldCache.Doubles minY = FieldCache.DEFAULT.getDoubles(reader, strategy.field_minY, true);
+    final FieldCache.Doubles maxX = FieldCache.DEFAULT.getDoubles(reader, strategy.field_maxX, true);
+    final FieldCache.Doubles maxY = FieldCache.DEFAULT.getDoubles(reader, strategy.field_maxY, true);
 
     final Bits validMinX = FieldCache.DEFAULT.getDocsWithField(reader, strategy.field_minX);
     final Bits validMaxX = FieldCache.DEFAULT.getDocsWithField(reader, strategy.field_maxX);
@@ -81,8 +81,8 @@ public class BBoxSimilarityValueSource extends ValueSource {
         // make sure it has minX and area
         if (validMinX.get(doc) && validMaxX.get(doc)) {
           rect.reset(
-              minX[doc], maxX[doc],
-              minY[doc], maxY[doc]);
+              minX.get(doc), maxX.get(doc),
+              minY.get(doc), maxY.get(doc));
           return (float) similarity.score(rect, null);
         } else {
           return (float) similarity.score(null, null);
@@ -93,8 +93,8 @@ public class BBoxSimilarityValueSource extends ValueSource {
         // make sure it has minX and area
         if (validMinX.get(doc) && validMaxX.get(doc)) {
           rect.reset(
-              minX[doc], maxX[doc],
-              minY[doc], maxY[doc]);
+              minX.get(doc), maxX.get(doc),
+              minY.get(doc), maxY.get(doc));
           Explanation exp = new Explanation();
           similarity.score(rect, exp);
           return exp;
DistanceValueSource.java:

@@ -63,8 +63,8 @@ public class DistanceValueSource extends ValueSource {
   public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     AtomicReader reader = readerContext.reader();
 
-    final double[] ptX = FieldCache.DEFAULT.getDoubles(reader, strategy.getFieldNameX(), true);
-    final double[] ptY = FieldCache.DEFAULT.getDoubles(reader, strategy.getFieldNameY(), true);
+    final FieldCache.Doubles ptX = FieldCache.DEFAULT.getDoubles(reader, strategy.getFieldNameX(), true);
+    final FieldCache.Doubles ptY = FieldCache.DEFAULT.getDoubles(reader, strategy.getFieldNameY(), true);
     final Bits validX = FieldCache.DEFAULT.getDocsWithField(reader, strategy.getFieldNameX());
     final Bits validY = FieldCache.DEFAULT.getDocsWithField(reader, strategy.getFieldNameY());
 
@@ -84,7 +84,7 @@ public class DistanceValueSource extends ValueSource {
         // make sure it has minX and area
         if (validX.get(doc)) {
           assert validY.get(doc);
-          return calculator.distance(from, ptX[doc], ptY[doc]);
+          return calculator.distance(from, ptX.get(doc), ptY.get(doc));
         }
         return nullValue;
       }
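The spatial changes reduce to a single pattern: stop indexing a raw double[], ask the FieldCache.Doubles accessor for the value, and consult the Bits validity set before trusting it. A small sketch of that pattern; the nullValue fallback mirrors DistanceValueSource above:

import org.apache.lucene.search.FieldCache;
import org.apache.lucene.util.Bits;

class PerDocRead {
  // Old style: direct array indexing.
  static double oldStyle(double[] ptX, int doc) {
    return ptX[doc];
  }

  // New style: accessor object plus an explicit "does this doc have a value?" check.
  static double newStyle(FieldCache.Doubles ptX, Bits validX, int doc, double nullValue) {
    return validX.get(doc) ? ptX.get(doc) : nullValue;
  }
}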
TestDocSet.java:

@@ -22,21 +22,24 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
 
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.OpenBitSet;
 import org.apache.lucene.util.OpenBitSetIterator;
 
@@ -395,6 +398,21 @@ public class TestDocSet extends LuceneTestCase {
         return null;
       }
 
+      @Override
+      public NumericDocValues getNumericDocValues(String field, boolean direct) {
+        return null;
+      }
+
+      @Override
+      public BinaryDocValues getBinaryDocValues(String field, boolean direct) {
+        return null;
+      }
+
+      @Override
+      public SortedDocValues getSortedDocValues(String field, boolean direct) {
+        return null;
+      }
+
       @Override
       protected void doClose() {
       }