pull from DV under FC.getXXX if possible
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1410878 13f79535-47bb-0310-9956-ffa450edef68
parent 6735e95b1f
commit f9bfb920c6
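Summary of the change: AtomicReader grows plain getNumericDocValues/getBinaryDocValues/getSortedDocValues(String field) accessors (the boolean direct flag is dropped), and each FieldCache.getXXX path now asks the reader for native doc values first, falling back to uninverting the postings only when the field has none. A minimal sketch of that fallback shape, using only API visible in this diff — the holder class and method name are hypothetical, and the real createValue(...) implementations below also handle parsers and setDocsWithField:

    import java.io.IOException;

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.index.NumericDocValues;

    class DvFirstSketch {
      // Hypothetical helper, not part of the commit: load per-doc longs,
      // preferring doc values (DV) over FieldCache-style uninversion.
      static long[] loadLongs(AtomicReader reader, String field) throws IOException {
        final int maxDoc = reader.maxDoc();
        final long[] values = new long[maxDoc];
        final NumericDocValues valuesIn = reader.getNumericDocValues(field);
        if (valuesIn != null) {
          // Field was indexed with numeric doc values: read them directly.
          for (int docID = 0; docID < maxDoc; docID++) {
            values[docID] = valuesIn.get(docID);
          }
        } else {
          // No doc values for this field: the real code falls back to the
          // existing Uninvert helper that walks the terms index.
        }
        return values;
      }
    }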
@@ -414,7 +414,7 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
     }

     @Override
-    public NumericDocValues getDirectNumeric(FieldInfo fieldInfo) throws IOException {
+    public NumericDocValues getNumeric(FieldInfo fieldInfo) throws IOException {
       final OneField field = fields.get(fieldInfo.name);

       // SegmentCoreReaders already verifies this field is
@@ -454,7 +454,7 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
     }

     @Override
-    public BinaryDocValues getDirectBinary(FieldInfo fieldInfo) throws IOException {
+    public BinaryDocValues getBinary(FieldInfo fieldInfo) throws IOException {
       final OneField field = fields.get(fieldInfo.name);

       // SegmentCoreReaders already verifies this field is
@@ -497,7 +497,7 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
     }

     @Override
-    public SortedDocValues getDirectSorted(FieldInfo fieldInfo) throws IOException {
+    public SortedDocValues getSorted(FieldInfo fieldInfo) throws IOException {
       final OneField field = fields.get(fieldInfo.name);

       // SegmentCoreReaders already verifies this field is
@@ -20,6 +20,7 @@ package org.apache.lucene.codecs;
 import java.io.IOException;

 import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DocValues.Source;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.MergeState;
@@ -36,10 +37,14 @@ public abstract class BinaryDocValuesConsumer {
     for (AtomicReader reader : mergeState.readers) {
       final int maxDoc = reader.maxDoc();
       final Bits liveDocs = reader.getLiveDocs();
-      final Source source = reader.docValues(mergeState.fieldInfo.name).getDirectSource();
+      // nocommit what if this is null...? need default source?
+      final BinaryDocValues source = reader.getBinaryDocValues(mergeState.fieldInfo.name);
+
       for (int i = 0; i < maxDoc; i++) {
         if (liveDocs == null || liveDocs.get(i)) {
-          add(source.getBytes(i, bytes));
+          source.get(i, bytes);
+          add(bytes);
         }
         docCount++;
         mergeState.checkAbort.work(300);
@@ -20,10 +20,11 @@ package org.apache.lucene.codecs;
 import java.io.IOException;

 import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.DocValues.Source;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.MergeState;
-import org.apache.lucene.index.DocValues.Source;
+import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.util.Bits;

 public abstract class NumericDocValuesConsumer {
@@ -35,10 +36,11 @@ public abstract class NumericDocValuesConsumer {
     for (AtomicReader reader : mergeState.readers) {
       final int maxDoc = reader.maxDoc();
       final Bits liveDocs = reader.getLiveDocs();
-      final Source source = reader.docValues(mergeState.fieldInfo.name).getDirectSource();
+      // nocommit what if this is null...? need default source?
+      final NumericDocValues source = reader.getNumericDocValues(mergeState.fieldInfo.name);
       for (int i = 0; i < maxDoc; i++) {
         if (liveDocs == null || liveDocs.get(i)) {
-          add(source.getInt(i));
+          add(source.get(i));
         }
         docCount++;
         mergeState.checkAbort.work(300);
@@ -35,86 +35,9 @@ public abstract class SimpleDVProducer implements Closeable {
     this.maxDoc = maxDoc;
   }

-  public abstract NumericDocValues getDirectNumeric(FieldInfo field) throws IOException;
+  public abstract NumericDocValues getNumeric(FieldInfo field) throws IOException;

-  /** Loads all values into RAM. */
-  public NumericDocValues getNumeric(FieldInfo field) throws IOException {
-    NumericDocValues source = getDirectNumeric(field);
-    // nocommit more ram efficient?
-    final long[] values = new long[maxDoc];
-    for(int docID=0;docID<maxDoc;docID++) {
-      values[docID] = source.get(docID);
-    }
-    return new NumericDocValues() {
-      @Override
-      public long get(int docID) {
-        return values[docID];
-      }
-    };
-  }
-
-  public abstract BinaryDocValues getDirectBinary(FieldInfo field) throws IOException;
+  public abstract BinaryDocValues getBinary(FieldInfo field) throws IOException;

-  /** Loads all values into RAM. */
-  public BinaryDocValues getBinary(FieldInfo field) throws IOException {
-
-    BinaryDocValues source = getDirectBinary(field);
-
-    // nocommit more ram efficient
-    final byte[][] values = new byte[maxDoc][];
-    BytesRef scratch = new BytesRef();
-    for(int docID=0;docID<maxDoc;docID++) {
-      source.get(docID, scratch);
-      values[docID] = new byte[scratch.length];
-      System.arraycopy(scratch.bytes, scratch.offset, values[docID], 0, scratch.length);
-    }
-
-    return new BinaryDocValues() {
-      @Override
-      public void get(int docID, BytesRef result) {
-        result.bytes = values[docID];
-        result.offset = 0;
-        result.length = result.bytes.length;
-      }
-    };
-  }
-
-  public abstract SortedDocValues getDirectSorted(FieldInfo field) throws IOException;
+  public abstract SortedDocValues getSorted(FieldInfo field) throws IOException;

-  /** Loads all values into RAM. */
-  public SortedDocValues getSorted(FieldInfo field) throws IOException {
-    SortedDocValues source = getDirectSorted(field);
-    final int valueCount = source.getValueCount();
-    final byte[][] values = new byte[valueCount][];
-    BytesRef scratch = new BytesRef();
-    for(int ord=0;ord<valueCount;ord++) {
-      source.lookupOrd(ord, scratch);
-      values[ord] = new byte[scratch.length];
-      System.arraycopy(scratch.bytes, scratch.offset, values[ord], 0, scratch.length);
-    }
-
-    final int[] ords = new int[maxDoc];
-    for(int docID=0;docID<maxDoc;docID++) {
-      ords[docID] = source.getOrd(docID);
-    }
-
-    return new SortedDocValues() {
-      @Override
-      public int getOrd(int docID) {
-        return ords[docID];
-      }
-
-      @Override
-      public void lookupOrd(int ord, BytesRef result) {
-        result.bytes = values[ord];
-        result.offset = 0;
-        result.length = result.bytes.length;
-      }
-
-      @Override
-      public int getValueCount() {
-        return valueCount;
-      }
-    };
-  }
 }
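With the RAM-loading wrappers above deleted, a codec's producer now implements exactly three direct accessors. A hypothetical minimal subclass under the post-commit API, using only the signatures visible in the removed code — the constant-value behavior and class name are invented for illustration, and the (maxDoc) constructor is assumed from this hunk:

    import org.apache.lucene.index.BinaryDocValues;
    import org.apache.lucene.index.FieldInfo;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.index.SortedDocValues;
    import org.apache.lucene.util.BytesRef;

    class EmptyDVProducer extends SimpleDVProducer {
      EmptyDVProducer(int maxDoc) {
        super(maxDoc); // assumes the (maxDoc) constructor implied by this hunk
      }

      @Override
      public NumericDocValues getNumeric(FieldInfo field) {
        return new NumericDocValues() {
          @Override
          public long get(int docID) {
            return 0; // same value for every document
          }
        };
      }

      @Override
      public BinaryDocValues getBinary(FieldInfo field) {
        return new BinaryDocValues() {
          @Override
          public void get(int docID, BytesRef result) {
            result.length = 0; // empty bytes for every document
          }
        };
      }

      @Override
      public SortedDocValues getSorted(FieldInfo field) {
        return new SortedDocValues() {
          @Override
          public int getOrd(int docID) { return 0; }

          @Override
          public void lookupOrd(int ord, BytesRef result) { result.length = 0; }

          @Override
          public int getValueCount() { return 1; } // one shared empty term
        };
      }

      @Override
      public void close() { }
    }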
@@ -25,6 +25,7 @@ import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.DocValues.SortedSource;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.MergeState;
+import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
@@ -57,7 +58,7 @@ public abstract class SortedDocValuesConsumer {
     AtomicReader reader;
     FixedBitSet liveTerms;
     int ord = -1;
-    SortedSource source;
+    SortedDocValues values;
     BytesRef scratch = new BytesRef();

     // nocommit can we factor out the compressed fields
@@ -67,10 +68,10 @@ public abstract class SortedDocValuesConsumer {
     int[] segOrdToMergedOrd;

     public BytesRef nextTerm() {
-      while (ord < source.getValueCount()-1) {
+      while (ord < values.getValueCount()-1) {
         ord++;
         if (liveTerms == null || liveTerms.get(ord)) {
-          source.getByOrd(ord, scratch);
+          values.lookupOrd(ord, scratch);
           return scratch;
         } else {
           // Skip "deleted" terms (ie, terms that were not
@@ -98,26 +99,20 @@ public abstract class SortedDocValuesConsumer {

     // First pass: mark "live" terms
     for (AtomicReader reader : mergeState.readers) {
-      DocValues docvalues = reader.docValues(mergeState.fieldInfo.name);
-      final SortedSource source;
+      // nocommit what if this is null...? need default source?
       int maxDoc = reader.maxDoc();
-      if (docvalues == null) {
-        source = DocValues.getDefaultSortedSource(mergeState.fieldInfo.getDocValuesType(), maxDoc);
-      } else {
-        source = (SortedSource) docvalues.getDirectSource();
-      }

       SegmentState state = new SegmentState();
       state.reader = reader;
-      state.source = source;
+      state.values = reader.getSortedDocValues(mergeState.fieldInfo.name);
       segStates.add(state);
-      assert source.getValueCount() < Integer.MAX_VALUE;
+      assert state.values.getValueCount() < Integer.MAX_VALUE;
       if (reader.hasDeletions()) {
-        state.liveTerms = new FixedBitSet(source.getValueCount());
+        state.liveTerms = new FixedBitSet(state.values.getValueCount());
         Bits liveDocs = reader.getLiveDocs();
         for(int docID=0;docID<maxDoc;docID++) {
           if (liveDocs.get(docID)) {
-            state.liveTerms.set(source.ord(docID));
+            state.liveTerms.set(state.values.getOrd(docID));
           }
         }
       }
@@ -135,7 +130,7 @@ public abstract class SortedDocValuesConsumer {
       // nocommit we could defer this to 3rd pass (and
       // reduce transient RAM spike) but then
       // we'd spend more effort computing the mapping...:
-      segState.segOrdToMergedOrd = new int[segState.source.getValueCount()];
+      segState.segOrdToMergedOrd = new int[segState.values.getValueCount()];
       q.add(segState);
     }
   }
@@ -184,7 +179,7 @@ public abstract class SortedDocValuesConsumer {
     int maxDoc = segState.reader.maxDoc();
     for(int docID=0;docID<maxDoc;docID++) {
       if (liveDocs == null || liveDocs.get(docID)) {
-        int segOrd = segState.source.ord(docID);
+        int segOrd = segState.values.getOrd(docID);
         int mergedOrd = segState.segOrdToMergedOrd[segOrd];
         consumer.addDoc(mergedOrd);
       }
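The merge above is ord-based: pass one marks live terms per segment, pass two walks all segments' terms in sorted order to assign merged ords and fill each segState.segOrdToMergedOrd, and the final pass rewrites each live document's segment ord through that table. A toy illustration of the remap, with data invented for the example:

    class OrdRemapToy {
      public static void main(String[] args) {
        // seg 0 sorted terms: "apple"(ord 0), "pear"(ord 1)
        // seg 1 sorted terms: "banana"(ord 0), "pear"(ord 1)
        // merged sorted unique terms: "apple"(0), "banana"(1), "pear"(2)
        int[] seg0OrdToMergedOrd = {0, 2};
        int[] seg1OrdToMergedOrd = {1, 2};

        // final pass: a live doc in seg 1 whose segment ord is 1 ("pear")
        int segOrd = 1;
        int mergedOrd = seg1OrdToMergedOrd[segOrd];
        System.out.println(mergedOrd); // prints 2
      }
    }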
@@ -42,7 +42,9 @@ public class DoubleDocValuesField extends StoredField {
    */
   public static final FieldType TYPE = new FieldType();
   static {
-    TYPE.setDocValueType(DocValues.Type.FLOAT_64);
+    // nocommit kinda messy ... if user calls .numericValue
+    // they get back strange int ... hmmm
+    TYPE.setDocValueType(DocValues.Type.FIXED_INTS_64);
     TYPE.freeze();
   }

@@ -54,6 +56,8 @@ public class DoubleDocValuesField extends StoredField {
    */
   public DoubleDocValuesField(String name, double value) {
     super(name, TYPE);
-    fieldsData = Double.valueOf(value);
+    // nocommit kinda messy ... if user calls .numericValue
+    // they get back strange int ... hmmm
+    fieldsData = Double.doubleToRawLongBits(value);
   }
 }
@@ -41,7 +41,9 @@ public class FloatDocValuesField extends StoredField {
    */
   public static final FieldType TYPE = new FieldType();
   static {
-    TYPE.setDocValueType(DocValues.Type.FLOAT_32);
+    // nocommit kinda messy ... if user calls .numericValue
+    // they get back strange int ... hmmm
+    TYPE.setDocValueType(DocValues.Type.FIXED_INTS_32);
     TYPE.freeze();
   }

@@ -53,6 +55,8 @@ public class FloatDocValuesField extends StoredField {
    */
   public FloatDocValuesField(String name, float value) {
     super(name, TYPE);
-    fieldsData = Float.valueOf(value);
+    // nocommit kinda messy ... if user calls .numericValue
+    // they get back strange int ... hmmm
+    fieldsData = Float.floatToRawIntBits(value);
   }
 }
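Both field classes now store the raw IEEE 754 bit pattern in a fixed-width integer doc values type instead of the boxed float/double. The encoding is lossless, as this round trip with the standard java.lang API shows (the nocommit notes above flag the one wart: numericValue() now returns the bits, not the original number):

    class RawBitsRoundTrip {
      public static void main(String[] args) {
        double d = 3.14159;
        long bits = Double.doubleToRawLongBits(d);   // what the field now stores
        assert Double.longBitsToDouble(bits) == d;   // exact, lossless decode

        float f = 2.5f;
        int fbits = Float.floatToRawIntBits(f);      // FIXED_INTS_32 payload
        assert Float.intBitsToFloat(fbits) == f;
      }
    }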
@@ -164,13 +164,13 @@ public abstract class AtomicReader extends IndexReader {
   public abstract DocValues docValues(String field) throws IOException;

   // nocommit javadocs
-  public abstract NumericDocValues getNumericDocValues(String field, boolean direct) throws IOException;
+  public abstract NumericDocValues getNumericDocValues(String field) throws IOException;

   // nocommit javadocs
-  public abstract BinaryDocValues getBinaryDocValues(String field, boolean direct) throws IOException;
+  public abstract BinaryDocValues getBinaryDocValues(String field) throws IOException;

   // nocommit javadocs
-  public abstract SortedDocValues getSortedDocValues(String field, boolean direct) throws IOException;
+  public abstract SortedDocValues getSortedDocValues(String field) throws IOException;

   /**
    * Returns {@link DocValues} for this field's normalization values.
@@ -412,21 +412,21 @@ public class FilterAtomicReader extends AtomicReader {
   }

   @Override
-  public NumericDocValues getNumericDocValues(String field, boolean direct) throws IOException {
+  public NumericDocValues getNumericDocValues(String field) throws IOException {
     ensureOpen();
-    return in.getNumericDocValues(field, direct);
+    return in.getNumericDocValues(field);
   }

   @Override
-  public BinaryDocValues getBinaryDocValues(String field, boolean direct) throws IOException {
+  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
     ensureOpen();
-    return in.getBinaryDocValues(field, direct);
+    return in.getBinaryDocValues(field);
   }

   @Override
-  public SortedDocValues getSortedDocValues(String field, boolean direct) throws IOException {
+  public SortedDocValues getSortedDocValues(String field) throws IOException {
     ensureOpen();
-    return in.getSortedDocValues(field, direct);
+    return in.getSortedDocValues(field);
   }

   @Override
@@ -271,24 +271,24 @@ public final class ParallelAtomicReader extends AtomicReader {
   }

   @Override
-  public NumericDocValues getNumericDocValues(String field, boolean direct) throws IOException {
+  public NumericDocValues getNumericDocValues(String field) throws IOException {
     ensureOpen();
     AtomicReader reader = fieldToReader.get(field);
-    return reader == null ? null : reader.getNumericDocValues(field, direct);
+    return reader == null ? null : reader.getNumericDocValues(field);
   }

   @Override
-  public BinaryDocValues getBinaryDocValues(String field, boolean direct) throws IOException {
+  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
     ensureOpen();
     AtomicReader reader = fieldToReader.get(field);
-    return reader == null ? null : reader.getBinaryDocValues(field, direct);
+    return reader == null ? null : reader.getBinaryDocValues(field);
   }

   @Override
-  public SortedDocValues getSortedDocValues(String field, boolean direct) throws IOException {
+  public SortedDocValues getSortedDocValues(String field) throws IOException {
     ensureOpen();
     AtomicReader reader = fieldToReader.get(field);
-    return reader == null ? null : reader.getSortedDocValues(field, direct);
+    return reader == null ? null : reader.getSortedDocValues(field);
   }

   @Override
@@ -66,8 +66,6 @@ final class SegmentCoreReaders {
   final TermVectorsReader termVectorsReaderOrig;
   final CompoundFileDirectory cfsReader;

-  private final Map<FieldInfo,Object> docValuesCache = new HashMap<FieldInfo,Object>();
-
   final CloseableThreadLocal<StoredFieldsReader> fieldsReaderLocal = new CloseableThreadLocal<StoredFieldsReader>() {
     @Override
     protected StoredFieldsReader initialValue() {
@@ -155,72 +153,60 @@ final class SegmentCoreReaders {
   }

   // nocommit shrink the sync'd part to a cache miss
-  synchronized NumericDocValues getNumericDocValues(String field, boolean direct) throws IOException {
+  synchronized NumericDocValues getNumericDocValues(String field) throws IOException {
     FieldInfo fi = fieldInfos.fieldInfo(field);
     if (fi == null) {
+      // Field does not exist
+      return null;
+    }
+    if (fi.getDocValuesType() == null) {
+      // Field was not indexed with doc values
       return null;
     }
     if (!DocValues.isNumber(fi.getDocValuesType())) {
-      throw new IllegalArgumentException("field \"" + field + "\" was not indexed as a numeric doc values field");
+      // DocValues were not numeric
+      return null;
     }

-    if (direct) {
-      return simpleDVProducer.getDirectNumeric(fi);
-    } else {
-      if (!docValuesCache.containsKey(fi)) {
-        NumericDocValues dv = simpleDVProducer.getNumeric(fi);
-        if (dv != null) {
-          docValuesCache.put(fi, dv);
-        }
-      }
-      return (NumericDocValues) docValuesCache.get(fi);
-    }
+    return simpleDVProducer.getNumeric(fi);
   }

   // nocommit shrink the sync'd part to a cache miss
-  synchronized BinaryDocValues getBinaryDocValues(String field, boolean direct) throws IOException {
+  synchronized BinaryDocValues getBinaryDocValues(String field) throws IOException {
     FieldInfo fi = fieldInfos.fieldInfo(field);
     if (fi == null) {
+      // Field does not exist
+      return null;
+    }
+    if (fi.getDocValuesType() == null) {
+      // Field was not indexed with doc values
       return null;
     }
     if (!DocValues.isBytes(fi.getDocValuesType())) {
-      throw new IllegalArgumentException("field \"" + field + "\" was not indexed as a binary doc values field");
+      // DocValues were not binary
+      return null;
     }

-    if (direct) {
-      return simpleDVProducer.getDirectBinary(fi);
-    } else {
-      if (!docValuesCache.containsKey(fi)) {
-        BinaryDocValues dv = simpleDVProducer.getBinary(fi);
-        if (dv != null) {
-          docValuesCache.put(fi, dv);
-        }
-      }
-      return (BinaryDocValues) docValuesCache.get(fi);
-    }
+    return simpleDVProducer.getBinary(fi);
   }

   // nocommit shrink the sync'd part to a cache miss
-  synchronized SortedDocValues getSortedDocValues(String field, boolean direct) throws IOException {
+  synchronized SortedDocValues getSortedDocValues(String field) throws IOException {
     FieldInfo fi = fieldInfos.fieldInfo(field);
     if (fi == null) {
+      // Field does not exist
+      return null;
+    }
+    if (fi.getDocValuesType() == null) {
+      // Field was not indexed with doc values
       return null;
     }
     if (!DocValues.isSortedBytes(fi.getDocValuesType())) {
-      throw new IllegalArgumentException("field \"" + field + "\" was not indexed as a sorted doc values field");
+      // DocValues were not sorted
+      return null;
     }

-    if (direct) {
-      return simpleDVProducer.getDirectSorted(fi);
-    } else {
-      if (!docValuesCache.containsKey(fi)) {
-        SortedDocValues dv = simpleDVProducer.getSorted(fi);
-        if (dv != null) {
-          docValuesCache.put(fi, dv);
-        }
-      }
-      return (SortedDocValues) docValuesCache.get(fi);
-    }
+    return simpleDVProducer.getSorted(fi);
   }

   // nocommit binary, sorted too
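Note the contract change above: a missing field, a field without doc values, or a type mismatch now yields null instead of an IllegalArgumentException, and the per-field docValuesCache plus the direct flag are gone. Callers are expected to null-check, as the FieldCacheImpl hunks below do; a sketch of the caller-side pattern (the field name and helper are made up for illustration):

    import java.io.IOException;

    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.index.NumericDocValues;

    class NullCheckSketch {
      // Hypothetical helper: "price" is an invented field name.
      static long priceOrZero(AtomicReader reader, int docID) throws IOException {
        NumericDocValues dv = reader.getNumericDocValues("price");
        if (dv != null) {
          return dv.get(docID);  // field has numeric doc values
        }
        // absent, non-DV, or non-numeric field: caller must handle it
        // (FieldCacheImpl below falls back to uninverting the postings)
        return 0;
      }
    }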
@@ -226,18 +226,18 @@ public final class SegmentReader extends AtomicReader {
   }

   @Override
-  public NumericDocValues getNumericDocValues(String field, boolean direct) throws IOException {
-    return core.getNumericDocValues(field, direct);
+  public NumericDocValues getNumericDocValues(String field) throws IOException {
+    return core.getNumericDocValues(field);
   }

   @Override
-  public BinaryDocValues getBinaryDocValues(String field, boolean direct) throws IOException {
-    return core.getBinaryDocValues(field, direct);
+  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
+    return core.getBinaryDocValues(field);
   }

   @Override
-  public SortedDocValues getSortedDocValues(String field, boolean direct) throws IOException {
-    return core.getSortedDocValues(field, direct);
+  public SortedDocValues getSortedDocValues(String field) throws IOException {
+    return core.getSortedDocValues(field);
   }

   @Override
@@ -89,19 +89,19 @@ public final class SlowCompositeReaderWrapper extends AtomicReader {
   }

   @Override
-  public NumericDocValues getNumericDocValues(String field, boolean direct) throws IOException {
+  public NumericDocValues getNumericDocValues(String field) throws IOException {
     // nocommit todo
     return null;
   }

   @Override
-  public BinaryDocValues getBinaryDocValues(String field, boolean direct) throws IOException {
+  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
     // nocommit todo
     return null;
   }

   @Override
-  public SortedDocValues getSortedDocValues(String field, boolean direct) throws IOException {
+  public SortedDocValues getSortedDocValues(String field) throws IOException {
     // nocommit todo
     return null;
   }

@ -26,13 +26,16 @@ import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.WeakHashMap;
|
import java.util.WeakHashMap;
|
||||||
|
|
||||||
|
import org.apache.lucene.index.AtomicReader;
|
||||||
|
import org.apache.lucene.index.BinaryDocValues;
|
||||||
import org.apache.lucene.index.DocTermOrds;
|
import org.apache.lucene.index.DocTermOrds;
|
||||||
import org.apache.lucene.index.DocsAndPositionsEnum;
|
import org.apache.lucene.index.DocsAndPositionsEnum;
|
||||||
import org.apache.lucene.index.DocsEnum;
|
import org.apache.lucene.index.DocsEnum;
|
||||||
import org.apache.lucene.index.AtomicReader;
|
|
||||||
import org.apache.lucene.index.IndexReader;
|
import org.apache.lucene.index.IndexReader;
|
||||||
|
import org.apache.lucene.index.NumericDocValues;
|
||||||
import org.apache.lucene.index.OrdTermState;
|
import org.apache.lucene.index.OrdTermState;
|
||||||
import org.apache.lucene.index.SegmentReader;
|
import org.apache.lucene.index.SegmentReader;
|
||||||
|
import org.apache.lucene.index.SortedDocValues;
|
||||||
import org.apache.lucene.index.TermState;
|
import org.apache.lucene.index.TermState;
|
||||||
import org.apache.lucene.index.Terms;
|
import org.apache.lucene.index.Terms;
|
||||||
import org.apache.lucene.index.TermsEnum;
|
import org.apache.lucene.index.TermsEnum;
|
||||||
|
@ -93,12 +96,12 @@ class FieldCacheImpl implements FieldCache {
|
||||||
final Cache cache = cacheEntry.getValue();
|
final Cache cache = cacheEntry.getValue();
|
||||||
final Class<?> cacheType = cacheEntry.getKey();
|
final Class<?> cacheType = cacheEntry.getKey();
|
||||||
synchronized(cache.readerCache) {
|
synchronized(cache.readerCache) {
|
||||||
for (final Map.Entry<Object,Map<Entry, Object>> readerCacheEntry : cache.readerCache.entrySet()) {
|
for (final Map.Entry<Object,Map<CacheKey, Object>> readerCacheEntry : cache.readerCache.entrySet()) {
|
||||||
final Object readerKey = readerCacheEntry.getKey();
|
final Object readerKey = readerCacheEntry.getKey();
|
||||||
if (readerKey == null) continue;
|
if (readerKey == null) continue;
|
||||||
final Map<Entry, Object> innerCache = readerCacheEntry.getValue();
|
final Map<CacheKey, Object> innerCache = readerCacheEntry.getValue();
|
||||||
for (final Map.Entry<Entry, Object> mapEntry : innerCache.entrySet()) {
|
for (final Map.Entry<CacheKey, Object> mapEntry : innerCache.entrySet()) {
|
||||||
Entry entry = mapEntry.getKey();
|
CacheKey entry = mapEntry.getKey();
|
||||||
result.add(new CacheEntry(readerKey, entry.field,
|
result.add(new CacheEntry(readerKey, entry.field,
|
||||||
cacheType, entry.custom,
|
cacheType, entry.custom,
|
||||||
mapEntry.getValue()));
|
mapEntry.getValue()));
|
||||||
|
@ -152,9 +155,9 @@ class FieldCacheImpl implements FieldCache {
|
||||||
|
|
||||||
final FieldCacheImpl wrapper;
|
final FieldCacheImpl wrapper;
|
||||||
|
|
||||||
final Map<Object,Map<Entry,Object>> readerCache = new WeakHashMap<Object,Map<Entry,Object>>();
|
final Map<Object,Map<CacheKey,Object>> readerCache = new WeakHashMap<Object,Map<CacheKey,Object>>();
|
||||||
|
|
||||||
protected abstract Object createValue(AtomicReader reader, Entry key, boolean setDocsWithField)
|
protected abstract Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
|
||||||
throws IOException;
|
throws IOException;
|
||||||
|
|
||||||
/** Remove this reader from the cache, if present. */
|
/** Remove this reader from the cache, if present. */
|
||||||
|
@ -167,13 +170,13 @@ class FieldCacheImpl implements FieldCache {
|
||||||
|
|
||||||
/** Sets the key to the value for the provided reader;
|
/** Sets the key to the value for the provided reader;
|
||||||
* if the key is already set then this doesn't change it. */
|
* if the key is already set then this doesn't change it. */
|
||||||
public void put(AtomicReader reader, Entry key, Object value) {
|
public void put(AtomicReader reader, CacheKey key, Object value) {
|
||||||
final Object readerKey = reader.getCoreCacheKey();
|
final Object readerKey = reader.getCoreCacheKey();
|
||||||
synchronized (readerCache) {
|
synchronized (readerCache) {
|
||||||
Map<Entry,Object> innerCache = readerCache.get(readerKey);
|
Map<CacheKey,Object> innerCache = readerCache.get(readerKey);
|
||||||
if (innerCache == null) {
|
if (innerCache == null) {
|
||||||
// First time this reader is using FieldCache
|
// First time this reader is using FieldCache
|
||||||
innerCache = new HashMap<Entry,Object>();
|
innerCache = new HashMap<CacheKey,Object>();
|
||||||
readerCache.put(readerKey, innerCache);
|
readerCache.put(readerKey, innerCache);
|
||||||
wrapper.initReader(reader);
|
wrapper.initReader(reader);
|
||||||
}
|
}
|
||||||
|
@ -186,15 +189,15 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public Object get(AtomicReader reader, Entry key, boolean setDocsWithField) throws IOException {
|
public Object get(AtomicReader reader, CacheKey key, boolean setDocsWithField) throws IOException {
|
||||||
Map<Entry,Object> innerCache;
|
Map<CacheKey,Object> innerCache;
|
||||||
Object value;
|
Object value;
|
||||||
final Object readerKey = reader.getCoreCacheKey();
|
final Object readerKey = reader.getCoreCacheKey();
|
||||||
synchronized (readerCache) {
|
synchronized (readerCache) {
|
||||||
innerCache = readerCache.get(readerKey);
|
innerCache = readerCache.get(readerKey);
|
||||||
if (innerCache == null) {
|
if (innerCache == null) {
|
||||||
// First time this reader is using FieldCache
|
// First time this reader is using FieldCache
|
||||||
innerCache = new HashMap<Entry,Object>();
|
innerCache = new HashMap<CacheKey,Object>();
|
||||||
readerCache.put(readerKey, innerCache);
|
readerCache.put(readerKey, innerCache);
|
||||||
wrapper.initReader(reader);
|
wrapper.initReader(reader);
|
||||||
value = null;
|
value = null;
|
||||||
|
@ -250,12 +253,12 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Expert: Every composite-key in the internal cache is of this type. */
|
/** Expert: Every composite-key in the internal cache is of this type. */
|
||||||
static class Entry {
|
static class CacheKey {
|
||||||
final String field; // which Field
|
final String field; // which Field
|
||||||
final Object custom; // which custom comparator or parser
|
final Object custom; // which custom comparator or parser
|
||||||
|
|
||||||
/** Creates one of these objects for a custom comparator/parser. */
|
/** Creates one of these objects for a custom comparator/parser. */
|
||||||
Entry (String field, Object custom) {
|
CacheKey(String field, Object custom) {
|
||||||
this.field = field;
|
this.field = field;
|
||||||
this.custom = custom;
|
this.custom = custom;
|
||||||
}
|
}
|
||||||
|
@ -263,8 +266,8 @@ class FieldCacheImpl implements FieldCache {
|
||||||
/** Two of these are equal iff they reference the same field and type. */
|
/** Two of these are equal iff they reference the same field and type. */
|
||||||
@Override
|
@Override
|
||||||
public boolean equals (Object o) {
|
public boolean equals (Object o) {
|
||||||
if (o instanceof Entry) {
|
if (o instanceof CacheKey) {
|
||||||
Entry other = (Entry) o;
|
CacheKey other = (CacheKey) o;
|
||||||
if (other.field.equals(field)) {
|
if (other.field.equals(field)) {
|
||||||
if (other.custom == null) {
|
if (other.custom == null) {
|
||||||
if (custom == null) return true;
|
if (custom == null) return true;
|
||||||
|
@ -283,17 +286,6 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// inherit javadocs
|
|
||||||
public Bytes getBytes (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
|
|
||||||
return getBytes(reader, field, null, setDocsWithField);
|
|
||||||
}
|
|
||||||
|
|
||||||
// inherit javadocs
|
|
||||||
public Bytes getBytes(AtomicReader reader, String field, ByteParser parser, boolean setDocsWithField)
|
|
||||||
throws IOException {
|
|
||||||
return (Bytes) caches.get(Byte.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static abstract class Uninvert {
|
private static abstract class Uninvert {
|
||||||
|
|
||||||
public Bits docsWithField;
|
public Bits docsWithField;
|
||||||
|
@ -352,6 +344,38 @@ class FieldCacheImpl implements FieldCache {
|
||||||
protected abstract void visitDoc(int docID);
|
protected abstract void visitDoc(int docID);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// null Bits means no docs matched
|
||||||
|
void setDocsWithField(AtomicReader reader, String field, Bits docsWithField) {
|
||||||
|
final int maxDoc = reader.maxDoc();
|
||||||
|
final Bits bits;
|
||||||
|
if (docsWithField == null) {
|
||||||
|
bits = new Bits.MatchNoBits(maxDoc);
|
||||||
|
} else if (docsWithField instanceof FixedBitSet) {
|
||||||
|
final int numSet = ((FixedBitSet) docsWithField).cardinality();
|
||||||
|
if (numSet >= maxDoc) {
|
||||||
|
// The cardinality of the BitSet is maxDoc if all documents have a value.
|
||||||
|
assert numSet == maxDoc;
|
||||||
|
bits = new Bits.MatchAllBits(maxDoc);
|
||||||
|
} else {
|
||||||
|
bits = docsWithField;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
bits = docsWithField;
|
||||||
|
}
|
||||||
|
caches.get(DocsWithFieldCache.class).put(reader, new CacheKey(field, null), bits);
|
||||||
|
}
|
||||||
|
|
||||||
|
// inherit javadocs
|
||||||
|
public Bytes getBytes (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
|
||||||
|
return getBytes(reader, field, null, setDocsWithField);
|
||||||
|
}
|
||||||
|
|
||||||
|
// inherit javadocs
|
||||||
|
public Bytes getBytes(AtomicReader reader, String field, ByteParser parser, boolean setDocsWithField)
|
||||||
|
throws IOException {
|
||||||
|
return (Bytes) caches.get(Byte.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
|
||||||
|
}
|
||||||
|
|
||||||
// nocommit move up?
|
// nocommit move up?
|
||||||
static class BytesFromArray extends Bytes {
|
static class BytesFromArray extends Bytes {
|
||||||
private final byte[] values;
|
private final byte[] values;
|
||||||
|
@ -372,26 +396,38 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField)
|
protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
|
||||||
ByteParser parser = (ByteParser) entryKey.custom;
|
int maxDoc = reader.maxDoc();
|
||||||
|
final byte[] values;
|
||||||
|
|
||||||
|
NumericDocValues valuesIn = reader.getNumericDocValues(key.field);
|
||||||
|
if (valuesIn != null) {
|
||||||
|
// nocommit should we throw exc if parser isn't
|
||||||
|
// null? if setDocsWithField is true?
|
||||||
|
values = new byte[maxDoc];
|
||||||
|
for(int docID=0;docID<maxDoc;docID++) {
|
||||||
|
values[docID] = (byte) valuesIn.get(docID);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
|
||||||
|
final ByteParser parser = (ByteParser) key.custom;
|
||||||
if (parser == null) {
|
if (parser == null) {
|
||||||
// Confusing: must delegate to wrapper (vs simply
|
// Confusing: must delegate to wrapper (vs simply
|
||||||
// setting parser = DEFAULT_SHORT_PARSER) so cache
|
// setting parser = DEFAULT_SHORT_PARSER) so cache
|
||||||
// key includes DEFAULT_SHORT_PARSER:
|
// key includes DEFAULT_SHORT_PARSER:
|
||||||
return wrapper.getBytes(reader, entryKey.field, DEFAULT_BYTE_PARSER, setDocsWithField);
|
return wrapper.getBytes(reader, key.field, DEFAULT_BYTE_PARSER, setDocsWithField);
|
||||||
}
|
}
|
||||||
|
|
||||||
final ByteParser finalParser = parser;
|
values = new byte[maxDoc];
|
||||||
|
|
||||||
final byte[] values = new byte[reader.maxDoc()];
|
|
||||||
Uninvert u = new Uninvert() {
|
Uninvert u = new Uninvert() {
|
||||||
private byte currentValue;
|
private byte currentValue;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void visitTerm(BytesRef term) {
|
public void visitTerm(BytesRef term) {
|
||||||
currentValue = finalParser.parseByte(term);
|
currentValue = parser.parseByte(term);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -400,10 +436,11 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
u.uninvert(reader, entryKey.field, setDocsWithField);
|
u.uninvert(reader, key.field, setDocsWithField);
|
||||||
|
|
||||||
if (setDocsWithField) {
|
if (setDocsWithField) {
|
||||||
wrapper.setDocsWithField(reader, entryKey.field, u.docsWithField);
|
wrapper.setDocsWithField(reader, key.field, u.docsWithField);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return new BytesFromArray(values);
|
return new BytesFromArray(values);
|
||||||
|
@ -418,7 +455,7 @@ class FieldCacheImpl implements FieldCache {
|
||||||
// inherit javadocs
|
// inherit javadocs
|
||||||
public Shorts getShorts(AtomicReader reader, String field, ShortParser parser, boolean setDocsWithField)
|
public Shorts getShorts(AtomicReader reader, String field, ShortParser parser, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
return (Shorts) caches.get(Short.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
|
return (Shorts) caches.get(Short.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
|
||||||
}
|
}
|
||||||
|
|
||||||
// nocommit move up?
|
// nocommit move up?
|
||||||
|
@ -441,26 +478,36 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField)
|
protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
|
||||||
ShortParser parser = (ShortParser) entryKey.custom;
|
int maxDoc = reader.maxDoc();
|
||||||
|
final short[] values;
|
||||||
|
|
||||||
|
NumericDocValues valuesIn = reader.getNumericDocValues(key.field);
|
||||||
|
if (valuesIn != null) {
|
||||||
|
// nocommit should we throw exc if parser isn't
|
||||||
|
// null? if setDocsWithField is true?
|
||||||
|
values = new short[maxDoc];
|
||||||
|
for(int docID=0;docID<maxDoc;docID++) {
|
||||||
|
values[docID] = (short) valuesIn.get(docID);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
final ShortParser parser = (ShortParser) key.custom;
|
||||||
if (parser == null) {
|
if (parser == null) {
|
||||||
// Confusing: must delegate to wrapper (vs simply
|
// Confusing: must delegate to wrapper (vs simply
|
||||||
// setting parser = DEFAULT_SHORT_PARSER) so cache
|
// setting parser = DEFAULT_SHORT_PARSER) so cache
|
||||||
// key includes DEFAULT_SHORT_PARSER:
|
// key includes DEFAULT_SHORT_PARSER:
|
||||||
return wrapper.getShorts(reader, entryKey.field, DEFAULT_SHORT_PARSER, setDocsWithField);
|
return wrapper.getShorts(reader, key.field, DEFAULT_SHORT_PARSER, setDocsWithField);
|
||||||
}
|
}
|
||||||
|
|
||||||
final ShortParser finalParser = parser;
|
values = new short[maxDoc];
|
||||||
|
|
||||||
final short[] values = new short[reader.maxDoc()];
|
|
||||||
Uninvert u = new Uninvert() {
|
Uninvert u = new Uninvert() {
|
||||||
private short currentValue;
|
private short currentValue;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void visitTerm(BytesRef term) {
|
public void visitTerm(BytesRef term) {
|
||||||
currentValue = finalParser.parseShort(term);
|
currentValue = parser.parseShort(term);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -469,37 +516,16 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
u.uninvert(reader, entryKey.field, setDocsWithField);
|
u.uninvert(reader, key.field, setDocsWithField);
|
||||||
|
|
||||||
if (setDocsWithField) {
|
if (setDocsWithField) {
|
||||||
wrapper.setDocsWithField(reader, entryKey.field, u.docsWithField);
|
wrapper.setDocsWithField(reader, key.field, u.docsWithField);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return new ShortsFromArray(values);
|
return new ShortsFromArray(values);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// null Bits means no docs matched
|
|
||||||
void setDocsWithField(AtomicReader reader, String field, Bits docsWithField) {
|
|
||||||
final int maxDoc = reader.maxDoc();
|
|
||||||
final Bits bits;
|
|
||||||
if (docsWithField == null) {
|
|
||||||
bits = new Bits.MatchNoBits(maxDoc);
|
|
||||||
} else if (docsWithField instanceof FixedBitSet) {
|
|
||||||
final int numSet = ((FixedBitSet) docsWithField).cardinality();
|
|
||||||
if (numSet >= maxDoc) {
|
|
||||||
// The cardinality of the BitSet is maxDoc if all documents have a value.
|
|
||||||
assert numSet == maxDoc;
|
|
||||||
bits = new Bits.MatchAllBits(maxDoc);
|
|
||||||
} else {
|
|
||||||
bits = docsWithField;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
bits = docsWithField;
|
|
||||||
}
|
|
||||||
caches.get(DocsWithFieldCache.class).put(reader, new Entry(field, null), bits);
|
|
||||||
}
|
|
||||||
|
|
||||||
// inherit javadocs
|
// inherit javadocs
|
||||||
public Ints getInts (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
|
public Ints getInts (AtomicReader reader, String field, boolean setDocsWithField) throws IOException {
|
||||||
return getInts(reader, field, null, setDocsWithField);
|
return getInts(reader, field, null, setDocsWithField);
|
||||||
|
@ -508,7 +534,7 @@ class FieldCacheImpl implements FieldCache {
|
||||||
// inherit javadocs
|
// inherit javadocs
|
||||||
public Ints getInts(AtomicReader reader, String field, IntParser parser, boolean setDocsWithField)
|
public Ints getInts(AtomicReader reader, String field, IntParser parser, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
return (Ints) caches.get(Integer.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
|
return (Ints) caches.get(Integer.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
|
||||||
}
|
}
|
||||||
|
|
||||||
// nocommit move up?
|
// nocommit move up?
|
||||||
|
@ -531,9 +557,22 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Object createValue(final AtomicReader reader, Entry entryKey, boolean setDocsWithField)
|
protected Object createValue(final AtomicReader reader, CacheKey key, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
IntParser parser = (IntParser) entryKey.custom;
|
|
||||||
|
int maxDoc = reader.maxDoc();
|
||||||
|
final int[] values;
|
||||||
|
NumericDocValues valuesIn = reader.getNumericDocValues(key.field);
|
||||||
|
if (valuesIn != null) {
|
||||||
|
// nocommit should we throw exc if parser isn't
|
||||||
|
// null? if setDocsWithField is true?
|
||||||
|
values = new int[maxDoc];
|
||||||
|
for(int docID=0;docID<maxDoc;docID++) {
|
||||||
|
values[docID] = (int) valuesIn.get(docID);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
|
||||||
|
final IntParser parser = (IntParser) key.custom;
|
||||||
if (parser == null) {
|
if (parser == null) {
|
||||||
// Confusing: must delegate to wrapper (vs simply
|
// Confusing: must delegate to wrapper (vs simply
|
||||||
// setting parser =
|
// setting parser =
|
||||||
|
@ -541,23 +580,22 @@ class FieldCacheImpl implements FieldCache {
|
||||||
// cache key includes
|
// cache key includes
|
||||||
// DEFAULT_INT_PARSER/NUMERIC_UTILS_INT_PARSER:
|
// DEFAULT_INT_PARSER/NUMERIC_UTILS_INT_PARSER:
|
||||||
try {
|
try {
|
||||||
return wrapper.getInts(reader, entryKey.field, DEFAULT_INT_PARSER, setDocsWithField);
|
return wrapper.getInts(reader, key.field, DEFAULT_INT_PARSER, setDocsWithField);
|
||||||
} catch (NumberFormatException ne) {
|
} catch (NumberFormatException ne) {
|
||||||
return wrapper.getInts(reader, entryKey.field, NUMERIC_UTILS_INT_PARSER, setDocsWithField);
|
return wrapper.getInts(reader, key.field, NUMERIC_UTILS_INT_PARSER, setDocsWithField);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
final IntParser finalParser = parser;
|
|
||||||
// nocommit how to avoid double alloc in numeric field
|
// nocommit how to avoid double alloc in numeric field
|
||||||
// case ...
|
// case ...
|
||||||
final int[] values = new int[reader.maxDoc()];
|
values = new int[reader.maxDoc()];
|
||||||
|
|
||||||
Uninvert u = new Uninvert() {
|
Uninvert u = new Uninvert() {
|
||||||
private int currentValue;
|
private int currentValue;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void visitTerm(BytesRef term) {
|
public void visitTerm(BytesRef term) {
|
||||||
currentValue = finalParser.parseInt(term);
|
currentValue = parser.parseInt(term);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -566,31 +604,33 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
u.uninvert(reader, entryKey.field, setDocsWithField);
|
u.uninvert(reader, key.field, setDocsWithField);
|
||||||
|
|
||||||
if (setDocsWithField) {
|
if (setDocsWithField) {
|
||||||
wrapper.setDocsWithField(reader, entryKey.field, u.docsWithField);
|
wrapper.setDocsWithField(reader, key.field, u.docsWithField);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return new IntsFromArray(values);
|
return new IntsFromArray(values);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// nocommit must advertise that this does NOT work if you
|
||||||
|
// index only doc values for the field ... it will say no
|
||||||
|
// doc exists...
|
||||||
public Bits getDocsWithField(AtomicReader reader, String field)
|
public Bits getDocsWithField(AtomicReader reader, String field)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
return (Bits) caches.get(DocsWithFieldCache.class).get(reader, new Entry(field, null), false);
|
return (Bits) caches.get(DocsWithFieldCache.class).get(reader, new CacheKey(field, null), false);
|
||||||
}
|
}
|
||||||
|
|
||||||
// nocommit move up?
|
|
||||||
static final class DocsWithFieldCache extends Cache {
|
static final class DocsWithFieldCache extends Cache {
|
||||||
DocsWithFieldCache(FieldCacheImpl wrapper) {
|
DocsWithFieldCache(FieldCacheImpl wrapper) {
|
||||||
super(wrapper);
|
super(wrapper);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField /* ignored */)
|
protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField /* ignored */)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
final String field = entryKey.field;
|
final String field = key.field;
|
||||||
FixedBitSet res = null;
|
FixedBitSet res = null;
|
||||||
Terms terms = reader.terms(field);
|
Terms terms = reader.terms(field);
|
||||||
final int maxDoc = reader.maxDoc();
|
final int maxDoc = reader.maxDoc();
|
||||||
|
@ -646,7 +686,7 @@ class FieldCacheImpl implements FieldCache {
|
||||||
// inherit javadocs
|
// inherit javadocs
|
||||||
public Floats getFloats(AtomicReader reader, String field, FloatParser parser, boolean setDocsWithField)
|
public Floats getFloats(AtomicReader reader, String field, FloatParser parser, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
return (Floats) caches.get(Float.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
|
return (Floats) caches.get(Float.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
|
||||||
}
|
}
|
||||||
|
|
||||||
// nocommit move up?
|
// nocommit move up?
|
||||||
|
@ -669,9 +709,25 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField)
|
protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
FloatParser parser = (FloatParser) entryKey.custom;
|
|
||||||
|
int maxDoc = reader.maxDoc();
|
||||||
|
final float[] values;
|
||||||
|
|
||||||
|
NumericDocValues valuesIn = reader.getNumericDocValues(key.field);
|
||||||
|
if (valuesIn != null) {
|
||||||
|
// nocommit should we throw exc if parser isn't
|
||||||
|
// null? if setDocsWithField is true?
|
||||||
|
values = new float[maxDoc];
|
||||||
|
for(int docID=0;docID<maxDoc;docID++) {
|
||||||
|
// nocommit somewhat dangerous ... eg if user had
|
||||||
|
// indexed as DV.BYTE ...
|
||||||
|
values[docID] = Float.intBitsToFloat((int) valuesIn.get(docID));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
|
||||||
|
final FloatParser parser = (FloatParser) key.custom;
|
||||||
if (parser == null) {
|
if (parser == null) {
|
||||||
// Confusing: must delegate to wrapper (vs simply
|
// Confusing: must delegate to wrapper (vs simply
|
||||||
// setting parser =
|
// setting parser =
|
||||||
|
@ -679,23 +735,22 @@ class FieldCacheImpl implements FieldCache {
|
||||||
// cache key includes
|
// cache key includes
|
||||||
// DEFAULT_FLOAT_PARSER/NUMERIC_UTILS_FLOAT_PARSER:
|
// DEFAULT_FLOAT_PARSER/NUMERIC_UTILS_FLOAT_PARSER:
|
||||||
try {
|
try {
|
||||||
return wrapper.getFloats(reader, entryKey.field, DEFAULT_FLOAT_PARSER, setDocsWithField);
|
return wrapper.getFloats(reader, key.field, DEFAULT_FLOAT_PARSER, setDocsWithField);
|
||||||
} catch (NumberFormatException ne) {
|
} catch (NumberFormatException ne) {
|
||||||
return wrapper.getFloats(reader, entryKey.field, NUMERIC_UTILS_FLOAT_PARSER, setDocsWithField);
|
return wrapper.getFloats(reader, key.field, NUMERIC_UTILS_FLOAT_PARSER, setDocsWithField);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
final FloatParser finalParser = parser;
|
|
||||||
// nocommit how to avoid double alloc in numeric field
|
// nocommit how to avoid double alloc in numeric field
|
||||||
// case ...
|
// case ...
|
||||||
final float[] values = new float[reader.maxDoc()];
|
values = new float[reader.maxDoc()];
|
||||||
|
|
||||||
Uninvert u = new Uninvert() {
|
Uninvert u = new Uninvert() {
|
||||||
private float currentValue;
|
private float currentValue;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void visitTerm(BytesRef term) {
|
public void visitTerm(BytesRef term) {
|
||||||
currentValue = finalParser.parseFloat(term);
|
currentValue = parser.parseFloat(term);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -704,10 +759,11 @@ class FieldCacheImpl implements FieldCache {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
u.uninvert(reader, entryKey.field, setDocsWithField);
|
u.uninvert(reader, key.field, setDocsWithField);
|
||||||
|
|
||||||
if (setDocsWithField) {
|
if (setDocsWithField) {
|
||||||
wrapper.setDocsWithField(reader, entryKey.field, u.docsWithField);
|
wrapper.setDocsWithField(reader, key.field, u.docsWithField);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return new FloatsFromArray(values);
|
return new FloatsFromArray(values);
|
||||||
@@ -722,7 +778,7 @@ class FieldCacheImpl implements FieldCache {
   // inherit javadocs
   public Longs getLongs(AtomicReader reader, String field, FieldCache.LongParser parser, boolean setDocsWithField)
       throws IOException {
-    return (Longs) caches.get(Long.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
+    return (Longs) caches.get(Long.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
   }
 
   // nocommit move up?
@@ -745,9 +801,21 @@ class FieldCacheImpl implements FieldCache {
     }
 
     @Override
-    protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField)
+    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
         throws IOException {
-      LongParser parser = (LongParser) entryKey.custom;
+      int maxDoc = reader.maxDoc();
+      final long[] values;
+      NumericDocValues valuesIn = reader.getNumericDocValues(key.field);
+      if (valuesIn != null) {
+        // nocommit should we throw exc if parser isn't
+        // null?  if setDocsWithField is true?
+        values = new long[maxDoc];
+        for(int docID=0;docID<maxDoc;docID++) {
+          values[docID] = valuesIn.get(docID);
+        }
+      } else {
+        final LongParser parser = (LongParser) key.custom;
       if (parser == null) {
         // Confusing: must delegate to wrapper (vs simply
         // setting parser =
@@ -755,23 +823,22 @@ class FieldCacheImpl implements FieldCache {
         // cache key includes
         // DEFAULT_LONG_PARSER/NUMERIC_UTILS_LONG_PARSER:
         try {
-          return wrapper.getLongs(reader, entryKey.field, DEFAULT_LONG_PARSER, setDocsWithField);
+          return wrapper.getLongs(reader, key.field, DEFAULT_LONG_PARSER, setDocsWithField);
         } catch (NumberFormatException ne) {
-          return wrapper.getLongs(reader, entryKey.field, NUMERIC_UTILS_LONG_PARSER, setDocsWithField);
+          return wrapper.getLongs(reader, key.field, NUMERIC_UTILS_LONG_PARSER, setDocsWithField);
         }
       }
 
-      final LongParser finalParser = parser;
       // nocommit how to avoid double alloc in numeric field
       // case ...
-      final long[] values = new long[reader.maxDoc()];
+      values = new long[reader.maxDoc()];
 
       Uninvert u = new Uninvert() {
           private long currentValue;
 
           @Override
           public void visitTerm(BytesRef term) {
-            currentValue = finalParser.parseLong(term);
+            currentValue = parser.parseLong(term);
           }
 
           @Override
@@ -780,12 +847,12 @@ class FieldCacheImpl implements FieldCache {
           }
         };
 
-      u.uninvert(reader, entryKey.field, setDocsWithField);
+      u.uninvert(reader, key.field, setDocsWithField);
 
       if (setDocsWithField) {
-        wrapper.setDocsWithField(reader, entryKey.field, u.docsWithField);
+        wrapper.setDocsWithField(reader, key.field, u.docsWithField);
+      }
       }
 
       return new LongsFromArray(values);
     }
   }
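Each numeric cache in this patch follows the same two-way dispatch: copy straight out of NumericDocValues when the segment has them, otherwise uninvert the postings. A hedged, stand-alone sketch of that control flow, with NumericSource and uninvertLongs as stand-ins for NumericDocValues and Uninvert (only the control flow mirrors the patch):

    // Sketch of the "pull from DV if possible" dispatch.
    interface NumericSource {
      long get(int docID);
    }

    class LongsCacheSketch {
      static long[] loadLongs(NumericSource dv, int maxDoc) {
        final long[] values = new long[maxDoc];
        if (dv != null) {
          // Fast path: column-stride doc values were indexed; copy them out.
          for (int docID = 0; docID < maxDoc; docID++) {
            values[docID] = dv.get(docID);
          }
        } else {
          // Slow path: no doc values; uninvert the indexed terms instead.
          uninvertLongs(values);
        }
        return values;
      }

      static void uninvertLongs(long[] values) {
        // Placeholder for the term-walking fallback sketched earlier.
      }

      public static void main(String[] args) {
        NumericSource dv = docID -> docID * 10L; // pretend the segment has doc values
        System.out.println(java.util.Arrays.toString(loadLongs(dv, 4))); // [0, 10, 20, 30]
      }
    }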
@@ -799,7 +866,7 @@ class FieldCacheImpl implements FieldCache {
   // inherit javadocs
   public Doubles getDoubles(AtomicReader reader, String field, FieldCache.DoubleParser parser, boolean setDocsWithField)
       throws IOException {
-    return (Doubles) caches.get(Double.TYPE).get(reader, new Entry(field, parser), setDocsWithField);
+    return (Doubles) caches.get(Double.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
   }
 
   // nocommit move up?
@@ -822,9 +889,23 @@ class FieldCacheImpl implements FieldCache {
     }
 
     @Override
-    protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField)
+    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField)
         throws IOException {
-      DoubleParser parser = (DoubleParser) entryKey.custom;
+      int maxDoc = reader.maxDoc();
+      final double[] values;
+
+      NumericDocValues valuesIn = reader.getNumericDocValues(key.field);
+      if (valuesIn != null) {
+        // nocommit should we throw exc if parser isn't
+        // null?  if setDocsWithField is true?
+        values = new double[maxDoc];
+        for(int docID=0;docID<maxDoc;docID++) {
+          // nocommit somewhat dangerous ... eg if user had
+          // indexed as DV.BYTE ...
+          values[docID] = Double.longBitsToDouble(valuesIn.get(docID));
+        }
+      } else {
+        final DoubleParser parser = (DoubleParser) key.custom;
       if (parser == null) {
         // Confusing: must delegate to wrapper (vs simply
         // setting parser =
@@ -832,23 +913,22 @@ class FieldCacheImpl implements FieldCache {
         // cache key includes
         // DEFAULT_DOUBLE_PARSER/NUMERIC_UTILS_DOUBLE_PARSER:
         try {
-          return wrapper.getDoubles(reader, entryKey.field, DEFAULT_DOUBLE_PARSER, setDocsWithField);
+          return wrapper.getDoubles(reader, key.field, DEFAULT_DOUBLE_PARSER, setDocsWithField);
         } catch (NumberFormatException ne) {
-          return wrapper.getDoubles(reader, entryKey.field, NUMERIC_UTILS_DOUBLE_PARSER, setDocsWithField);
+          return wrapper.getDoubles(reader, key.field, NUMERIC_UTILS_DOUBLE_PARSER, setDocsWithField);
         }
       }
 
-      final DoubleParser finalParser = parser;
       // nocommit how to avoid double alloc in numeric field
       // case ...
-      final double[] values = new double[reader.maxDoc()];
+      values = new double[reader.maxDoc()];
 
       Uninvert u = new Uninvert() {
           private double currentValue;
 
           @Override
           public void visitTerm(BytesRef term) {
-            currentValue = finalParser.parseDouble(term);
+            currentValue = parser.parseDouble(term);
           }
 
           @Override
@@ -857,12 +937,12 @@ class FieldCacheImpl implements FieldCache {
           }
         };
 
-      u.uninvert(reader, entryKey.field, setDocsWithField);
+      u.uninvert(reader, key.field, setDocsWithField);
 
       if (setDocsWithField) {
-        wrapper.setDocsWithField(reader, entryKey.field, u.docsWithField);
+        wrapper.setDocsWithField(reader, key.field, u.docsWithField);
+      }
       }
 
       return new DoublesFromArray(values);
     }
   }
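The doubles branch assumes the long in the doc values is a raw IEEE-754 bit pattern, which is exactly what the nocommit flags as dangerous: a field whose values were written as plain small integers (e.g. DV.BYTE) would decode to subnormal garbage. A quick demonstration of both the round trip and the hazard:

    public class DoubleBitsDemo {
      public static void main(String[] args) {
        double original = 3.14159;
        // Write side: store the IEEE-754 bit pattern as a long.
        long stored = Double.doubleToRawLongBits(original);
        // Read side (what createValue does): reinterpret the long as a double.
        double decoded = Double.longBitsToDouble(stored);
        System.out.println(decoded); // 3.14159

        // The hazard the nocommit mentions: a long that was NOT double bits.
        System.out.println(Double.longBitsToDouble(5L)); // 2.5E-323, not 5.0
      }
    }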
@@ -1046,7 +1126,7 @@ class FieldCacheImpl implements FieldCache {
   }
 
   public DocTermsIndex getTermsIndex(AtomicReader reader, String field, float acceptableOverheadRatio) throws IOException {
-    return (DocTermsIndex) caches.get(DocTermsIndex.class).get(reader, new Entry(field, acceptableOverheadRatio), false);
+    return (DocTermsIndex) caches.get(DocTermsIndex.class).get(reader, new CacheKey(field, acceptableOverheadRatio), false);
   }
 
   static class DocTermsIndexCache extends Cache {
@@ -1055,12 +1135,69 @@ class FieldCacheImpl implements FieldCache {
     }
 
     @Override
-    protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField /* ignored */)
+    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField /* ignored */)
         throws IOException {
 
-      Terms terms = reader.terms(entryKey.field);
+      final int maxDoc = reader.maxDoc();
+      SortedDocValues valuesIn = reader.getSortedDocValues(key.field);
+      if (valuesIn != null) {
+        // nocommit used packed ints like below!
+        final byte[][] values = new byte[valuesIn.getValueCount()][];
+        BytesRef scratch = new BytesRef();
+        for(int ord=0;ord<values.length;ord++) {
+          valuesIn.lookupOrd(ord, scratch);
+          values[ord] = new byte[scratch.length];
+          System.arraycopy(scratch.bytes, scratch.offset, values[ord], 0, scratch.length);
+        }
 
-      final float acceptableOverheadRatio = ((Float) entryKey.custom).floatValue();
+        final int[] docToOrd = new int[maxDoc];
+        for(int docID=0;docID<maxDoc;docID++) {
+          docToOrd[docID] = valuesIn.getOrd(docID);
+        }
+
+        return new DocTermsIndex() {
+
+          @Override
+          public PackedInts.Reader getDocToOrd() {
+            // nocommit
+            return null;
+          }
+
+          @Override
+          public int numOrd() {
+            return values.length;
+          }
+
+          @Override
+          public int getOrd(int docID) {
+            return docToOrd[docID];
+          }
+
+          @Override
+          public int size() {
+            return docToOrd.length;
+          }
+
+          @Override
+          public BytesRef lookup(int ord, BytesRef ret) {
+            ret.bytes = values[ord];
+            ret.length = ret.bytes.length;
+            ret.offset = 0;
+            return ret;
+          }
+
+          @Override
+          public TermsEnum getTermsEnum() {
+            // nocommit
+            return null;
+          }
+        };
+
+      } else {
+
+        Terms terms = reader.terms(key.field);
+
+        final float acceptableOverheadRatio = ((Float) key.custom).floatValue();
 
       final PagedBytes bytes = new PagedBytes(15);
 
@@ -1068,7 +1205,6 @@ class FieldCacheImpl implements FieldCache {
       int startTermsBPV;
       int startNumUniqueTerms;
 
-      int maxDoc = reader.maxDoc();
       final int termCountHardLimit;
       if (maxDoc == Integer.MAX_VALUE) {
         termCountHardLimit = Integer.MAX_VALUE;
@@ -1151,6 +1287,7 @@ class FieldCacheImpl implements FieldCache {
       return new DocTermsIndexImpl(bytes.freeze(true), termOrdToBytesOffset.getMutable(), docToTermOrd.getMutable(), termOrd);
     }
   }
+  }
 
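The sorted branch above amounts to materializing two arrays from the doc values: one private byte[] per ord, and one ord per document. A toy sketch of that materialization, with SortedSource standing in for SortedDocValues (note the clone(), mirroring the System.arraycopy in the patch, since lookupOrd reuses its scratch buffer):

    import java.nio.charset.StandardCharsets;

    // Toy stand-in for SortedDocValues: per-doc ords plus ord -> term lookup.
    interface SortedSource {
      int getValueCount();
      int getOrd(int docID);
      byte[] lookupOrd(int ord);
    }

    class SortedMaterializer {
      final byte[][] values;   // one copy of each unique term, indexed by ord
      final int[] docToOrd;    // per-document ord

      SortedMaterializer(SortedSource in, int maxDoc) {
        values = new byte[in.getValueCount()][];
        for (int ord = 0; ord < values.length; ord++) {
          values[ord] = in.lookupOrd(ord).clone(); // private copy per ord
        }
        docToOrd = new int[maxDoc];
        for (int docID = 0; docID < maxDoc; docID++) {
          docToOrd[docID] = in.getOrd(docID);
        }
      }

      String term(int docID) {
        return new String(values[docToOrd[docID]], StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        byte[][] terms = { "apple".getBytes(StandardCharsets.UTF_8),
                           "banana".getBytes(StandardCharsets.UTF_8) };
        int[] ords = {1, 0, 1};
        SortedSource src = new SortedSource() {
          public int getValueCount() { return terms.length; }
          public int getOrd(int docID) { return ords[docID]; }
          public byte[] lookupOrd(int ord) { return terms[ord]; }
        };
        SortedMaterializer m = new SortedMaterializer(src, 3);
        System.out.println(m.term(0)); // banana
      }
    }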
 private static class DocTermsImpl extends DocTerms {
   private final PagedBytes.Reader bytes;
 
@@ -1185,7 +1322,7 @@ class FieldCacheImpl implements FieldCache {
   }
 
   public DocTerms getTerms(AtomicReader reader, String field, float acceptableOverheadRatio) throws IOException {
-    return (DocTerms) caches.get(DocTerms.class).get(reader, new Entry(field, acceptableOverheadRatio), false);
+    return (DocTerms) caches.get(DocTerms.class).get(reader, new CacheKey(field, acceptableOverheadRatio), false);
   }
 
   static final class DocTermsCache extends Cache {
@@ -1194,14 +1331,48 @@ class FieldCacheImpl implements FieldCache {
     }
 
     @Override
-    protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField /* ignored */)
+    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField /* ignored */)
        throws IOException {
 
-      Terms terms = reader.terms(entryKey.field);
+      final int maxDoc = reader.maxDoc();
+      BinaryDocValues valuesIn = reader.getBinaryDocValues(key.field);
+      if (valuesIn != null) {
+        // nocommit used packed ints like below!
+        final byte[][] values = new byte[maxDoc][];
+        BytesRef scratch = new BytesRef();
+        for(int docID=0;docID<maxDoc;docID++) {
+          valuesIn.get(docID, scratch);
+          values[docID] = new byte[scratch.length];
+          System.arraycopy(scratch.bytes, scratch.offset, values[docID], 0, scratch.length);
+        }
 
-      final float acceptableOverheadRatio = ((Float) entryKey.custom).floatValue();
+        return new DocTerms() {
+          @Override
+          public int size() {
+            return maxDoc;
+          }
 
-      final int termCountHardLimit = reader.maxDoc();
+          @Override
+          public boolean exists(int docID) {
+            // nocommit lying ...?
+            return true;
+          }
+
+          @Override
+          public BytesRef getTerm(int docID, BytesRef ret) {
+            ret.bytes = values[docID];
+            ret.length = ret.bytes.length;
+            ret.offset = 0;
+            return ret;
+          }
+        };
+      } else {
+
+        Terms terms = reader.terms(key.field);
+
+        final float acceptableOverheadRatio = ((Float) key.custom).floatValue();
+
+        final int termCountHardLimit = maxDoc;
 
       // Holds the actual term data, expanded.
       final PagedBytes bytes = new PagedBytes(15);
 
@@ -1225,7 +1396,7 @@ class FieldCacheImpl implements FieldCache {
         startBPV = 1;
       }
 
-      final GrowableWriter docToOffset = new GrowableWriter(startBPV, reader.maxDoc(), acceptableOverheadRatio);
+      final GrowableWriter docToOffset = new GrowableWriter(startBPV, maxDoc, acceptableOverheadRatio);
 
      // pointer==0 means not set
      bytes.copyUsingLengthPrefix(new BytesRef());
 
@@ -1262,9 +1433,10 @@ class FieldCacheImpl implements FieldCache {
      return new DocTermsImpl(bytes.freeze(true), docToOffset.getMutable());
    }
  }
+  }
 
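The binary branch relies on the same copy discipline: get(docID, scratch) fills a reused scratch buffer, so each document's bytes must be copied out before the next call overwrites them. A minimal illustration of that reuse hazard, with ByteWindow as an illustrative stand-in for BytesRef:

    import java.util.Arrays;

    // Minimal BytesRef-like holder: a (bytes, offset, length) window that
    // producers are allowed to reuse between calls.
    class ByteWindow {
      byte[] bytes = new byte[0];
      int offset, length;
    }

    class ScratchCopyDemo {
      // Simulated per-doc values, served through ONE reused buffer.
      static final byte[][] stored = { {1, 2}, {9, 9, 9} };
      static final byte[] shared = new byte[8];

      static void get(int docID, ByteWindow ref) {
        System.arraycopy(stored[docID], 0, shared, 0, stored[docID].length);
        ref.bytes = shared;          // caller sees the shared buffer...
        ref.offset = 0;
        ref.length = stored[docID].length;
      }

      public static void main(String[] args) {
        ByteWindow scratch = new ByteWindow();
        byte[][] values = new byte[2][];
        for (int docID = 0; docID < 2; docID++) {
          get(docID, scratch);
          // ...so take a private copy, as the patch does with arraycopy.
          values[docID] = Arrays.copyOfRange(scratch.bytes, scratch.offset,
                                             scratch.offset + scratch.length);
        }
        System.out.println(Arrays.toString(values[0])); // [1, 2] — survives the next get()
      }
    }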
   public DocTermOrds getDocTermOrds(AtomicReader reader, String field) throws IOException {
-    return (DocTermOrds) caches.get(DocTermOrds.class).get(reader, new Entry(field, null), false);
+    return (DocTermOrds) caches.get(DocTermOrds.class).get(reader, new CacheKey(field, null), false);
   }
 
   static final class DocTermOrdsCache extends Cache {
@@ -1273,9 +1445,10 @@ class FieldCacheImpl implements FieldCache {
     }
 
     @Override
-    protected Object createValue(AtomicReader reader, Entry entryKey, boolean setDocsWithField /* ignored */)
+    protected Object createValue(AtomicReader reader, CacheKey key, boolean setDocsWithField /* ignored */)
        throws IOException {
-      return new DocTermOrds(reader, entryKey.field);
+      // No DocValues impl yet (DocValues are single valued...):
+      return new DocTermOrds(reader, key.field);
    }
  }
 
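The Entry-to-CacheKey rename that runs through every cache in this file does not change the keying model: a value is still cached per (field, custom) pair, where custom is a parser, an acceptableOverheadRatio, or null, and equals/hashCode decide whether two lookups share one cached value. A stripped-down sketch of what such a key needs (my own illustrative code, not the class from the patch):

    import java.util.Objects;

    // Minimal cache key over (field, custom): two lookups with an equal
    // field name and an equal custom object (e.g. the same parser) hit the
    // same cached entry.
    final class CacheKeySketch {
      final String field;
      final Object custom; // parser, acceptableOverheadRatio, or null

      CacheKeySketch(String field, Object custom) {
        this.field = field;
        this.custom = custom;
      }

      @Override public boolean equals(Object other) {
        if (!(other instanceof CacheKeySketch)) return false;
        CacheKeySketch o = (CacheKeySketch) other;
        return field.equals(o.field) && Objects.equals(custom, o.custom);
      }

      @Override public int hashCode() {
        return field.hashCode() ^ Objects.hashCode(custom);
      }

      public static void main(String[] args) {
        CacheKeySketch a = new CacheKeySketch("dv", null);
        CacheKeySketch b = new CacheKeySketch("dv", null);
        System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
      }
    }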
@@ -87,6 +87,7 @@ import org.apache.lucene.util.packed.PackedInts;
  * @lucene.experimental
  */
 public abstract class FieldComparator<T> {
+  // nocommit remove the doc values comparators
 
   /**
    * Compare hit at slot1 with hit at slot2.
@@ -49,7 +49,7 @@ import org.apache.lucene.util.LuceneTestCase;
  * to this class.
  */
 // nocommit don't suppress any:
-@SuppressCodecs({"Direct", "Memory", "Lucene41", "MockRandom", "Lucene40", "Compressing"})
+@SuppressCodecs({"Asserting", "Direct", "Memory", "Lucene41", "MockRandom", "Lucene40", "Compressing"})
 public class TestDemoDocValue extends LuceneTestCase {
 
   public void testDemoNumber() throws IOException {
@@ -82,7 +82,7 @@ public class TestDemoDocValue extends LuceneTestCase {
       StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
       assertEquals(text, hitDoc.get("fieldname"));
       assert ireader.leaves().size() == 1;
-      NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv", random().nextBoolean());
+      NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv");
       assertEquals(5, dv.get(hits.scoreDocs[i].doc));
     }
 
@@ -113,7 +113,7 @@ public class TestDemoDocValue extends LuceneTestCase {
     // Now search the index:
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
-    NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv", random().nextBoolean());
+    NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv");
     assertEquals(1, dv.get(0));
     assertEquals(2, dv.get(1));
 
@@ -147,7 +147,7 @@ public class TestDemoDocValue extends LuceneTestCase {
     // Now search the index:
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
-    NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv", random().nextBoolean());
+    NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv");
     for(int i=0;i<2;i++) {
       StoredDocument doc2 = ireader.leaves().get(0).reader().document(i);
       long expected;
@@ -186,7 +186,7 @@ public class TestDemoDocValue extends LuceneTestCase {
     // Now search the index:
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
-    NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv", random().nextBoolean());
+    NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv");
     assertEquals(Long.MIN_VALUE, dv.get(0));
     assertEquals(Long.MAX_VALUE, dv.get(1));
 
@@ -225,7 +225,7 @@ public class TestDemoDocValue extends LuceneTestCase {
       StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
       assertEquals(text, hitDoc.get("fieldname"));
       assert ireader.leaves().size() == 1;
-      BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv", random().nextBoolean());
+      BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
       dv.get(hits.scoreDocs[i].doc, scratch);
       assertEquals(new BytesRef("hello world"), scratch);
     }
@@ -262,7 +262,7 @@ public class TestDemoDocValue extends LuceneTestCase {
     // Now search the index:
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
-    BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv", random().nextBoolean());
+    BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
     BytesRef scratch = new BytesRef();
     for(int i=0;i<2;i++) {
       StoredDocument doc2 = ireader.leaves().get(0).reader().document(i);
@@ -311,7 +311,7 @@ public class TestDemoDocValue extends LuceneTestCase {
       StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
       assertEquals(text, hitDoc.get("fieldname"));
       assert ireader.leaves().size() == 1;
-      SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv", random().nextBoolean());
+      SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
       dv.lookupOrd(dv.getOrd(hits.scoreDocs[i].doc), scratch);
       assertEquals(new BytesRef("hello world"), scratch);
     }
@@ -343,7 +343,7 @@ public class TestDemoDocValue extends LuceneTestCase {
     // Now search the index:
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
-    SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv", random().nextBoolean());
+    SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
     BytesRef scratch = new BytesRef();
     dv.lookupOrd(dv.getOrd(0), scratch);
     assertEquals("hello world 1", scratch.utf8ToString());
@@ -380,7 +380,7 @@ public class TestDemoDocValue extends LuceneTestCase {
     // Now search the index:
     IndexReader ireader = DirectoryReader.open(directory); // read-only=true
     assert ireader.leaves().size() == 1;
-    SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv", random().nextBoolean());
+    SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
     BytesRef scratch = new BytesRef();
     for(int i=0;i<2;i++) {
       StoredDocument doc2 = ireader.leaves().get(0).reader().document(i);
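The test changes above and the stub changes below are the same mechanical edit: the boolean direct argument is gone, so every call site shrinks to a single-argument lookup and the reader decides how to serve the values. A toy sketch of the before/after call shape, with LeafReaderSketch and LongValuesSketch as illustrative stand-ins for AtomicReader and NumericDocValues:

    // Before: callers chose between a random-access ("direct") and a cached
    // view at every call site. After: one accessor; the reader decides.
    interface LeafReaderSketch {
      LongValuesSketch getNumericDocValues(String field); // was (String field, boolean direct)
    }

    interface LongValuesSketch {
      long get(int docID);
    }

    class AccessorDemo {
      public static void main(String[] args) {
        LeafReaderSketch reader = field -> docID -> 5L; // toy segment: every doc holds 5
        LongValuesSketch dv = reader.getNumericDocValues("dv");
        System.out.println(dv.get(0)); // 5
      }
    }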
@@ -740,17 +740,17 @@ public class MemoryIndex {
   }
 
   // nocommit todo
-  public NumericDocValues getNumericDocValues(String field, boolean direct) {
+  public NumericDocValues getNumericDocValues(String field) {
     return null;
   }
 
   // nocommit todo
-  public BinaryDocValues getBinaryDocValues(String field, boolean direct) {
+  public BinaryDocValues getBinaryDocValues(String field) {
     return null;
   }
 
   // nocommit todo
-  public SortedDocValues getSortedDocValues(String field, boolean direct) {
+  public SortedDocValues getSortedDocValues(String field) {
     return null;
   }
@@ -399,17 +399,17 @@ public class TestDocSet extends LuceneTestCase {
   }
 
   @Override
-  public NumericDocValues getNumericDocValues(String field, boolean direct) {
+  public NumericDocValues getNumericDocValues(String field) {
     return null;
   }
 
   @Override
-  public BinaryDocValues getBinaryDocValues(String field, boolean direct) {
+  public BinaryDocValues getBinaryDocValues(String field) {
     return null;
   }
 
   @Override
-  public SortedDocValues getSortedDocValues(String field, boolean direct) {
+  public SortedDocValues getSortedDocValues(String field) {
     return null;
   }