LUCENE-5703: BinaryDocValues producers don't allocate or copy bytes on each access anymore

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1600688 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2014-06-05 15:54:49 +00:00
parent 9c4695bcc1
commit 8f267c8560
77 changed files with 1184 additions and 1184 deletions

View File

@ -205,6 +205,10 @@ API Changes
removed, because buffering and checksumming is provided by FilterOutputStreams, removed, because buffering and checksumming is provided by FilterOutputStreams,
provided by the JDK. (Uwe Schindler, Mike McCandless) provided by the JDK. (Uwe Schindler, Mike McCandless)
* LUCENE-5703: BinaryDocValues API changed to work like TermsEnum and not allocate/
copy bytes on each access, you are responsible for cloning if you want to keep
data around. (Adrien Grand)
Optimizations Optimizations
* LUCENE-5603: hunspell stemmer more efficiently strips prefixes * LUCENE-5603: hunspell stemmer more efficiently strips prefixes
@ -247,6 +251,9 @@ Optimizations
* LUCENE-5730: FSDirectory.open returns MMapDirectory for 64-bit operating * LUCENE-5730: FSDirectory.open returns MMapDirectory for 64-bit operating
systems, not just Linux and Windows. (Robert Muir) systems, not just Linux and Windows. (Robert Muir)
* LUCENE-5703: BinaryDocValues producers don't allocate or copy bytes on
each access anymore. (Adrien Grand)
Bug fixes Bug fixes
* LUCENE-5673: MMapDirectory: Work around a "bug" in the JDK that throws * LUCENE-5673: MMapDirectory: Work around a "bug" in the JDK that throws

View File

@ -57,9 +57,9 @@ class DirectDocValuesProducer extends DocValuesProducer {
// ram instances we have already loaded // ram instances we have already loaded
private final Map<Integer,NumericDocValues> numericInstances = private final Map<Integer,NumericDocValues> numericInstances =
new HashMap<>(); new HashMap<>();
private final Map<Integer,BinaryDocValues> binaryInstances = private final Map<Integer,BinaryRawValues> binaryInstances =
new HashMap<>(); new HashMap<>();
private final Map<Integer,SortedDocValues> sortedInstances = private final Map<Integer,SortedRawValues> sortedInstances =
new HashMap<>(); new HashMap<>();
private final Map<Integer,SortedSetRawValues> sortedSetInstances = private final Map<Integer,SortedSetRawValues> sortedSetInstances =
new HashMap<>(); new HashMap<>();
@ -178,9 +178,13 @@ class DirectDocValuesProducer extends DocValuesProducer {
} else if (fieldType == BYTES) { } else if (fieldType == BYTES) {
binaries.put(fieldNumber, readBinaryEntry(meta)); binaries.put(fieldNumber, readBinaryEntry(meta));
} else if (fieldType == SORTED) { } else if (fieldType == SORTED) {
sorteds.put(fieldNumber, readSortedEntry(meta)); SortedEntry entry = readSortedEntry(meta);
sorteds.put(fieldNumber, entry);
binaries.put(fieldNumber, entry.values);
} else if (fieldType == SORTED_SET) { } else if (fieldType == SORTED_SET) {
sortedSets.put(fieldNumber, readSortedSetEntry(meta)); SortedSetEntry entry = readSortedSetEntry(meta);
sortedSets.put(fieldNumber, entry);
binaries.put(fieldNumber, entry.values);
} else { } else {
throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta); throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta);
} }
@ -279,16 +283,29 @@ class DirectDocValuesProducer extends DocValuesProducer {
@Override @Override
public synchronized BinaryDocValues getBinary(FieldInfo field) throws IOException { public synchronized BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryDocValues instance = binaryInstances.get(field.number); BinaryRawValues instance = binaryInstances.get(field.number);
if (instance == null) { if (instance == null) {
// Lazy load // Lazy load
instance = loadBinary(binaries.get(field.number)); instance = loadBinary(binaries.get(field.number));
binaryInstances.put(field.number, instance); binaryInstances.put(field.number, instance);
} }
return instance; final byte[] bytes = instance.bytes;
final int[] address = instance.address;
return new BinaryDocValues() {
final BytesRef term = new BytesRef();
@Override
public BytesRef get(int docID) {
term.bytes = bytes;
term.offset = address[docID];
term.length = address[docID+1] - term.offset;
return term;
}
};
} }
private BinaryDocValues loadBinary(BinaryEntry entry) throws IOException { private BinaryRawValues loadBinary(BinaryEntry entry) throws IOException {
data.seek(entry.offset); data.seek(entry.offset);
final byte[] bytes = new byte[entry.numBytes]; final byte[] bytes = new byte[entry.numBytes];
data.readBytes(bytes, 0, entry.numBytes); data.readBytes(bytes, 0, entry.numBytes);
@ -302,31 +319,26 @@ class DirectDocValuesProducer extends DocValuesProducer {
ramBytesUsed.addAndGet(RamUsageEstimator.sizeOf(bytes) + RamUsageEstimator.sizeOf(address)); ramBytesUsed.addAndGet(RamUsageEstimator.sizeOf(bytes) + RamUsageEstimator.sizeOf(address));
return new BinaryDocValues() { BinaryRawValues values = new BinaryRawValues();
@Override values.bytes = bytes;
public void get(int docID, BytesRef result) { values.address = address;
result.bytes = bytes; return values;
result.offset = address[docID];
result.length = address[docID+1] - result.offset;
};
};
} }
@Override @Override
public synchronized SortedDocValues getSorted(FieldInfo field) throws IOException { public SortedDocValues getSorted(FieldInfo field) throws IOException {
SortedDocValues instance = sortedInstances.get(field.number);
if (instance == null) {
// Lazy load
instance = loadSorted(field);
sortedInstances.put(field.number, instance);
}
return instance;
}
private SortedDocValues loadSorted(FieldInfo field) throws IOException {
final SortedEntry entry = sorteds.get(field.number); final SortedEntry entry = sorteds.get(field.number);
final NumericDocValues docToOrd = loadNumeric(entry.docToOrd); SortedRawValues instance;
final BinaryDocValues values = loadBinary(entry.values); synchronized (this) {
instance = sortedInstances.get(field.number);
if (instance == null) {
// Lazy load
instance = loadSorted(field);
sortedInstances.put(field.number, instance);
}
}
final NumericDocValues docToOrd = instance.docToOrd;
final BinaryDocValues values = getBinary(field);
return new SortedDocValues() { return new SortedDocValues() {
@ -336,8 +348,8 @@ class DirectDocValuesProducer extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
values.get(ord, result); return values.get(ord);
} }
@Override @Override
@ -351,6 +363,14 @@ class DirectDocValuesProducer extends DocValuesProducer {
}; };
} }
private SortedRawValues loadSorted(FieldInfo field) throws IOException {
final SortedEntry entry = sorteds.get(field.number);
final NumericDocValues docToOrd = loadNumeric(entry.docToOrd);
final SortedRawValues values = new SortedRawValues();
values.docToOrd = docToOrd;
return values;
}
@Override @Override
public synchronized SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { public synchronized SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
SortedSetRawValues instance = sortedSetInstances.get(field.number); SortedSetRawValues instance = sortedSetInstances.get(field.number);
@ -363,7 +383,7 @@ class DirectDocValuesProducer extends DocValuesProducer {
final NumericDocValues docToOrdAddress = instance.docToOrdAddress; final NumericDocValues docToOrdAddress = instance.docToOrdAddress;
final NumericDocValues ords = instance.ords; final NumericDocValues ords = instance.ords;
final BinaryDocValues values = instance.values; final BinaryDocValues values = getBinary(field);
// Must make a new instance since the iterator has state: // Must make a new instance since the iterator has state:
return new RandomAccessOrds() { return new RandomAccessOrds() {
@ -387,8 +407,8 @@ class DirectDocValuesProducer extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
values.get((int) ord, result); return values.get((int) ord);
} }
@Override @Override
@ -416,7 +436,6 @@ class DirectDocValuesProducer extends DocValuesProducer {
SortedSetRawValues instance = new SortedSetRawValues(); SortedSetRawValues instance = new SortedSetRawValues();
instance.docToOrdAddress = loadNumeric(entry.docToOrdAddress); instance.docToOrdAddress = loadNumeric(entry.docToOrdAddress);
instance.ords = loadNumeric(entry.ords); instance.ords = loadNumeric(entry.ords);
instance.values = loadBinary(entry.values);
return instance; return instance;
} }
@ -465,11 +484,19 @@ class DirectDocValuesProducer extends DocValuesProducer {
public void close() throws IOException { public void close() throws IOException {
data.close(); data.close();
} }
static class BinaryRawValues {
byte[] bytes;
int[] address;
}
static class SortedRawValues {
NumericDocValues docToOrd;
}
static class SortedSetRawValues { static class SortedSetRawValues {
NumericDocValues docToOrdAddress; NumericDocValues docToOrdAddress;
NumericDocValues ords; NumericDocValues ords;
BinaryDocValues values;
} }
static class NumericEntry { static class NumericEntry {

View File

@ -71,7 +71,7 @@ class MemoryDocValuesProducer extends DocValuesProducer {
// ram instances we have already loaded // ram instances we have already loaded
private final Map<Integer,NumericDocValues> numericInstances = private final Map<Integer,NumericDocValues> numericInstances =
new HashMap<>(); new HashMap<>();
private final Map<Integer,BinaryDocValues> binaryInstances = private final Map<Integer,BytesAndAddresses> pagedBytesInstances =
new HashMap<>(); new HashMap<>();
private final Map<Integer,FST<Long>> fstInstances = private final Map<Integer,FST<Long>> fstInstances =
new HashMap<>(); new HashMap<>();
@ -279,50 +279,68 @@ class MemoryDocValuesProducer extends DocValuesProducer {
} }
@Override @Override
public synchronized BinaryDocValues getBinary(FieldInfo field) throws IOException { public BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryDocValues instance = binaryInstances.get(field.number); BinaryEntry entry = binaries.get(field.number);
if (instance == null) {
instance = loadBinary(field); BytesAndAddresses instance;
binaryInstances.put(field.number, instance); synchronized (this) {
instance = pagedBytesInstances.get(field.number);
if (instance == null) {
instance = loadBinary(field);
pagedBytesInstances.put(field.number, instance);
}
}
final PagedBytes.Reader bytesReader = instance.reader;
final MonotonicBlockPackedReader addresses = instance.addresses;
if (addresses == null) {
assert entry.minLength == entry.maxLength;
final int fixedLength = entry.minLength;
return new BinaryDocValues() {
final BytesRef term = new BytesRef();
@Override
public BytesRef get(int docID) {
bytesReader.fillSlice(term, fixedLength * (long)docID, fixedLength);
return term;
}
};
} else {
return new BinaryDocValues() {
final BytesRef term = new BytesRef();
@Override
public BytesRef get(int docID) {
long startAddress = docID == 0 ? 0 : addresses.get(docID-1);
long endAddress = addresses.get(docID);
bytesReader.fillSlice(term, startAddress, (int) (endAddress - startAddress));
return term;
}
};
} }
return instance;
} }
private BinaryDocValues loadBinary(FieldInfo field) throws IOException { private BytesAndAddresses loadBinary(FieldInfo field) throws IOException {
BytesAndAddresses bytesAndAddresses = new BytesAndAddresses();
BinaryEntry entry = binaries.get(field.number); BinaryEntry entry = binaries.get(field.number);
data.seek(entry.offset); data.seek(entry.offset);
PagedBytes bytes = new PagedBytes(16); PagedBytes bytes = new PagedBytes(16);
bytes.copy(data, entry.numBytes); bytes.copy(data, entry.numBytes);
final PagedBytes.Reader bytesReader = bytes.freeze(true); bytesAndAddresses.reader = bytes.freeze(true);
if (entry.minLength == entry.maxLength) { ramBytesUsed.addAndGet(bytesAndAddresses.reader.ramBytesUsed());
final int fixedLength = entry.minLength; if (entry.minLength != entry.maxLength) {
ramBytesUsed.addAndGet(bytes.ramBytesUsed());
return new BinaryDocValues() {
@Override
public void get(int docID, BytesRef result) {
bytesReader.fillSlice(result, fixedLength * (long)docID, fixedLength);
}
};
} else {
data.seek(data.getFilePointer() + entry.missingBytes); data.seek(data.getFilePointer() + entry.missingBytes);
final MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false); bytesAndAddresses.addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
ramBytesUsed.addAndGet(bytes.ramBytesUsed() + addresses.ramBytesUsed()); ramBytesUsed.addAndGet(bytesAndAddresses.addresses.ramBytesUsed());
return new BinaryDocValues() {
@Override
public void get(int docID, BytesRef result) {
long startAddress = docID == 0 ? 0 : addresses.get(docID-1);
long endAddress = addresses.get(docID);
bytesReader.fillSlice(result, startAddress, (int) (endAddress - startAddress));
}
};
} }
return bytesAndAddresses;
} }
@Override @Override
public SortedDocValues getSorted(FieldInfo field) throws IOException { public SortedDocValues getSorted(FieldInfo field) throws IOException {
final FSTEntry entry = fsts.get(field.number); final FSTEntry entry = fsts.get(field.number);
if (entry.numOrds == 0) { if (entry.numOrds == 0) {
return DocValues.EMPTY_SORTED; return DocValues.emptySorted();
} }
FST<Long> instance; FST<Long> instance;
synchronized(this) { synchronized(this) {
@ -345,21 +363,21 @@ class MemoryDocValuesProducer extends DocValuesProducer {
final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst); final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
return new SortedDocValues() { return new SortedDocValues() {
final BytesRef term = new BytesRef();
@Override @Override
public int getOrd(int docID) { public int getOrd(int docID) {
return (int) docToOrd.get(docID); return (int) docToOrd.get(docID);
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
try { try {
in.setPosition(0); in.setPosition(0);
fst.getFirstArc(firstArc); fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts); IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
result.bytes = new byte[output.length]; Util.toBytesRef(output, term);
result.offset = 0; return term;
result.length = 0;
Util.toBytesRef(output, result);
} catch (IOException bogus) { } catch (IOException bogus) {
throw new RuntimeException(bogus); throw new RuntimeException(bogus);
} }
@ -397,7 +415,7 @@ class MemoryDocValuesProducer extends DocValuesProducer {
public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
final FSTEntry entry = fsts.get(field.number); final FSTEntry entry = fsts.get(field.number);
if (entry.numOrds == 0) { if (entry.numOrds == 0) {
return DocValues.EMPTY_SORTED_SET; // empty FST! return DocValues.emptySortedSet(); // empty FST!
} }
FST<Long> instance; FST<Long> instance;
synchronized(this) { synchronized(this) {
@ -418,9 +436,10 @@ class MemoryDocValuesProducer extends DocValuesProducer {
final Arc<Long> scratchArc = new Arc<>(); final Arc<Long> scratchArc = new Arc<>();
final IntsRef scratchInts = new IntsRef(); final IntsRef scratchInts = new IntsRef();
final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst); final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
final BytesRef ref = new BytesRef();
final ByteArrayDataInput input = new ByteArrayDataInput(); final ByteArrayDataInput input = new ByteArrayDataInput();
return new SortedSetDocValues() { return new SortedSetDocValues() {
final BytesRef term = new BytesRef();
BytesRef ref;
long currentOrd; long currentOrd;
@Override @Override
@ -435,21 +454,19 @@ class MemoryDocValuesProducer extends DocValuesProducer {
@Override @Override
public void setDocument(int docID) { public void setDocument(int docID) {
docToOrds.get(docID, ref); ref = docToOrds.get(docID);
input.reset(ref.bytes, ref.offset, ref.length); input.reset(ref.bytes, ref.offset, ref.length);
currentOrd = 0; currentOrd = 0;
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
try { try {
in.setPosition(0); in.setPosition(0);
fst.getFirstArc(firstArc); fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts); IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
result.bytes = new byte[output.length]; Util.toBytesRef(output, term);
result.offset = 0; return term;
result.length = 0;
Util.toBytesRef(output, result);
} catch (IOException bogus) { } catch (IOException bogus) {
throw new RuntimeException(bogus); throw new RuntimeException(bogus);
} }
@ -552,7 +569,12 @@ class MemoryDocValuesProducer extends DocValuesProducer {
long offset; long offset;
long numOrds; long numOrds;
} }
static class BytesAndAddresses {
PagedBytes.Reader reader;
MonotonicBlockPackedReader addresses;
}
// exposes FSTEnum directly as a TermsEnum: avoids binary-search next() // exposes FSTEnum directly as a TermsEnum: avoids binary-search next()
static class FSTTermsEnum extends TermsEnum { static class FSTTermsEnum extends TermsEnum {
final BytesRefFSTEnum<Long> in; final BytesRefFSTEnum<Long> in;

View File

@ -216,8 +216,10 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT)); final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
return new BinaryDocValues() { return new BinaryDocValues() {
final BytesRef term = new BytesRef();
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
try { try {
if (docID < 0 || docID >= maxDoc) { if (docID < 0 || docID >= maxDoc) {
throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc-1) + "; got " + docID); throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc-1) + "; got " + docID);
@ -231,10 +233,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
} catch (ParseException pe) { } catch (ParseException pe) {
throw new CorruptIndexException("failed to parse int length (resource=" + in + ")", pe); throw new CorruptIndexException("failed to parse int length (resource=" + in + ")", pe);
} }
result.bytes = new byte[len]; term.grow(len);
result.offset = 0; term.offset = 0;
result.length = len; term.length = len;
in.readBytes(result.bytes, 0, len); in.readBytes(term.bytes, 0, len);
return term;
} catch (IOException ioe) { } catch (IOException ioe) {
throw new RuntimeException(ioe); throw new RuntimeException(ioe);
} }
@ -293,6 +296,8 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
final DecimalFormat ordDecoder = new DecimalFormat(field.ordPattern, new DecimalFormatSymbols(Locale.ROOT)); final DecimalFormat ordDecoder = new DecimalFormat(field.ordPattern, new DecimalFormatSymbols(Locale.ROOT));
return new SortedDocValues() { return new SortedDocValues() {
final BytesRef term = new BytesRef();
@Override @Override
public int getOrd(int docID) { public int getOrd(int docID) {
if (docID < 0 || docID >= maxDoc) { if (docID < 0 || docID >= maxDoc) {
@ -312,7 +317,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
try { try {
if (ord < 0 || ord >= field.numValues) { if (ord < 0 || ord >= field.numValues) {
throw new IndexOutOfBoundsException("ord must be 0 .. " + (field.numValues-1) + "; got " + ord); throw new IndexOutOfBoundsException("ord must be 0 .. " + (field.numValues-1) + "; got " + ord);
@ -326,10 +331,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
} catch (ParseException pe) { } catch (ParseException pe) {
throw new CorruptIndexException("failed to parse int length (resource=" + in + ")", pe); throw new CorruptIndexException("failed to parse int length (resource=" + in + ")", pe);
} }
result.bytes = new byte[len]; term.grow(len);
result.offset = 0; term.offset = 0;
result.length = len; term.length = len;
in.readBytes(result.bytes, 0, len); in.readBytes(term.bytes, 0, len);
return term;
} catch (IOException ioe) { } catch (IOException ioe) {
throw new RuntimeException(ioe); throw new RuntimeException(ioe);
} }
@ -357,6 +363,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return new SortedSetDocValues() { return new SortedSetDocValues() {
String[] currentOrds = new String[0]; String[] currentOrds = new String[0];
int currentIndex = 0; int currentIndex = 0;
final BytesRef term = new BytesRef();
@Override @Override
public long nextOrd() { public long nextOrd() {
@ -388,7 +395,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
try { try {
if (ord < 0 || ord >= field.numValues) { if (ord < 0 || ord >= field.numValues) {
throw new IndexOutOfBoundsException("ord must be 0 .. " + (field.numValues-1) + "; got " + ord); throw new IndexOutOfBoundsException("ord must be 0 .. " + (field.numValues-1) + "; got " + ord);
@ -402,10 +409,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
} catch (ParseException pe) { } catch (ParseException pe) {
throw new CorruptIndexException("failed to parse int length (resource=" + in + ")", pe); throw new CorruptIndexException("failed to parse int length (resource=" + in + ")", pe);
} }
result.bytes = new byte[len]; term.grow(len);
result.offset = 0; term.offset = 0;
result.length = len; term.length = len;
in.readBytes(result.bytes, 0, len); in.readBytes(term.bytes, 0, len);
return term;
} catch (IOException ioe) { } catch (IOException ioe) {
throw new RuntimeException(ioe); throw new RuntimeException(ioe);
} }

View File

@ -199,7 +199,7 @@ public abstract class DocValuesConsumer implements Closeable {
return new Iterator<BytesRef>() { return new Iterator<BytesRef>() {
int readerUpto = -1; int readerUpto = -1;
int docIDUpto; int docIDUpto;
BytesRef nextValue = new BytesRef(); BytesRef nextValue;
BytesRef nextPointer; // points to null if missing, or nextValue BytesRef nextPointer; // points to null if missing, or nextValue
AtomicReader currentReader; AtomicReader currentReader;
BinaryDocValues currentValues; BinaryDocValues currentValues;
@ -248,7 +248,7 @@ public abstract class DocValuesConsumer implements Closeable {
if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) { if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
nextIsSet = true; nextIsSet = true;
if (currentDocsWithField.get(docIDUpto)) { if (currentDocsWithField.get(docIDUpto)) {
currentValues.get(docIDUpto, nextValue); nextValue = currentValues.get(docIDUpto);
nextPointer = nextValue; nextPointer = nextValue;
} else { } else {
nextPointer = null; nextPointer = null;
@ -308,7 +308,6 @@ public abstract class DocValuesConsumer implements Closeable {
@Override @Override
public Iterator<BytesRef> iterator() { public Iterator<BytesRef> iterator() {
return new Iterator<BytesRef>() { return new Iterator<BytesRef>() {
final BytesRef scratch = new BytesRef();
int currentOrd; int currentOrd;
@Override @Override
@ -323,9 +322,9 @@ public abstract class DocValuesConsumer implements Closeable {
} }
int segmentNumber = map.getFirstSegmentNumber(currentOrd); int segmentNumber = map.getFirstSegmentNumber(currentOrd);
int segmentOrd = (int)map.getFirstSegmentOrd(currentOrd); int segmentOrd = (int)map.getFirstSegmentOrd(currentOrd);
dvs[segmentNumber].lookupOrd(segmentOrd, scratch); final BytesRef term = dvs[segmentNumber].lookupOrd(segmentOrd);
currentOrd++; currentOrd++;
return scratch; return term;
} }
@Override @Override
@ -444,7 +443,6 @@ public abstract class DocValuesConsumer implements Closeable {
@Override @Override
public Iterator<BytesRef> iterator() { public Iterator<BytesRef> iterator() {
return new Iterator<BytesRef>() { return new Iterator<BytesRef>() {
final BytesRef scratch = new BytesRef();
long currentOrd; long currentOrd;
@Override @Override
@ -459,9 +457,9 @@ public abstract class DocValuesConsumer implements Closeable {
} }
int segmentNumber = map.getFirstSegmentNumber(currentOrd); int segmentNumber = map.getFirstSegmentNumber(currentOrd);
long segmentOrd = map.getFirstSegmentOrd(currentOrd); long segmentOrd = map.getFirstSegmentOrd(currentOrd);
dvs[segmentNumber].lookupOrd(segmentOrd, scratch); final BytesRef term = dvs[segmentNumber].lookupOrd(segmentOrd);
currentOrd++; currentOrd++;
return scratch; return term;
} }
@Override @Override

View File

@ -329,9 +329,12 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
success = true; success = true;
ramBytesUsed.addAndGet(bytes.ramBytesUsed()); ramBytesUsed.addAndGet(bytes.ramBytesUsed());
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
bytesReader.fillSlice(result, fixedLength * (long)docID, fixedLength); final BytesRef term = new BytesRef();
bytesReader.fillSlice(term, fixedLength * (long)docID, fixedLength);
return term;
} }
}; };
} finally { } finally {
@ -369,10 +372,12 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
ramBytesUsed.addAndGet(bytes.ramBytesUsed() + reader.ramBytesUsed()); ramBytesUsed.addAndGet(bytes.ramBytesUsed() + reader.ramBytesUsed());
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
final BytesRef term = new BytesRef();
long startAddress = reader.get(docID); long startAddress = reader.get(docID);
long endAddress = reader.get(docID+1); long endAddress = reader.get(docID+1);
bytesReader.fillSlice(result, startAddress, (int)(endAddress - startAddress)); bytesReader.fillSlice(term, startAddress, (int)(endAddress - startAddress));
return term;
} }
}; };
} finally { } finally {
@ -412,9 +417,11 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
success = true; success = true;
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
final BytesRef term = new BytesRef();
final long offset = fixedLength * reader.get(docID); final long offset = fixedLength * reader.get(docID);
bytesReader.fillSlice(result, offset, fixedLength); bytesReader.fillSlice(term, offset, fixedLength);
return term;
} }
}; };
} finally { } finally {
@ -452,20 +459,23 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
ramBytesUsed.addAndGet(bytes.ramBytesUsed() + reader.ramBytesUsed()); ramBytesUsed.addAndGet(bytes.ramBytesUsed() + reader.ramBytesUsed());
success = true; success = true;
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
final BytesRef term = new BytesRef();
long startAddress = reader.get(docID); long startAddress = reader.get(docID);
BytesRef lengthBytes = new BytesRef(); BytesRef lengthBytes = new BytesRef();
bytesReader.fillSlice(lengthBytes, startAddress, 1); bytesReader.fillSlice(lengthBytes, startAddress, 1);
byte code = lengthBytes.bytes[lengthBytes.offset]; byte code = lengthBytes.bytes[lengthBytes.offset];
if ((code & 128) == 0) { if ((code & 128) == 0) {
// length is 1 byte // length is 1 byte
bytesReader.fillSlice(result, startAddress + 1, (int) code); bytesReader.fillSlice(term, startAddress + 1, (int) code);
} else { } else {
bytesReader.fillSlice(lengthBytes, startAddress + 1, 1); bytesReader.fillSlice(lengthBytes, startAddress + 1, 1);
int length = ((code & 0x7f) << 8) | (lengthBytes.bytes[lengthBytes.offset] & 0xff); int length = ((code & 0x7f) << 8) | (lengthBytes.bytes[lengthBytes.offset] & 0xff);
bytesReader.fillSlice(result, startAddress + 2, length); bytesReader.fillSlice(term, startAddress + 2, length);
} }
return term;
} }
}; };
} finally { } finally {
@ -538,8 +548,10 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
bytesReader.fillSlice(result, fixedLength * (long) ord, fixedLength); final BytesRef term = new BytesRef();
bytesReader.fillSlice(term, fixedLength * (long) ord, fixedLength);
return term;
} }
@Override @Override
@ -574,10 +586,12 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
final BytesRef term = new BytesRef();
long startAddress = addressReader.get(ord); long startAddress = addressReader.get(ord);
long endAddress = addressReader.get(ord+1); long endAddress = addressReader.get(ord+1);
bytesReader.fillSlice(result, startAddress, (int)(endAddress - startAddress)); bytesReader.fillSlice(term, startAddress, (int)(endAddress - startAddress));
return term;
} }
@Override @Override
@ -604,8 +618,8 @@ final class Lucene40DocValuesReader extends DocValuesProducer {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
in.lookupOrd(ord+1, result); return in.lookupOrd(ord+1);
} }
@Override @Override

View File

@ -40,6 +40,7 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
@ -292,19 +293,24 @@ class Lucene42DocValuesProducer extends DocValuesProducer {
ramBytesUsed.addAndGet(bytes.ramBytesUsed()); ramBytesUsed.addAndGet(bytes.ramBytesUsed());
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
bytesReader.fillSlice(result, fixedLength * (long)docID, fixedLength); final BytesRef term = new BytesRef();
bytesReader.fillSlice(term, fixedLength * (long)docID, fixedLength);
return term;
} }
}; };
} else { } else {
final MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false); final MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
ramBytesUsed.addAndGet(bytes.ramBytesUsed() + addresses.ramBytesUsed()); ramBytesUsed.addAndGet(bytes.ramBytesUsed() + addresses.ramBytesUsed());
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
long startAddress = docID == 0 ? 0 : addresses.get(docID-1); long startAddress = docID == 0 ? 0 : addresses.get(docID-1);
long endAddress = addresses.get(docID); long endAddress = addresses.get(docID);
bytesReader.fillSlice(result, startAddress, (int) (endAddress - startAddress)); final BytesRef term = new BytesRef();
bytesReader.fillSlice(term, startAddress, (int) (endAddress - startAddress));
return term;
} }
}; };
} }
@ -334,21 +340,24 @@ class Lucene42DocValuesProducer extends DocValuesProducer {
final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst); final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
return new SortedDocValues() { return new SortedDocValues() {
final BytesRef term = new BytesRef();
@Override @Override
public int getOrd(int docID) { public int getOrd(int docID) {
return (int) docToOrd.get(docID); return (int) docToOrd.get(docID);
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
try { try {
in.setPosition(0); in.setPosition(0);
fst.getFirstArc(firstArc); fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts); IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
result.bytes = new byte[output.length]; term.bytes = ArrayUtil.grow(term.bytes, output.length);
result.offset = 0; term.offset = 0;
result.length = 0; term.length = 0;
Util.toBytesRef(output, result); return Util.toBytesRef(output, term);
} catch (IOException bogus) { } catch (IOException bogus) {
throw new RuntimeException(bogus); throw new RuntimeException(bogus);
} }
@ -386,7 +395,7 @@ class Lucene42DocValuesProducer extends DocValuesProducer {
public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
final FSTEntry entry = fsts.get(field.number); final FSTEntry entry = fsts.get(field.number);
if (entry.numOrds == 0) { if (entry.numOrds == 0) {
return DocValues.EMPTY_SORTED_SET; // empty FST! return DocValues.emptySortedSet(); // empty FST!
} }
FST<Long> instance; FST<Long> instance;
synchronized(this) { synchronized(this) {
@ -407,9 +416,10 @@ class Lucene42DocValuesProducer extends DocValuesProducer {
final Arc<Long> scratchArc = new Arc<>(); final Arc<Long> scratchArc = new Arc<>();
final IntsRef scratchInts = new IntsRef(); final IntsRef scratchInts = new IntsRef();
final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst); final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<>(fst);
final BytesRef ref = new BytesRef();
final ByteArrayDataInput input = new ByteArrayDataInput(); final ByteArrayDataInput input = new ByteArrayDataInput();
return new SortedSetDocValues() { return new SortedSetDocValues() {
final BytesRef term = new BytesRef();
BytesRef ordsRef;
long currentOrd; long currentOrd;
@Override @Override
@ -424,21 +434,21 @@ class Lucene42DocValuesProducer extends DocValuesProducer {
@Override @Override
public void setDocument(int docID) { public void setDocument(int docID) {
docToOrds.get(docID, ref); ordsRef = docToOrds.get(docID);
input.reset(ref.bytes, ref.offset, ref.length); input.reset(ordsRef.bytes, ordsRef.offset, ordsRef.length);
currentOrd = 0; currentOrd = 0;
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
try { try {
in.setPosition(0); in.setPosition(0);
fst.getFirstArc(firstArc); fst.getFirstArc(firstArc);
IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts); IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
result.bytes = new byte[output.length]; term.bytes = ArrayUtil.grow(term.bytes, output.length);
result.offset = 0; term.offset = 0;
result.length = 0; term.length = 0;
Util.toBytesRef(output, result); return Util.toBytesRef(output, term);
} catch (IOException bogus) { } catch (IOException bogus) {
throw new RuntimeException(bogus); throw new RuntimeException(bogus);
} }

View File

@ -383,18 +383,20 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
final IndexInput data = this.data.clone(); final IndexInput data = this.data.clone();
return new LongBinaryDocValues() { return new LongBinaryDocValues() {
final BytesRef term;
{
term = new BytesRef(bytes.maxLength);
term.offset = 0;
term.length = bytes.maxLength;
}
@Override @Override
public void get(long id, BytesRef result) { public BytesRef get(long id) {
long address = bytes.offset + id * bytes.maxLength; long address = bytes.offset + id * bytes.maxLength;
try { try {
data.seek(address); data.seek(address);
// NOTE: we could have one buffer, but various consumers (e.g. FieldComparatorSource) data.readBytes(term.bytes, 0, term.length);
// assume "they" own the bytes after calling this! return term;
final byte[] buffer = new byte[bytes.maxLength];
data.readBytes(buffer, 0, buffer.length);
result.bytes = buffer;
result.offset = 0;
result.length = buffer.length;
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -425,20 +427,18 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
final MonotonicBlockPackedReader addresses = getAddressInstance(data, field, bytes); final MonotonicBlockPackedReader addresses = getAddressInstance(data, field, bytes);
return new LongBinaryDocValues() { return new LongBinaryDocValues() {
final BytesRef term = new BytesRef(Math.max(0, bytes.maxLength));
@Override @Override
public void get(long id, BytesRef result) { public BytesRef get(long id) {
long startAddress = bytes.offset + (id == 0 ? 0 : addresses.get(id-1)); long startAddress = bytes.offset + (id == 0 ? 0 : addresses.get(id-1));
long endAddress = bytes.offset + addresses.get(id); long endAddress = bytes.offset + addresses.get(id);
int length = (int) (endAddress - startAddress); int length = (int) (endAddress - startAddress);
try { try {
data.seek(startAddress); data.seek(startAddress);
// NOTE: we could have one buffer, but various consumers (e.g. FieldComparatorSource) data.readBytes(term.bytes, 0, length);
// assume "they" own the bytes after calling this! term.length = length;
final byte[] buffer = new byte[length]; return term;
data.readBytes(buffer, 0, buffer.length);
result.bytes = buffer;
result.offset = 0;
result.length = length;
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -496,8 +496,8 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
binary.get(ord, result); return binary.get(ord);
} }
@Override @Override
@ -583,8 +583,8 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
binary.get(ord, result); return binary.get(ord);
} }
@Override @Override
@ -723,11 +723,11 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
// internally we compose complex dv (sorted/sortedset) from other ones // internally we compose complex dv (sorted/sortedset) from other ones
static abstract class LongBinaryDocValues extends BinaryDocValues { static abstract class LongBinaryDocValues extends BinaryDocValues {
@Override @Override
public final void get(int docID, BytesRef result) { public final BytesRef get(int docID) {
get((long)docID, result); return get((long) docID);
} }
abstract void get(long id, BytesRef Result); abstract BytesRef get(long id);
} }
// in the compressed case, we add a few additional operations for // in the compressed case, we add a few additional operations for
@ -752,13 +752,10 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
} }
@Override @Override
public void get(long id, BytesRef result) { public BytesRef get(long id) {
try { try {
termsEnum.seekExact(id); termsEnum.seekExact(id);
BytesRef term = termsEnum.term(); return termsEnum.term();
result.bytes = term.bytes;
result.offset = term.offset;
result.length = term.length;
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -793,28 +790,18 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
return new TermsEnum() { return new TermsEnum() {
private long currentOrd = -1; private long currentOrd = -1;
// TODO: maxLength is negative when all terms are merged away... // TODO: maxLength is negative when all terms are merged away...
private final BytesRef termBuffer = new BytesRef(bytes.maxLength < 0 ? 0 : bytes.maxLength); private final BytesRef term = new BytesRef(bytes.maxLength < 0 ? 0 : bytes.maxLength);
private final BytesRef term = new BytesRef(); // TODO: paranoia?
@Override @Override
public BytesRef next() throws IOException { public BytesRef next() throws IOException {
if (doNext() == null) {
return null;
} else {
setTerm();
return term;
}
}
private BytesRef doNext() throws IOException {
if (++currentOrd >= numValues) { if (++currentOrd >= numValues) {
return null; return null;
} else { } else {
int start = input.readVInt(); int start = input.readVInt();
int suffix = input.readVInt(); int suffix = input.readVInt();
input.readBytes(termBuffer.bytes, start, suffix); input.readBytes(term.bytes, start, suffix);
termBuffer.length = start + suffix; term.length = start + suffix;
return termBuffer; return term;
} }
} }
@ -827,8 +814,8 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
while (low <= high) { while (low <= high) {
long mid = (low + high) >>> 1; long mid = (low + high) >>> 1;
doSeek(mid * interval); seekExact(mid * interval);
int cmp = termBuffer.compareTo(text); int cmp = term.compareTo(text);
if (cmp < 0) { if (cmp < 0) {
low = mid + 1; low = mid + 1;
@ -836,7 +823,6 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
high = mid - 1; high = mid - 1;
} else { } else {
// we got lucky, found an indexed term // we got lucky, found an indexed term
setTerm();
return SeekStatus.FOUND; return SeekStatus.FOUND;
} }
} }
@ -847,15 +833,13 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
// block before insertion point // block before insertion point
long block = low-1; long block = low-1;
doSeek(block < 0 ? -1 : block * interval); seekExact(block < 0 ? -1 : block * interval);
while (doNext() != null) { while (next() != null) {
int cmp = termBuffer.compareTo(text); int cmp = term.compareTo(text);
if (cmp == 0) { if (cmp == 0) {
setTerm();
return SeekStatus.FOUND; return SeekStatus.FOUND;
} else if (cmp > 0) { } else if (cmp > 0) {
setTerm();
return SeekStatus.NOT_FOUND; return SeekStatus.NOT_FOUND;
} }
} }
@ -865,11 +849,6 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
@Override @Override
public void seekExact(long ord) throws IOException { public void seekExact(long ord) throws IOException {
doSeek(ord);
setTerm();
}
private void doSeek(long ord) throws IOException {
long block = ord / interval; long block = ord / interval;
if (ord >= currentOrd && block == currentOrd / interval) { if (ord >= currentOrd && block == currentOrd / interval) {
@ -881,16 +860,9 @@ public class Lucene45DocValuesProducer extends DocValuesProducer implements Clos
} }
while (currentOrd < ord) { while (currentOrd < ord) {
doNext(); next();
} }
} }
private void setTerm() {
// TODO: is there a cleaner way
term.bytes = new byte[termBuffer.length];
term.offset = 0;
term.copyBytes(termBuffer);
}
@Override @Override
public BytesRef term() throws IOException { public BytesRef term() throws IOException {

View File

@ -363,18 +363,20 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
final IndexInput data = this.data.clone(); final IndexInput data = this.data.clone();
return new LongBinaryDocValues() { return new LongBinaryDocValues() {
final BytesRef term;
{
term = new BytesRef(bytes.maxLength);
term.offset = 0;
term.length = bytes.maxLength;
}
@Override @Override
public void get(long id, BytesRef result) { public BytesRef get(long id) {
long address = bytes.offset + id * bytes.maxLength; long address = bytes.offset + id * bytes.maxLength;
try { try {
data.seek(address); data.seek(address);
// NOTE: we could have one buffer, but various consumers (e.g. FieldComparatorSource) data.readBytes(term.bytes, 0, term.length);
// assume "they" own the bytes after calling this! return term;
final byte[] buffer = new byte[bytes.maxLength];
data.readBytes(buffer, 0, buffer.length);
result.bytes = buffer;
result.offset = 0;
result.length = buffer.length;
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -405,20 +407,18 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
final MonotonicBlockPackedReader addresses = getAddressInstance(data, field, bytes); final MonotonicBlockPackedReader addresses = getAddressInstance(data, field, bytes);
return new LongBinaryDocValues() { return new LongBinaryDocValues() {
final BytesRef term = new BytesRef(Math.max(0, bytes.maxLength));
@Override @Override
public void get(long id, BytesRef result) { public BytesRef get(long id) {
long startAddress = bytes.offset + (id == 0 ? 0 : addresses.get(id-1)); long startAddress = bytes.offset + (id == 0 ? 0 : addresses.get(id-1));
long endAddress = bytes.offset + addresses.get(id); long endAddress = bytes.offset + addresses.get(id);
int length = (int) (endAddress - startAddress); int length = (int) (endAddress - startAddress);
try { try {
data.seek(startAddress); data.seek(startAddress);
// NOTE: we could have one buffer, but various consumers (e.g. FieldComparatorSource) data.readBytes(term.bytes, 0, length);
// assume "they" own the bytes after calling this! term.length = length;
final byte[] buffer = new byte[length]; return term;
data.readBytes(buffer, 0, buffer.length);
result.bytes = buffer;
result.offset = 0;
result.length = length;
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -474,8 +474,8 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
binary.get(ord, result); return binary.get(ord);
} }
@Override @Override
@ -561,8 +561,8 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
binary.get(ord, result); return binary.get(ord);
} }
@Override @Override
@ -704,11 +704,11 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
// internally we compose complex dv (sorted/sortedset) from other ones // internally we compose complex dv (sorted/sortedset) from other ones
static abstract class LongBinaryDocValues extends BinaryDocValues { static abstract class LongBinaryDocValues extends BinaryDocValues {
@Override @Override
public final void get(int docID, BytesRef result) { public final BytesRef get(int docID) {
get((long)docID, result); return get((long)docID);
} }
abstract void get(long id, BytesRef Result); abstract BytesRef get(long id);
} }
// in the compressed case, we add a few additional operations for // in the compressed case, we add a few additional operations for
@ -733,13 +733,10 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
} }
@Override @Override
public void get(long id, BytesRef result) { public BytesRef get(long id) {
try { try {
termsEnum.seekExact(id); termsEnum.seekExact(id);
BytesRef term = termsEnum.term(); return termsEnum.term();
result.bytes = term.bytes;
result.offset = term.offset;
result.length = term.length;
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -774,28 +771,18 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
return new TermsEnum() { return new TermsEnum() {
private long currentOrd = -1; private long currentOrd = -1;
// TODO: maxLength is negative when all terms are merged away... // TODO: maxLength is negative when all terms are merged away...
private final BytesRef termBuffer = new BytesRef(bytes.maxLength < 0 ? 0 : bytes.maxLength); private final BytesRef term = new BytesRef(bytes.maxLength < 0 ? 0 : bytes.maxLength);
private final BytesRef term = new BytesRef(); // TODO: paranoia?
@Override @Override
public BytesRef next() throws IOException { public BytesRef next() throws IOException {
if (doNext() == null) {
return null;
} else {
setTerm();
return term;
}
}
private BytesRef doNext() throws IOException {
if (++currentOrd >= numValues) { if (++currentOrd >= numValues) {
return null; return null;
} else { } else {
int start = input.readVInt(); int start = input.readVInt();
int suffix = input.readVInt(); int suffix = input.readVInt();
input.readBytes(termBuffer.bytes, start, suffix); input.readBytes(term.bytes, start, suffix);
termBuffer.length = start + suffix; term.length = start + suffix;
return termBuffer; return term;
} }
} }
@ -808,8 +795,8 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
while (low <= high) { while (low <= high) {
long mid = (low + high) >>> 1; long mid = (low + high) >>> 1;
doSeek(mid * interval); seekExact(mid * interval);
int cmp = termBuffer.compareTo(text); int cmp = term.compareTo(text);
if (cmp < 0) { if (cmp < 0) {
low = mid + 1; low = mid + 1;
@ -817,7 +804,6 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
high = mid - 1; high = mid - 1;
} else { } else {
// we got lucky, found an indexed term // we got lucky, found an indexed term
setTerm();
return SeekStatus.FOUND; return SeekStatus.FOUND;
} }
} }
@ -828,15 +814,13 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
// block before insertion point // block before insertion point
long block = low-1; long block = low-1;
doSeek(block < 0 ? -1 : block * interval); seekExact(block < 0 ? -1 : block * interval);
while (doNext() != null) { while (next() != null) {
int cmp = termBuffer.compareTo(text); int cmp = term.compareTo(text);
if (cmp == 0) { if (cmp == 0) {
setTerm();
return SeekStatus.FOUND; return SeekStatus.FOUND;
} else if (cmp > 0) { } else if (cmp > 0) {
setTerm();
return SeekStatus.NOT_FOUND; return SeekStatus.NOT_FOUND;
} }
} }
@ -846,11 +830,6 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
@Override @Override
public void seekExact(long ord) throws IOException { public void seekExact(long ord) throws IOException {
doSeek(ord);
setTerm();
}
private void doSeek(long ord) throws IOException {
long block = ord / interval; long block = ord / interval;
if (ord >= currentOrd && block == currentOrd / interval) { if (ord >= currentOrd && block == currentOrd / interval) {
@ -862,16 +841,9 @@ public class Lucene49DocValuesProducer extends DocValuesProducer implements Clos
} }
while (currentOrd < ord) { while (currentOrd < ord) {
doNext(); next();
} }
} }
private void setTerm() {
// TODO: is there a cleaner way
term.bytes = new byte[termBuffer.length];
term.offset = 0;
term.copyBytes(termBuffer);
}
@Override @Override
public BytesRef term() throws IOException { public BytesRef term() throws IOException {

View File

@ -28,6 +28,9 @@ public abstract class BinaryDocValues {
* constructors, typically implicit.) */ * constructors, typically implicit.) */
protected BinaryDocValues() {} protected BinaryDocValues() {}
/** Lookup the value for document. */ /** Lookup the value for document. The returned {@link BytesRef} may be
public abstract void get(int docID, BytesRef result); * re-used across calls to {@link #get(int)} so make sure to
* {@link BytesRef#deepCopyOf(BytesRef) copy it} if you want to keep it
* around. */
public abstract BytesRef get(int docID);
} }

View File

@ -1392,12 +1392,11 @@ public class CheckIndex {
} }
private static void checkBinaryDocValues(String fieldName, AtomicReader reader, BinaryDocValues dv, Bits docsWithField) { private static void checkBinaryDocValues(String fieldName, AtomicReader reader, BinaryDocValues dv, Bits docsWithField) {
BytesRef scratch = new BytesRef();
for (int i = 0; i < reader.maxDoc(); i++) { for (int i = 0; i < reader.maxDoc(); i++) {
dv.get(i, scratch); final BytesRef term = dv.get(i);
assert scratch.isValid(); assert term.isValid();
if (docsWithField.get(i) == false && scratch.length > 0) { if (docsWithField.get(i) == false && term.length > 0) {
throw new RuntimeException("dv for field: " + fieldName + " is missing but has value=" + scratch + " for doc: " + i); throw new RuntimeException("dv for field: " + fieldName + " is missing but has value=" + term + " for doc: " + i);
} }
} }
} }
@ -1430,16 +1429,15 @@ public class CheckIndex {
throw new RuntimeException("dv for field: " + fieldName + " has holes in its ords, valueCount=" + dv.getValueCount() + " but only used: " + seenOrds.cardinality()); throw new RuntimeException("dv for field: " + fieldName + " has holes in its ords, valueCount=" + dv.getValueCount() + " but only used: " + seenOrds.cardinality());
} }
BytesRef lastValue = null; BytesRef lastValue = null;
BytesRef scratch = new BytesRef();
for (int i = 0; i <= maxOrd; i++) { for (int i = 0; i <= maxOrd; i++) {
dv.lookupOrd(i, scratch); final BytesRef term = dv.lookupOrd(i);
assert scratch.isValid(); assert term.isValid();
if (lastValue != null) { if (lastValue != null) {
if (scratch.compareTo(lastValue) <= 0) { if (term.compareTo(lastValue) <= 0) {
throw new RuntimeException("dv for field: " + fieldName + " has ords out of order: " + lastValue + " >=" + scratch); throw new RuntimeException("dv for field: " + fieldName + " has ords out of order: " + lastValue + " >=" + term);
} }
} }
lastValue = BytesRef.deepCopyOf(scratch); lastValue = BytesRef.deepCopyOf(term);
} }
} }
@ -1501,16 +1499,15 @@ public class CheckIndex {
} }
BytesRef lastValue = null; BytesRef lastValue = null;
BytesRef scratch = new BytesRef();
for (long i = 0; i <= maxOrd; i++) { for (long i = 0; i <= maxOrd; i++) {
dv.lookupOrd(i, scratch); final BytesRef term = dv.lookupOrd(i);
assert scratch.isValid(); assert term.isValid();
if (lastValue != null) { if (lastValue != null) {
if (scratch.compareTo(lastValue) <= 0) { if (term.compareTo(lastValue) <= 0) {
throw new RuntimeException("dv for field: " + fieldName + " has ords out of order: " + lastValue + " >=" + scratch); throw new RuntimeException("dv for field: " + fieldName + " has ords out of order: " + lastValue + " >=" + term);
} }
} }
lastValue = BytesRef.deepCopyOf(scratch); lastValue = BytesRef.deepCopyOf(term);
} }
} }

View File

@ -26,87 +26,92 @@ import org.apache.lucene.util.BytesRef;
* This class contains utility methods and constants for DocValues * This class contains utility methods and constants for DocValues
*/ */
public final class DocValues { public final class DocValues {
/* no instantiation */ /* no instantiation */
private DocValues() {} private DocValues() {}
/** /**
* An empty BinaryDocValues which returns {@link BytesRef#EMPTY_BYTES} for every document * An empty BinaryDocValues which returns {@link BytesRef#EMPTY_BYTES} for every document
*/ */
public static final BinaryDocValues EMPTY_BINARY = new BinaryDocValues() { public static final BinaryDocValues emptyBinary() {
@Override final BytesRef empty = new BytesRef();
public void get(int docID, BytesRef result) { return new BinaryDocValues() {
result.bytes = BytesRef.EMPTY_BYTES; @Override
result.offset = 0; public BytesRef get(int docID) {
result.length = 0; return empty;
} }
}; };
}
/** /**
* An empty NumericDocValues which returns zero for every document * An empty NumericDocValues which returns zero for every document
*/ */
public static final NumericDocValues EMPTY_NUMERIC = new NumericDocValues() { public static final NumericDocValues emptyNumeric() {
@Override return new NumericDocValues() {
public long get(int docID) { @Override
return 0; public long get(int docID) {
} return 0;
}; }
};
}
/** /**
* An empty SortedDocValues which returns {@link BytesRef#EMPTY_BYTES} for every document * An empty SortedDocValues which returns {@link BytesRef#EMPTY_BYTES} for every document
*/ */
public static final SortedDocValues EMPTY_SORTED = new SortedDocValues() { public static final SortedDocValues emptySorted() {
@Override final BytesRef empty = new BytesRef();
public int getOrd(int docID) { return new SortedDocValues() {
return -1; @Override
} public int getOrd(int docID) {
return -1;
}
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
result.bytes = BytesRef.EMPTY_BYTES; return empty;
result.offset = 0; }
result.length = 0;
}
@Override @Override
public int getValueCount() { public int getValueCount() {
return 0; return 0;
} }
}; };
}
/** /**
* An empty SortedDocValues which returns {@link SortedSetDocValues#NO_MORE_ORDS} for every document * An empty SortedDocValues which returns {@link SortedSetDocValues#NO_MORE_ORDS} for every document
*/ */
public static final SortedSetDocValues EMPTY_SORTED_SET = new RandomAccessOrds() { public static final SortedSetDocValues emptySortedSet() {
return new RandomAccessOrds() {
@Override @Override
public long nextOrd() { public long nextOrd() {
return NO_MORE_ORDS; return NO_MORE_ORDS;
} }
@Override @Override
public void setDocument(int docID) {} public void setDocument(int docID) {}
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
throw new IndexOutOfBoundsException(); throw new IndexOutOfBoundsException();
} }
@Override @Override
public long getValueCount() { public long getValueCount() {
return 0; return 0;
} }
@Override @Override
public long ordAt(int index) { public long ordAt(int index) {
throw new IndexOutOfBoundsException(); throw new IndexOutOfBoundsException();
} }
@Override @Override
public int cardinality() { public int cardinality() {
return 0; return 0;
} }
}; };
}
/** /**
* Returns a multi-valued view over the provided SortedDocValues * Returns a multi-valued view over the provided SortedDocValues
@ -166,52 +171,52 @@ public final class DocValues {
// as opposed to the AtomicReader apis (which must be strict for consistency), these are lenient // as opposed to the AtomicReader apis (which must be strict for consistency), these are lenient
/** /**
* Returns NumericDocValues for the reader, or {@link #EMPTY_NUMERIC} if it has none. * Returns NumericDocValues for the reader, or {@link #emptyNumeric()} if it has none.
*/ */
public static NumericDocValues getNumeric(AtomicReader in, String field) throws IOException { public static NumericDocValues getNumeric(AtomicReader in, String field) throws IOException {
NumericDocValues dv = in.getNumericDocValues(field); NumericDocValues dv = in.getNumericDocValues(field);
if (dv == null) { if (dv == null) {
return EMPTY_NUMERIC; return emptyNumeric();
} else { } else {
return dv; return dv;
} }
} }
/** /**
* Returns BinaryDocValues for the reader, or {@link #EMPTY_BINARY} if it has none. * Returns BinaryDocValues for the reader, or {@link #emptyBinary} if it has none.
*/ */
public static BinaryDocValues getBinary(AtomicReader in, String field) throws IOException { public static BinaryDocValues getBinary(AtomicReader in, String field) throws IOException {
BinaryDocValues dv = in.getBinaryDocValues(field); BinaryDocValues dv = in.getBinaryDocValues(field);
if (dv == null) { if (dv == null) {
dv = in.getSortedDocValues(field); dv = in.getSortedDocValues(field);
if (dv == null) { if (dv == null) {
return EMPTY_BINARY; return emptyBinary();
} }
} }
return dv; return dv;
} }
/** /**
* Returns SortedDocValues for the reader, or {@link #EMPTY_SORTED} if it has none. * Returns SortedDocValues for the reader, or {@link #emptySorted} if it has none.
*/ */
public static SortedDocValues getSorted(AtomicReader in, String field) throws IOException { public static SortedDocValues getSorted(AtomicReader in, String field) throws IOException {
SortedDocValues dv = in.getSortedDocValues(field); SortedDocValues dv = in.getSortedDocValues(field);
if (dv == null) { if (dv == null) {
return EMPTY_SORTED; return emptySorted();
} else { } else {
return dv; return dv;
} }
} }
/** /**
* Returns SortedSetDocValues for the reader, or {@link #EMPTY_SORTED_SET} if it has none. * Returns SortedSetDocValues for the reader, or {@link #emptySortedSet} if it has none.
*/ */
public static SortedSetDocValues getSortedSet(AtomicReader in, String field) throws IOException { public static SortedSetDocValues getSortedSet(AtomicReader in, String field) throws IOException {
SortedSetDocValues dv = in.getSortedSetDocValues(field); SortedSetDocValues dv = in.getSortedSetDocValues(field);
if (dv == null) { if (dv == null) {
SortedDocValues sorted = in.getSortedDocValues(field); SortedDocValues sorted = in.getSortedDocValues(field);
if (sorted == null) { if (sorted == null) {
return EMPTY_SORTED_SET; return emptySortedSet();
} }
return singleton(sorted); return singleton(sorted);
} }

View File

@ -74,7 +74,7 @@ public class MultiDocValues {
AtomicReaderContext context = leaves.get(i); AtomicReaderContext context = leaves.get(i);
NumericDocValues v = context.reader().getNormValues(field); NumericDocValues v = context.reader().getNormValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_NUMERIC; v = DocValues.emptyNumeric();
} else { } else {
anyReal = true; anyReal = true;
} }
@ -116,7 +116,7 @@ public class MultiDocValues {
AtomicReaderContext context = leaves.get(i); AtomicReaderContext context = leaves.get(i);
NumericDocValues v = context.reader().getNumericDocValues(field); NumericDocValues v = context.reader().getNumericDocValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_NUMERIC; v = DocValues.emptyNumeric();
} else { } else {
anyReal = true; anyReal = true;
} }
@ -206,7 +206,7 @@ public class MultiDocValues {
AtomicReaderContext context = leaves.get(i); AtomicReaderContext context = leaves.get(i);
BinaryDocValues v = context.reader().getBinaryDocValues(field); BinaryDocValues v = context.reader().getBinaryDocValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_BINARY; v = DocValues.emptyBinary();
} else { } else {
anyReal = true; anyReal = true;
} }
@ -220,9 +220,9 @@ public class MultiDocValues {
} else { } else {
return new BinaryDocValues() { return new BinaryDocValues() {
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
int subIndex = ReaderUtil.subIndex(docID, starts); int subIndex = ReaderUtil.subIndex(docID, starts);
values[subIndex].get(docID - starts[subIndex], result); return values[subIndex].get(docID - starts[subIndex]);
} }
}; };
} }
@ -251,7 +251,7 @@ public class MultiDocValues {
AtomicReaderContext context = leaves.get(i); AtomicReaderContext context = leaves.get(i);
SortedDocValues v = context.reader().getSortedDocValues(field); SortedDocValues v = context.reader().getSortedDocValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_SORTED; v = DocValues.emptySorted();
} else { } else {
anyReal = true; anyReal = true;
} }
@ -295,7 +295,7 @@ public class MultiDocValues {
AtomicReaderContext context = leaves.get(i); AtomicReaderContext context = leaves.get(i);
SortedSetDocValues v = context.reader().getSortedSetDocValues(field); SortedSetDocValues v = context.reader().getSortedSetDocValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_SORTED_SET; v = DocValues.emptySortedSet();
} else { } else {
anyReal = true; anyReal = true;
} }
@ -453,10 +453,10 @@ public class MultiDocValues {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
int subIndex = mapping.getFirstSegmentNumber(ord); int subIndex = mapping.getFirstSegmentNumber(ord);
int segmentOrd = (int) mapping.getFirstSegmentOrd(ord); int segmentOrd = (int) mapping.getFirstSegmentOrd(ord);
values[subIndex].lookupOrd(segmentOrd, result); return values[subIndex].lookupOrd(segmentOrd);
} }
@Override @Override
@ -504,10 +504,10 @@ public class MultiDocValues {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
int subIndex = mapping.getFirstSegmentNumber(ord); int subIndex = mapping.getFirstSegmentNumber(ord);
long segmentOrd = mapping.getFirstSegmentOrd(ord); long segmentOrd = mapping.getFirstSegmentOrd(ord);
values[subIndex].lookupOrd(segmentOrd, result); return values[subIndex].lookupOrd(segmentOrd);
} }
@Override @Override

View File

@ -400,7 +400,6 @@ class ReadersAndUpdates {
int curDoc = -1; int curDoc = -1;
int updateDoc = updatesIter.nextDoc(); int updateDoc = updatesIter.nextDoc();
BytesRef scratch = new BytesRef();
@Override @Override
public boolean hasNext() { public boolean hasNext() {
@ -421,8 +420,7 @@ class ReadersAndUpdates {
assert curDoc < updateDoc; assert curDoc < updateDoc;
if (currentValues != null && docsWithField.get(curDoc)) { if (currentValues != null && docsWithField.get(curDoc)) {
// only read the current value if the document had a value before // only read the current value if the document had a value before
currentValues.get(curDoc, scratch); return currentValues.get(curDoc);
return scratch;
} else { } else {
return null; return null;
} }

View File

@ -168,7 +168,7 @@ final class SegmentMerger {
NumericDocValues values = reader.getNumericDocValues(field.name); NumericDocValues values = reader.getNumericDocValues(field.name);
Bits bits = reader.getDocsWithField(field.name); Bits bits = reader.getDocsWithField(field.name);
if (values == null) { if (values == null) {
values = DocValues.EMPTY_NUMERIC; values = DocValues.emptyNumeric();
bits = new Bits.MatchNoBits(reader.maxDoc()); bits = new Bits.MatchNoBits(reader.maxDoc());
} }
toMerge.add(values); toMerge.add(values);
@ -182,7 +182,7 @@ final class SegmentMerger {
BinaryDocValues values = reader.getBinaryDocValues(field.name); BinaryDocValues values = reader.getBinaryDocValues(field.name);
Bits bits = reader.getDocsWithField(field.name); Bits bits = reader.getDocsWithField(field.name);
if (values == null) { if (values == null) {
values = DocValues.EMPTY_BINARY; values = DocValues.emptyBinary();
bits = new Bits.MatchNoBits(reader.maxDoc()); bits = new Bits.MatchNoBits(reader.maxDoc());
} }
toMerge.add(values); toMerge.add(values);
@ -194,7 +194,7 @@ final class SegmentMerger {
for (AtomicReader reader : mergeState.readers) { for (AtomicReader reader : mergeState.readers) {
SortedDocValues values = reader.getSortedDocValues(field.name); SortedDocValues values = reader.getSortedDocValues(field.name);
if (values == null) { if (values == null) {
values = DocValues.EMPTY_SORTED; values = DocValues.emptySorted();
} }
toMerge.add(values); toMerge.add(values);
} }
@ -204,7 +204,7 @@ final class SegmentMerger {
for (AtomicReader reader : mergeState.readers) { for (AtomicReader reader : mergeState.readers) {
SortedSetDocValues values = reader.getSortedSetDocValues(field.name); SortedSetDocValues values = reader.getSortedSetDocValues(field.name);
if (values == null) { if (values == null) {
values = DocValues.EMPTY_SORTED_SET; values = DocValues.emptySortedSet();
} }
toMerge.add(values); toMerge.add(values);
} }
@ -235,7 +235,7 @@ final class SegmentMerger {
for (AtomicReader reader : mergeState.readers) { for (AtomicReader reader : mergeState.readers) {
NumericDocValues norms = reader.getNormValues(field.name); NumericDocValues norms = reader.getNormValues(field.name);
if (norms == null) { if (norms == null) {
norms = DocValues.EMPTY_NUMERIC; norms = DocValues.emptyNumeric();
} }
toMerge.add(norms); toMerge.add(norms);
docsWithField.add(new Bits.MatchAllBits(reader.maxDoc())); docsWithField.add(new Bits.MatchAllBits(reader.maxDoc()));

View File

@ -58,9 +58,9 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
// cast is ok: single-valued cannot exceed Integer.MAX_VALUE // cast is ok: single-valued cannot exceed Integer.MAX_VALUE
in.lookupOrd((int)ord, result); return in.lookupOrd((int) ord);
} }
@Override @Override

View File

@ -140,7 +140,7 @@ public final class SlowCompositeReaderWrapper extends AtomicReader {
AtomicReaderContext context = in.leaves().get(i); AtomicReaderContext context = in.leaves().get(i);
SortedDocValues v = context.reader().getSortedDocValues(field); SortedDocValues v = context.reader().getSortedDocValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_SORTED; v = DocValues.emptySorted();
} }
values[i] = v; values[i] = v;
starts[i] = context.docBase; starts[i] = context.docBase;
@ -179,7 +179,7 @@ public final class SlowCompositeReaderWrapper extends AtomicReader {
AtomicReaderContext context = in.leaves().get(i); AtomicReaderContext context = in.leaves().get(i);
SortedSetDocValues v = context.reader().getSortedSetDocValues(field); SortedSetDocValues v = context.reader().getSortedSetDocValues(field);
if (v == null) { if (v == null) {
v = DocValues.EMPTY_SORTED_SET; v = DocValues.emptySortedSet();
} }
values[i] = v; values[i] = v;
starts[i] = context.docBase; starts[i] = context.docBase;

View File

@ -28,7 +28,7 @@ import org.apache.lucene.util.BytesRef;
* are dense and in increasing sorted order. * are dense and in increasing sorted order.
*/ */
public abstract class SortedDocValues extends BinaryDocValues { public abstract class SortedDocValues extends BinaryDocValues {
/** Sole constructor. (For invocation by subclass /** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */ * constructors, typically implicit.) */
protected SortedDocValues() {} protected SortedDocValues() {}
@ -42,12 +42,14 @@ public abstract class SortedDocValues extends BinaryDocValues {
*/ */
public abstract int getOrd(int docID); public abstract int getOrd(int docID);
/** Retrieves the value for the specified ordinal. /** Retrieves the value for the specified ordinal. The returned
* {@link BytesRef} may be re-used across calls to {@link #lookupOrd(int)}
* so make sure to {@link BytesRef#deepCopyOf(BytesRef) copy it} if you want
* to keep it around.
* @param ord ordinal to lookup (must be &gt;= 0 and &lt {@link #getValueCount()}) * @param ord ordinal to lookup (must be &gt;= 0 and &lt {@link #getValueCount()})
* @param result will be populated with the ordinal's value
* @see #getOrd(int) * @see #getOrd(int)
*/ */
public abstract void lookupOrd(int ord, BytesRef result); public abstract BytesRef lookupOrd(int ord);
/** /**
* Returns the number of unique values. * Returns the number of unique values.
@ -56,15 +58,15 @@ public abstract class SortedDocValues extends BinaryDocValues {
*/ */
public abstract int getValueCount(); public abstract int getValueCount();
private final BytesRef empty = new BytesRef();
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
int ord = getOrd(docID); int ord = getOrd(docID);
if (ord == -1) { if (ord == -1) {
result.bytes = BytesRef.EMPTY_BYTES; return empty;
result.length = 0;
result.offset = 0;
} else { } else {
lookupOrd(ord, result); return lookupOrd(ord);
} }
} }
@ -75,14 +77,13 @@ public abstract class SortedDocValues extends BinaryDocValues {
* @param key Key to look up * @param key Key to look up
**/ **/
public int lookupTerm(BytesRef key) { public int lookupTerm(BytesRef key) {
BytesRef spare = new BytesRef();
int low = 0; int low = 0;
int high = getValueCount()-1; int high = getValueCount()-1;
while (low <= high) { while (low <= high) {
int mid = (low + high) >>> 1; int mid = (low + high) >>> 1;
lookupOrd(mid, spare); final BytesRef term = lookupOrd(mid);
int cmp = spare.compareTo(key); int cmp = term.compareTo(key);
if (cmp < 0) { if (cmp < 0) {
low = mid + 1; low = mid + 1;

View File

@ -28,11 +28,13 @@ import org.apache.lucene.util.BytesRef;
class SortedDocValuesTermsEnum extends TermsEnum { class SortedDocValuesTermsEnum extends TermsEnum {
private final SortedDocValues values; private final SortedDocValues values;
private int currentOrd = -1; private int currentOrd = -1;
private final BytesRef term = new BytesRef(); private BytesRef term;
private final BytesRef scratch;
/** Creates a new TermsEnum over the provided values */ /** Creates a new TermsEnum over the provided values */
public SortedDocValuesTermsEnum(SortedDocValues values) { public SortedDocValuesTermsEnum(SortedDocValues values) {
this.values = values; this.values = values;
scratch = new BytesRef();
} }
@Override @Override
@ -40,12 +42,8 @@ class SortedDocValuesTermsEnum extends TermsEnum {
int ord = values.lookupTerm(text); int ord = values.lookupTerm(text);
if (ord >= 0) { if (ord >= 0) {
currentOrd = ord; currentOrd = ord;
term.offset = 0; scratch.copyBytes(text);
// TODO: is there a cleaner way? term = scratch;
// term.bytes may be pointing to codec-private byte[]
// storage, so we must force new byte[] allocation:
term.bytes = new byte[text.length];
term.copyBytes(text);
return SeekStatus.FOUND; return SeekStatus.FOUND;
} else { } else {
currentOrd = -ord-1; currentOrd = -ord-1;
@ -53,7 +51,7 @@ class SortedDocValuesTermsEnum extends TermsEnum {
return SeekStatus.END; return SeekStatus.END;
} else { } else {
// TODO: hmm can we avoid this "extra" lookup?: // TODO: hmm can we avoid this "extra" lookup?:
values.lookupOrd(currentOrd, term); term = values.lookupOrd(currentOrd);
return SeekStatus.NOT_FOUND; return SeekStatus.NOT_FOUND;
} }
} }
@ -63,13 +61,9 @@ class SortedDocValuesTermsEnum extends TermsEnum {
public boolean seekExact(BytesRef text) throws IOException { public boolean seekExact(BytesRef text) throws IOException {
int ord = values.lookupTerm(text); int ord = values.lookupTerm(text);
if (ord >= 0) { if (ord >= 0) {
term.offset = 0;
// TODO: is there a cleaner way?
// term.bytes may be pointing to codec-private byte[]
// storage, so we must force new byte[] allocation:
term.bytes = new byte[text.length];
term.copyBytes(text);
currentOrd = ord; currentOrd = ord;
scratch.copyBytes(text);
term = scratch;
return true; return true;
} else { } else {
return false; return false;
@ -80,7 +74,7 @@ class SortedDocValuesTermsEnum extends TermsEnum {
public void seekExact(long ord) throws IOException { public void seekExact(long ord) throws IOException {
assert ord >= 0 && ord < values.getValueCount(); assert ord >= 0 && ord < values.getValueCount();
currentOrd = (int) ord; currentOrd = (int) ord;
values.lookupOrd(currentOrd, term); term = values.lookupOrd(currentOrd);
} }
@Override @Override
@ -89,7 +83,7 @@ class SortedDocValuesTermsEnum extends TermsEnum {
if (currentOrd >= values.getValueCount()) { if (currentOrd >= values.getValueCount()) {
return null; return null;
} }
values.lookupOrd(currentOrd, term); term = values.lookupOrd(currentOrd);
return term; return term;
} }

View File

@ -53,12 +53,14 @@ public abstract class SortedSetDocValues {
*/ */
public abstract void setDocument(int docID); public abstract void setDocument(int docID);
/** Retrieves the value for the specified ordinal. /** Retrieves the value for the specified ordinal. The returned
* {@link BytesRef} may be re-used across calls to lookupOrd so make sure to
* {@link BytesRef#deepCopyOf(BytesRef) copy it} if you want to keep it
* around.
* @param ord ordinal to lookup * @param ord ordinal to lookup
* @param result will be populated with the ordinal's value
* @see #nextOrd * @see #nextOrd
*/ */
public abstract void lookupOrd(long ord, BytesRef result); public abstract BytesRef lookupOrd(long ord);
/** /**
* Returns the number of unique values. * Returns the number of unique values.
@ -74,14 +76,13 @@ public abstract class SortedSetDocValues {
* @param key Key to look up * @param key Key to look up
**/ **/
public long lookupTerm(BytesRef key) { public long lookupTerm(BytesRef key) {
BytesRef spare = new BytesRef();
long low = 0; long low = 0;
long high = getValueCount()-1; long high = getValueCount()-1;
while (low <= high) { while (low <= high) {
long mid = (low + high) >>> 1; long mid = (low + high) >>> 1;
lookupOrd(mid, spare); final BytesRef term = lookupOrd(mid);
int cmp = spare.compareTo(key); int cmp = term.compareTo(key);
if (cmp < 0) { if (cmp < 0) {
low = mid + 1; low = mid + 1;

View File

@ -28,11 +28,13 @@ import org.apache.lucene.util.BytesRef;
class SortedSetDocValuesTermsEnum extends TermsEnum { class SortedSetDocValuesTermsEnum extends TermsEnum {
private final SortedSetDocValues values; private final SortedSetDocValues values;
private long currentOrd = -1; private long currentOrd = -1;
private final BytesRef term = new BytesRef(); private BytesRef term;
private final BytesRef scratch;
/** Creates a new TermsEnum over the provided values */ /** Creates a new TermsEnum over the provided values */
public SortedSetDocValuesTermsEnum(SortedSetDocValues values) { public SortedSetDocValuesTermsEnum(SortedSetDocValues values) {
this.values = values; this.values = values;
scratch = new BytesRef();
} }
@Override @Override
@ -40,12 +42,8 @@ class SortedSetDocValuesTermsEnum extends TermsEnum {
long ord = values.lookupTerm(text); long ord = values.lookupTerm(text);
if (ord >= 0) { if (ord >= 0) {
currentOrd = ord; currentOrd = ord;
term.offset = 0; scratch.copyBytes(text);
// TODO: is there a cleaner way? term = scratch;
// term.bytes may be pointing to codec-private byte[]
// storage, so we must force new byte[] allocation:
term.bytes = new byte[text.length];
term.copyBytes(text);
return SeekStatus.FOUND; return SeekStatus.FOUND;
} else { } else {
currentOrd = -ord-1; currentOrd = -ord-1;
@ -53,7 +51,7 @@ class SortedSetDocValuesTermsEnum extends TermsEnum {
return SeekStatus.END; return SeekStatus.END;
} else { } else {
// TODO: hmm can we avoid this "extra" lookup?: // TODO: hmm can we avoid this "extra" lookup?:
values.lookupOrd(currentOrd, term); term = values.lookupOrd(currentOrd);
return SeekStatus.NOT_FOUND; return SeekStatus.NOT_FOUND;
} }
} }
@ -63,13 +61,9 @@ class SortedSetDocValuesTermsEnum extends TermsEnum {
public boolean seekExact(BytesRef text) throws IOException { public boolean seekExact(BytesRef text) throws IOException {
long ord = values.lookupTerm(text); long ord = values.lookupTerm(text);
if (ord >= 0) { if (ord >= 0) {
term.offset = 0;
// TODO: is there a cleaner way?
// term.bytes may be pointing to codec-private byte[]
// storage, so we must force new byte[] allocation:
term.bytes = new byte[text.length];
term.copyBytes(text);
currentOrd = ord; currentOrd = ord;
scratch.copyBytes(text);
term = scratch;
return true; return true;
} else { } else {
return false; return false;
@ -80,7 +74,7 @@ class SortedSetDocValuesTermsEnum extends TermsEnum {
public void seekExact(long ord) throws IOException { public void seekExact(long ord) throws IOException {
assert ord >= 0 && ord < values.getValueCount(); assert ord >= 0 && ord < values.getValueCount();
currentOrd = (int) ord; currentOrd = (int) ord;
values.lookupOrd(currentOrd, term); term = values.lookupOrd(currentOrd);
} }
@Override @Override
@ -89,7 +83,7 @@ class SortedSetDocValuesTermsEnum extends TermsEnum {
if (currentOrd >= values.getValueCount()) { if (currentOrd >= values.getValueCount()) {
return null; return null;
} }
values.lookupOrd(currentOrd, term); term = values.lookupOrd(currentOrd);
return term; return term;
} }

View File

@ -831,7 +831,7 @@ public abstract class FieldComparator<T> {
if (values[slot] == null) { if (values[slot] == null) {
values[slot] = new BytesRef(); values[slot] = new BytesRef();
} }
termsIndex.lookupOrd(ord, values[slot]); values[slot].copyBytes(termsIndex.lookupOrd(ord));
} }
ords[slot] = ord; ords[slot] = ord;
readerGen[slot] = currentReaderGen; readerGen[slot] = currentReaderGen;
@ -960,21 +960,25 @@ public abstract class FieldComparator<T> {
// sentinels, just used internally in this comparator // sentinels, just used internally in this comparator
private static final byte[] MISSING_BYTES = new byte[0]; private static final byte[] MISSING_BYTES = new byte[0];
private static final byte[] NON_MISSING_BYTES = new byte[0]; // TODO: this is seriously not good, we should nuke this comparator, or
// instead we should represent missing as null, or use missingValue from the user...
private BytesRef[] values; // but it was always this way...
private final BytesRef MISSING_BYTESREF = new BytesRef(MISSING_BYTES);
private final BytesRef[] values;
private final BytesRef[] tempBRs;
private BinaryDocValues docTerms; private BinaryDocValues docTerms;
private Bits docsWithField; private Bits docsWithField;
private final String field; private final String field;
private BytesRef bottom; private BytesRef bottom;
private BytesRef topValue; private BytesRef topValue;
private final BytesRef tempBR = new BytesRef();
// TODO: add missing first/last support here? // TODO: add missing first/last support here?
/** Sole constructor. */ /** Sole constructor. */
TermValComparator(int numHits, String field) { TermValComparator(int numHits, String field) {
values = new BytesRef[numHits]; values = new BytesRef[numHits];
tempBRs = new BytesRef[numHits];
this.field = field; this.field = field;
} }
@ -982,32 +986,27 @@ public abstract class FieldComparator<T> {
public int compare(int slot1, int slot2) { public int compare(int slot1, int slot2) {
final BytesRef val1 = values[slot1]; final BytesRef val1 = values[slot1];
final BytesRef val2 = values[slot2]; final BytesRef val2 = values[slot2];
if (val1.bytes == MISSING_BYTES) { return compareValues(val1, val2);
if (val2.bytes == MISSING_BYTES) {
return 0;
}
return -1;
} else if (val2.bytes == MISSING_BYTES) {
return 1;
}
return val1.compareTo(val2);
} }
@Override @Override
public int compareBottom(int doc) { public int compareBottom(int doc) {
docTerms.get(doc, tempBR); final BytesRef comparableBytes = getComparableBytes(doc, docTerms.get(doc));
setMissingBytes(doc, tempBR); return compareValues(bottom, comparableBytes);
return compareValues(bottom, tempBR);
} }
@Override @Override
public void copy(int slot, int doc) { public void copy(int slot, int doc) {
if (values[slot] == null) { final BytesRef comparableBytes = getComparableBytes(doc, docTerms.get(doc));
values[slot] = new BytesRef(); if (comparableBytes == MISSING_BYTESREF) {
values[slot] = MISSING_BYTESREF;
} else {
if (tempBRs[slot] == null) {
tempBRs[slot] = new BytesRef();
}
values[slot] = tempBRs[slot];
values[slot].copyBytes(comparableBytes);
} }
docTerms.get(doc, values[slot]);
setMissingBytes(doc, values[slot]);
} }
@Override @Override
@ -1027,6 +1026,9 @@ public abstract class FieldComparator<T> {
if (value == null) { if (value == null) {
throw new IllegalArgumentException("value cannot be null"); throw new IllegalArgumentException("value cannot be null");
} }
if (value.bytes == MISSING_BYTES) {
value = MISSING_BYTESREF;
}
topValue = value; topValue = value;
} }
@ -1038,12 +1040,12 @@ public abstract class FieldComparator<T> {
@Override @Override
public int compareValues(BytesRef val1, BytesRef val2) { public int compareValues(BytesRef val1, BytesRef val2) {
// missing always sorts first: // missing always sorts first:
if (val1.bytes == MISSING_BYTES) { if (val1 == MISSING_BYTESREF) {
if (val2.bytes == MISSING_BYTES) { if (val2 == MISSING_BYTESREF) {
return 0; return 0;
} }
return -1; return -1;
} else if (val2.bytes == MISSING_BYTES) { } else if (val2 == MISSING_BYTESREF) {
return 1; return 1;
} }
return val1.compareTo(val2); return val1.compareTo(val2);
@ -1051,20 +1053,19 @@ public abstract class FieldComparator<T> {
@Override @Override
public int compareTop(int doc) { public int compareTop(int doc) {
docTerms.get(doc, tempBR); final BytesRef comparableBytes = getComparableBytes(doc, docTerms.get(doc));
setMissingBytes(doc, tempBR); return compareValues(topValue, comparableBytes);
return compareValues(topValue, tempBR);
} }
private void setMissingBytes(int doc, BytesRef br) { /**
if (br.length == 0) { * Given a document and a term, return the term itself if it exists or
br.offset = 0; * {@link #MISSING_BYTESREF} otherwise.
if (docsWithField.get(doc) == false) { */
br.bytes = MISSING_BYTES; private BytesRef getComparableBytes(int doc, BytesRef term) {
} else { if (term.length == 0 && docsWithField.get(doc) == false) {
br.bytes = NON_MISSING_BYTES; return MISSING_BYTESREF;
}
} }
return term;
} }
} }
} }

View File

@ -106,8 +106,8 @@ public class SortedSetSelector {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
in.lookupOrd(ord, result); return in.lookupOrd(ord);
} }
@Override @Override
@ -141,8 +141,8 @@ public class SortedSetSelector {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
in.lookupOrd(ord, result); return in.lookupOrd(ord);
} }
@Override @Override
@ -176,8 +176,8 @@ public class SortedSetSelector {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
in.lookupOrd(ord, result); return in.lookupOrd(ord);
} }
@Override @Override
@ -211,8 +211,8 @@ public class SortedSetSelector {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
in.lookupOrd(ord, result); return in.lookupOrd(ord);
} }
@Override @Override

View File

@ -110,7 +110,6 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase {
Query query = new TermQuery(new Term("fieldname", "text")); Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = isearcher.search(query, null, 1); TopDocs hits = isearcher.search(query, null, 1);
assertEquals(1, hits.totalHits); assertEquals(1, hits.totalHits);
BytesRef scratch = new BytesRef();
// Iterate through the results: // Iterate through the results:
for (int i = 0; i < hits.scoreDocs.length; i++) { for (int i = 0; i < hits.scoreDocs.length; i++) {
StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc); StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
@ -119,8 +118,8 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase {
NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv1"); NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv1");
assertEquals(5, dv.get(hits.scoreDocs[i].doc)); assertEquals(5, dv.get(hits.scoreDocs[i].doc));
BinaryDocValues dv2 = ireader.leaves().get(0).reader().getBinaryDocValues("dv2"); BinaryDocValues dv2 = ireader.leaves().get(0).reader().getBinaryDocValues("dv2");
dv2.get(hits.scoreDocs[i].doc, scratch); final BytesRef term = dv2.get(hits.scoreDocs[i].doc);
assertEquals(new BytesRef("hello world"), scratch); assertEquals(new BytesRef("hello world"), term);
} }
ireader.close(); ireader.close();

View File

@ -80,15 +80,14 @@ public class Test2BBinaryDocValues extends LuceneTestCase {
int expectedValue = 0; int expectedValue = 0;
for (AtomicReaderContext context : r.leaves()) { for (AtomicReaderContext context : r.leaves()) {
AtomicReader reader = context.reader(); AtomicReader reader = context.reader();
BytesRef scratch = new BytesRef();
BinaryDocValues dv = reader.getBinaryDocValues("dv"); BinaryDocValues dv = reader.getBinaryDocValues("dv");
for (int i = 0; i < reader.maxDoc(); i++) { for (int i = 0; i < reader.maxDoc(); i++) {
bytes[0] = (byte)(expectedValue >> 24); bytes[0] = (byte)(expectedValue >> 24);
bytes[1] = (byte)(expectedValue >> 16); bytes[1] = (byte)(expectedValue >> 16);
bytes[2] = (byte)(expectedValue >> 8); bytes[2] = (byte)(expectedValue >> 8);
bytes[3] = (byte) expectedValue; bytes[3] = (byte) expectedValue;
dv.get(i, scratch); final BytesRef term = dv.get(i);
assertEquals(data, scratch); assertEquals(data, term);
expectedValue++; expectedValue++;
} }
} }
@ -141,11 +140,10 @@ public class Test2BBinaryDocValues extends LuceneTestCase {
ByteArrayDataInput input = new ByteArrayDataInput(); ByteArrayDataInput input = new ByteArrayDataInput();
for (AtomicReaderContext context : r.leaves()) { for (AtomicReaderContext context : r.leaves()) {
AtomicReader reader = context.reader(); AtomicReader reader = context.reader();
BytesRef scratch = new BytesRef(bytes);
BinaryDocValues dv = reader.getBinaryDocValues("dv"); BinaryDocValues dv = reader.getBinaryDocValues("dv");
for (int i = 0; i < reader.maxDoc(); i++) { for (int i = 0; i < reader.maxDoc(); i++) {
dv.get(i, scratch); final BytesRef term = dv.get(i);
input.reset(scratch.bytes, scratch.offset, scratch.length); input.reset(term.bytes, term.offset, term.length);
assertEquals(expectedValue % 65535, input.readVInt()); assertEquals(expectedValue % 65535, input.readVInt());
assertTrue(input.eof()); assertTrue(input.eof());
expectedValue++; expectedValue++;

View File

@ -78,13 +78,12 @@ public class Test2BSortedDocValues extends LuceneTestCase {
int expectedValue = 0; int expectedValue = 0;
for (AtomicReaderContext context : r.leaves()) { for (AtomicReaderContext context : r.leaves()) {
AtomicReader reader = context.reader(); AtomicReader reader = context.reader();
BytesRef scratch = new BytesRef();
BinaryDocValues dv = reader.getSortedDocValues("dv"); BinaryDocValues dv = reader.getSortedDocValues("dv");
for (int i = 0; i < reader.maxDoc(); i++) { for (int i = 0; i < reader.maxDoc(); i++) {
bytes[0] = (byte)(expectedValue >> 8); bytes[0] = (byte)(expectedValue >> 8);
bytes[1] = (byte) expectedValue; bytes[1] = (byte) expectedValue;
dv.get(i, scratch); final BytesRef term = dv.get(i);
assertEquals(data, scratch); assertEquals(data, term);
expectedValue++; expectedValue++;
} }
} }
@ -144,8 +143,8 @@ public class Test2BSortedDocValues extends LuceneTestCase {
bytes[2] = (byte) (counter >> 8); bytes[2] = (byte) (counter >> 8);
bytes[3] = (byte) counter; bytes[3] = (byte) counter;
counter++; counter++;
dv.get(i, scratch); final BytesRef term = dv.get(i);
assertEquals(data, scratch); assertEquals(data, term);
} }
} }

View File

@ -524,20 +524,19 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
(byte)(id >>> 24), (byte)(id >>> 16),(byte)(id >>> 8),(byte)id (byte)(id >>> 24), (byte)(id >>> 16),(byte)(id >>> 8),(byte)id
}; };
BytesRef expectedRef = new BytesRef(bytes); BytesRef expectedRef = new BytesRef(bytes);
BytesRef scratch = new BytesRef();
dvBytesDerefFixed.get(i, scratch); BytesRef term = dvBytesDerefFixed.get(i);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
dvBytesDerefVar.get(i, scratch); term = dvBytesDerefVar.get(i);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
dvBytesSortedFixed.get(i, scratch); term = dvBytesSortedFixed.get(i);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
dvBytesSortedVar.get(i, scratch); term = dvBytesSortedVar.get(i);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
dvBytesStraightFixed.get(i, scratch); term = dvBytesStraightFixed.get(i);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
dvBytesStraightVar.get(i, scratch); term = dvBytesStraightVar.get(i);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
assertEquals((double)id, Double.longBitsToDouble(dvDouble.get(i)), 0D); assertEquals((double)id, Double.longBitsToDouble(dvDouble.get(i)), 0D);
assertEquals((float)id, Float.intBitsToFloat((int)dvFloat.get(i)), 0F); assertEquals((float)id, Float.intBitsToFloat((int)dvFloat.get(i)), 0F);
@ -549,8 +548,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
dvSortedSet.setDocument(i); dvSortedSet.setDocument(i);
long ord = dvSortedSet.nextOrd(); long ord = dvSortedSet.nextOrd();
assertEquals(SortedSetDocValues.NO_MORE_ORDS, dvSortedSet.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, dvSortedSet.nextOrd());
dvSortedSet.lookupOrd(ord, scratch); term = dvSortedSet.lookupOrd(ord);
assertEquals(expectedRef, scratch); assertEquals(expectedRef, term);
} }
} }
} }
@ -1055,9 +1054,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
private void assertBinaryDocValues(AtomicReader r, String f, String cf) throws IOException { private void assertBinaryDocValues(AtomicReader r, String f, String cf) throws IOException {
BinaryDocValues bdvf = r.getBinaryDocValues(f); BinaryDocValues bdvf = r.getBinaryDocValues(f);
BinaryDocValues bdvcf = r.getBinaryDocValues(cf); BinaryDocValues bdvcf = r.getBinaryDocValues(cf);
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(TestBinaryDocValuesUpdates.getValue(bdvcf, i, scratch ), TestBinaryDocValuesUpdates.getValue(bdvf, i, scratch)*2); assertEquals(TestBinaryDocValuesUpdates.getValue(bdvcf, i), TestBinaryDocValuesUpdates.getValue(bdvf, i)*2);
} }
} }

View File

@ -59,13 +59,13 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks;
@SuppressWarnings("resource") @SuppressWarnings("resource")
public class TestBinaryDocValuesUpdates extends LuceneTestCase { public class TestBinaryDocValuesUpdates extends LuceneTestCase {
static long getValue(BinaryDocValues bdv, int idx, BytesRef scratch) { static long getValue(BinaryDocValues bdv, int idx) {
bdv.get(idx, scratch); BytesRef term = bdv.get(idx);
idx = scratch.offset; idx = term.offset;
byte b = scratch.bytes[idx++]; byte b = term.bytes[idx++];
long value = b & 0x7FL; long value = b & 0x7FL;
for (int shift = 7; (b & 0x80L) != 0; shift += 7) { for (int shift = 7; (b & 0x80L) != 0; shift += 7) {
b = scratch.bytes[idx++]; b = term.bytes[idx++];
value |= (b & 0x7FL) << shift; value |= (b & 0x7FL) << shift;
} }
return value; return value;
@ -139,9 +139,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
assertEquals(1, reader.leaves().size()); assertEquals(1, reader.leaves().size());
AtomicReader r = reader.leaves().get(0).reader(); AtomicReader r = reader.leaves().get(0).reader();
BinaryDocValues bdv = r.getBinaryDocValues("val"); BinaryDocValues bdv = r.getBinaryDocValues("val");
BytesRef scratch = new BytesRef(); assertEquals(2, getValue(bdv, 0));
assertEquals(2, getValue(bdv, 0, scratch)); assertEquals(2, getValue(bdv, 1));
assertEquals(2, getValue(bdv, 1, scratch));
reader.close(); reader.close();
dir.close(); dir.close();
@ -179,14 +178,13 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
} }
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
BinaryDocValues bdv = r.getBinaryDocValues("val"); BinaryDocValues bdv = r.getBinaryDocValues("val");
assertNotNull(bdv); assertNotNull(bdv);
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
long expected = expectedValues[i + context.docBase]; long expected = expectedValues[i + context.docBase];
long actual = getValue(bdv, i, scratch); long actual = getValue(bdv, i);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
} }
@ -222,11 +220,10 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
assertNotNull(reader2); assertNotNull(reader2);
assertTrue(reader1 != reader2); assertTrue(reader1 != reader2);
BytesRef scratch = new BytesRef();
BinaryDocValues bdv1 = reader1.leaves().get(0).reader().getBinaryDocValues("val"); BinaryDocValues bdv1 = reader1.leaves().get(0).reader().getBinaryDocValues("val");
BinaryDocValues bdv2 = reader2.leaves().get(0).reader().getBinaryDocValues("val"); BinaryDocValues bdv2 = reader2.leaves().get(0).reader().getBinaryDocValues("val");
assertEquals(1, getValue(bdv1, 0, scratch)); assertEquals(1, getValue(bdv1, 0));
assertEquals(10, getValue(bdv2, 0, scratch)); assertEquals(10, getValue(bdv2, 0));
writer.shutdown(); writer.shutdown();
IOUtils.close(reader1, reader2, dir); IOUtils.close(reader1, reader2, dir);
@ -274,9 +271,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17}; long[] expectedValues = new long[] { 1, 2, 3, 17, 5, 17};
BinaryDocValues bdv = slow.getBinaryDocValues("val"); BinaryDocValues bdv = slow.getBinaryDocValues("val");
BytesRef scratch = new BytesRef();
for (int i = 0; i < expectedValues.length; i++) { for (int i = 0; i < expectedValues.length; i++) {
assertEquals(expectedValues[i], getValue(bdv, i, scratch)); assertEquals(expectedValues[i], getValue(bdv, i));
} }
reader.close(); reader.close();
@ -311,7 +307,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
AtomicReader r = reader.leaves().get(0).reader(); AtomicReader r = reader.leaves().get(0).reader();
assertFalse(r.getLiveDocs().get(0)); assertFalse(r.getLiveDocs().get(0));
assertEquals(17, getValue(r.getBinaryDocValues("val"), 1, new BytesRef())); assertEquals(17, getValue(r.getBinaryDocValues("val"), 1));
reader.close(); reader.close();
dir.close(); dir.close();
@ -345,7 +341,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
AtomicReader r = reader.leaves().get(0).reader(); AtomicReader r = reader.leaves().get(0).reader();
assertFalse(r.getLiveDocs().get(0)); assertFalse(r.getLiveDocs().get(0));
assertEquals(1, getValue(r.getBinaryDocValues("val"), 0, new BytesRef())); // deletes are currently applied first assertEquals(1, getValue(r.getBinaryDocValues("val"), 0)); // deletes are currently applied first
reader.close(); reader.close();
dir.close(); dir.close();
@ -379,20 +375,19 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
SortedDocValues sdv = r.getSortedDocValues("sdv"); SortedDocValues sdv = r.getSortedDocValues("sdv");
SortedSetDocValues ssdv = r.getSortedSetDocValues("ssdv"); SortedSetDocValues ssdv = r.getSortedSetDocValues("ssdv");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(i, ndv.get(i)); assertEquals(i, ndv.get(i));
assertEquals(17, getValue(bdv, i, scratch)); assertEquals(17, getValue(bdv, i));
sdv.get(i, scratch); BytesRef term = sdv.get(i);
assertEquals(new BytesRef(Integer.toString(i)), scratch); assertEquals(new BytesRef(Integer.toString(i)), term);
ssdv.setDocument(i); ssdv.setDocument(i);
long ord = ssdv.nextOrd(); long ord = ssdv.nextOrd();
ssdv.lookupOrd(ord, scratch); term = ssdv.lookupOrd(ord);
assertEquals(i, Integer.parseInt(scratch.utf8ToString())); assertEquals(i, Integer.parseInt(term.utf8ToString()));
if (i != 0) { if (i != 0) {
ord = ssdv.nextOrd(); ord = ssdv.nextOrd();
ssdv.lookupOrd(ord, scratch); term = ssdv.lookupOrd(ord);
assertEquals(i * 2, Integer.parseInt(scratch.utf8ToString())); assertEquals(i * 2, Integer.parseInt(term.utf8ToString()));
} }
assertEquals(SortedSetDocValues.NO_MORE_ORDS, ssdv.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, ssdv.nextOrd());
} }
@ -425,10 +420,9 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
BinaryDocValues bdv1 = r.getBinaryDocValues("bdv1"); BinaryDocValues bdv1 = r.getBinaryDocValues("bdv1");
BinaryDocValues bdv2 = r.getBinaryDocValues("bdv2"); BinaryDocValues bdv2 = r.getBinaryDocValues("bdv2");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(17, getValue(bdv1, i, scratch)); assertEquals(17, getValue(bdv1, i));
assertEquals(i, getValue(bdv2, i, scratch)); assertEquals(i, getValue(bdv2, i));
} }
reader.close(); reader.close();
@ -457,9 +451,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader = DirectoryReader.open(dir); final DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader r = reader.leaves().get(0).reader(); AtomicReader r = reader.leaves().get(0).reader();
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(17, getValue(bdv, i, scratch)); assertEquals(17, getValue(bdv, i));
} }
reader.close(); reader.close();
@ -525,11 +518,10 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
AtomicReader r = SlowCompositeReaderWrapper.wrap(reader); AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
SortedDocValues sdv = r.getSortedDocValues("sorted"); SortedDocValues sdv = r.getSortedDocValues("sorted");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(17, getValue(bdv, i, scratch)); assertEquals(17, getValue(bdv, i));
sdv.get(i, scratch); BytesRef term = sdv.get(i);
assertEquals(new BytesRef("value"), scratch); assertEquals(new BytesRef("value"), term);
} }
reader.close(); reader.close();
@ -555,9 +547,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader = DirectoryReader.open(dir); final DirectoryReader reader = DirectoryReader.open(dir);
final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader); final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(3, getValue(bdv, i, scratch)); assertEquals(3, getValue(bdv, i));
} }
reader.close(); reader.close();
dir.close(); dir.close();
@ -624,9 +615,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
assertNull("index should have no deletes after forceMerge", r.getLiveDocs()); assertNull("index should have no deletes after forceMerge", r.getLiveDocs());
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
assertNotNull(bdv); assertNotNull(bdv);
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(value, getValue(bdv, i, scratch)); assertEquals(value, getValue(bdv, i));
} }
reader.close(); reader.close();
} }
@ -656,9 +646,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader = DirectoryReader.open(dir); final DirectoryReader reader = DirectoryReader.open(dir);
final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader); final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(3, getValue(bdv, i, scratch)); assertEquals(3, getValue(bdv, i));
} }
reader.close(); reader.close();
dir.close(); dir.close();
@ -728,7 +717,6 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
reader = newReader; reader = newReader;
// System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader); // System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
assertTrue(reader.numDocs() > 0); // we delete at most one document per round assertTrue(reader.numDocs() > 0); // we delete at most one document per round
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
// System.out.println(((SegmentReader) r).getSegmentName()); // System.out.println(((SegmentReader) r).getSegmentName());
@ -743,7 +731,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
if (liveDocs == null || liveDocs.get(doc)) { if (liveDocs == null || liveDocs.get(doc)) {
// System.out.println("doc=" + (doc + context.docBase) + " f='" + f + "' vslue=" + getValue(bdv, doc, scratch)); // System.out.println("doc=" + (doc + context.docBase) + " f='" + f + "' vslue=" + getValue(bdv, doc, scratch));
assertTrue(docsWithField.get(doc)); assertTrue(docsWithField.get(doc));
assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], getValue(bdv, doc, scratch)); assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], getValue(bdv, doc));
} }
} }
} }
@ -793,17 +781,16 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
Bits docsWithField = r.getDocsWithField("bdv"); Bits docsWithField = r.getDocsWithField("bdv");
assertNotNull(docsWithField); assertNotNull(docsWithField);
assertTrue(docsWithField.get(0)); assertTrue(docsWithField.get(0));
assertEquals(5L, getValue(bdv, 0, scratch)); assertEquals(5L, getValue(bdv, 0));
assertFalse(docsWithField.get(1)); assertFalse(docsWithField.get(1));
bdv.get(1, scratch); BytesRef term = bdv.get(1);
assertEquals(0, scratch.length); assertEquals(0, term.length);
} }
reader.close(); reader.close();
@ -839,12 +826,11 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(5L, getValue(bdv, i, scratch)); assertEquals(5L, getValue(bdv, i));
} }
} }
reader.close(); reader.close();
@ -869,7 +855,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
DirectoryReader r = DirectoryReader.open(dir); DirectoryReader r = DirectoryReader.open(dir);
BinaryDocValues bdv = r.leaves().get(0).reader().getBinaryDocValues("f"); BinaryDocValues bdv = r.leaves().get(0).reader().getBinaryDocValues("f");
assertEquals(17, getValue(bdv, 0, new BytesRef())); assertEquals(17, getValue(bdv, 0));
r.close(); r.close();
dir.close(); dir.close();
@ -1013,7 +999,6 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
for (int i = 0; i < numFields; i++) { for (int i = 0; i < numFields; i++) {
@ -1026,7 +1011,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
if (liveDocs == null || liveDocs.get(j)) { if (liveDocs == null || liveDocs.get(j)) {
assertTrue(docsWithBdv.get(j)); assertTrue(docsWithBdv.get(j));
assertTrue(docsWithControl.get(j)); assertTrue(docsWithControl.get(j));
assertEquals(getValue(control, j, scratch), getValue(bdv, j, scratch) * 2); assertEquals(getValue(control, j), getValue(bdv, j) * 2);
} }
} }
} }
@ -1053,7 +1038,6 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
} }
int numGens = atLeast(5); int numGens = atLeast(5);
BytesRef scratch = new BytesRef();
for (int i = 0; i < numGens; i++) { for (int i = 0; i < numGens; i++) {
int doc = random().nextInt(numDocs); int doc = random().nextInt(numDocs);
Term t = new Term("id", "doc" + doc); Term t = new Term("id", "doc" + doc);
@ -1065,7 +1049,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
BinaryDocValues fbdv = r.getBinaryDocValues("f"); BinaryDocValues fbdv = r.getBinaryDocValues("f");
BinaryDocValues cfbdv = r.getBinaryDocValues("cf"); BinaryDocValues cfbdv = r.getBinaryDocValues("cf");
for (int j = 0; j < r.maxDoc(); j++) { for (int j = 0; j < r.maxDoc(); j++) {
assertEquals(getValue(cfbdv, j, scratch), getValue(fbdv, j, scratch) * 2); assertEquals(getValue(cfbdv, j), getValue(fbdv, j) * 2);
} }
} }
reader.close(); reader.close();
@ -1114,11 +1098,10 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
AtomicReader r = SlowCompositeReaderWrapper.wrap(reader); AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
BinaryDocValues f1 = r.getBinaryDocValues("f1"); BinaryDocValues f1 = r.getBinaryDocValues("f1");
BinaryDocValues f2 = r.getBinaryDocValues("f2"); BinaryDocValues f2 = r.getBinaryDocValues("f2");
BytesRef scratch = new BytesRef(); assertEquals(12L, getValue(f1, 0));
assertEquals(12L, getValue(f1, 0, scratch)); assertEquals(13L, getValue(f2, 0));
assertEquals(13L, getValue(f2, 0, scratch)); assertEquals(17L, getValue(f1, 1));
assertEquals(17L, getValue(f1, 1, scratch)); assertEquals(2L, getValue(f2, 1));
assertEquals(2L, getValue(f2, 1, scratch));
reader.close(); reader.close();
dir.close(); dir.close();
} }
@ -1167,13 +1150,12 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir2); DirectoryReader reader = DirectoryReader.open(dir2);
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
BinaryDocValues control = r.getBinaryDocValues("control"); BinaryDocValues control = r.getBinaryDocValues("control");
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(getValue(bdv, i, scratch)*2, getValue(control, i, scratch)); assertEquals(getValue(bdv, i)*2, getValue(control, i));
} }
} }
reader.close(); reader.close();
@ -1263,14 +1245,13 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
for (int i = 0; i < numBinaryFields; i++) { for (int i = 0; i < numBinaryFields; i++) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
BinaryDocValues f = r.getBinaryDocValues("f" + i); BinaryDocValues f = r.getBinaryDocValues("f" + i);
BinaryDocValues cf = r.getBinaryDocValues("cf" + i); BinaryDocValues cf = r.getBinaryDocValues("cf" + i);
for (int j = 0; j < r.maxDoc(); j++) { for (int j = 0; j < r.maxDoc(); j++) {
assertEquals("reader=" + r + ", field=f" + i + ", doc=" + j, getValue(cf, j, scratch), getValue(f, j, scratch) * 2); assertEquals("reader=" + r + ", field=f" + i + ", doc=" + j, getValue(cf, j), getValue(f, j) * 2);
} }
} }
} }
@ -1298,9 +1279,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
BytesRef scratch = new BytesRef(); assertEquals(4, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
assertEquals(4, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0, scratch)); assertEquals(3, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f2"), 0));
assertEquals(3, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f2"), 0, scratch));
reader.close(); reader.close();
dir.close(); dir.close();
@ -1324,7 +1304,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(1, reader.leaves().size()); assertEquals(1, reader.leaves().size());
assertEquals(2L, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0, new BytesRef())); assertEquals(2L, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
reader.close(); reader.close();
dir.close(); dir.close();
@ -1346,7 +1326,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(1, reader.leaves().size()); assertEquals(1, reader.leaves().size());
assertEquals(1L, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0, new BytesRef())); assertEquals(1L, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
reader.close(); reader.close();
dir.close(); dir.close();

View File

@ -19,7 +19,6 @@ package org.apache.lucene.index;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.Codec;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
/** Tests the codec configuration defined by LuceneTestCase randomly /** Tests the codec configuration defined by LuceneTestCase randomly
* (typically a mix across different fields). * (typically a mix across different fields).

View File

@ -177,13 +177,12 @@ public class TestDocValuesIndexing extends LuceneTestCase {
DirectoryReader r = w.getReader(); DirectoryReader r = w.getReader();
BinaryDocValues s = DocValues.getSorted(getOnlySegmentReader(r), "field"); BinaryDocValues s = DocValues.getSorted(getOnlySegmentReader(r), "field");
BytesRef bytes1 = new BytesRef(); BytesRef bytes1 = s.get(0);
s.get(0, bytes1);
assertEquals(bytes.length, bytes1.length); assertEquals(bytes.length, bytes1.length);
bytes[0] = 0; bytes[0] = 0;
assertEquals(b, bytes1); assertEquals(b, bytes1);
s.get(1, bytes1); bytes1 = s.get(1);
assertEquals(bytes.length, bytes1.length); assertEquals(bytes.length, bytes1.length);
bytes[0] = 1; bytes[0] = 1;
assertEquals(b, bytes1); assertEquals(b, bytes1);

View File

@ -2091,7 +2091,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for (int i = 0; i < reader.maxDoc(); i++) { for (int i = 0; i < reader.maxDoc(); i++) {
if (liveDocs == null || liveDocs.get(i)) { if (liveDocs == null || liveDocs.get(i)) {
assertEquals("doc=" + (docBase + i), cf.get(i), f.get(i) * 2); assertEquals("doc=" + (docBase + i), cf.get(i), f.get(i) * 2);
assertEquals("doc=" + (docBase + i), TestBinaryDocValuesUpdates.getValue(bcf, i, scratch), TestBinaryDocValuesUpdates.getValue(bf, i, scratch) * 2); assertEquals("doc=" + (docBase + i), TestBinaryDocValuesUpdates.getValue(bcf, i), TestBinaryDocValuesUpdates.getValue(bf, i) * 2);
} }
} }
} }

View File

@ -116,7 +116,6 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
reader = newReader; reader = newReader;
// System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader); // System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
assertTrue(reader.numDocs() > 0); // we delete at most one document per round assertTrue(reader.numDocs() > 0); // we delete at most one document per round
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
// System.out.println(((SegmentReader) r).getSegmentName()); // System.out.println(((SegmentReader) r).getSegmentName());
@ -141,7 +140,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
if (field < numNDVFields) { if (field < numNDVFields) {
assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], ndv.get(doc)); assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], ndv.get(doc));
} else { } else {
assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], TestBinaryDocValuesUpdates.getValue(bdv, doc, scratch)); assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], TestBinaryDocValuesUpdates.getValue(bdv, doc));
} }
} }
} }
@ -275,7 +274,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
assertTrue(docsWithBdv.get(j)); assertTrue(docsWithBdv.get(j));
assertTrue(docsWithControl.get(j)); assertTrue(docsWithControl.get(j));
long ctrlValue = control.get(j); long ctrlValue = control.get(j);
long bdvValue = TestBinaryDocValuesUpdates.getValue(bdv, j, scratch) * 2; long bdvValue = TestBinaryDocValuesUpdates.getValue(bdv, j) * 2;
// if (ctrlValue != bdvValue) { // if (ctrlValue != bdvValue) {
// System.out.println("seg=" + r + ", f=f" + i + ", doc=" + j + ", group=" + r.document(j).get("updKey") + ", ctrlValue=" + ctrlValue + ", bdvBytes=" + scratch); // System.out.println("seg=" + r + ", f=f" + i + ", doc=" + j + ", group=" + r.document(j).get("updKey") + ", ctrlValue=" + ctrlValue + ", bdvBytes=" + scratch);
// } // }
@ -306,7 +305,6 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
} }
int numGens = atLeast(5); int numGens = atLeast(5);
BytesRef scratch = new BytesRef();
for (int i = 0; i < numGens; i++) { for (int i = 0; i < numGens; i++) {
int doc = random().nextInt(numDocs); int doc = random().nextInt(numDocs);
Term t = new Term("id", "doc" + doc); Term t = new Term("id", "doc" + doc);
@ -319,7 +317,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
BinaryDocValues fbdv = r.getBinaryDocValues("f"); BinaryDocValues fbdv = r.getBinaryDocValues("f");
NumericDocValues cfndv = r.getNumericDocValues("cf"); NumericDocValues cfndv = r.getNumericDocValues("cf");
for (int j = 0; j < r.maxDoc(); j++) { for (int j = 0; j < r.maxDoc(); j++) {
assertEquals(cfndv.get(j), TestBinaryDocValuesUpdates.getValue(fbdv, j, scratch) * 2); assertEquals(cfndv.get(j), TestBinaryDocValuesUpdates.getValue(fbdv, j) * 2);
} }
} }
reader.close(); reader.close();
@ -381,14 +379,13 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
writer.shutdown(); writer.shutdown();
DirectoryReader reader = DirectoryReader.open(dir); DirectoryReader reader = DirectoryReader.open(dir);
BytesRef scratch = new BytesRef();
for (AtomicReaderContext context : reader.leaves()) { for (AtomicReaderContext context : reader.leaves()) {
for (int i = 0; i < numBinaryFields; i++) { for (int i = 0; i < numBinaryFields; i++) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
BinaryDocValues f = r.getBinaryDocValues("f" + i); BinaryDocValues f = r.getBinaryDocValues("f" + i);
NumericDocValues cf = r.getNumericDocValues("cf" + i); NumericDocValues cf = r.getNumericDocValues("cf" + i);
for (int j = 0; j < r.maxDoc(); j++) { for (int j = 0; j < r.maxDoc(); j++) {
assertEquals("reader=" + r + ", field=f" + i + ", doc=" + j, cf.get(j), TestBinaryDocValuesUpdates.getValue(f, j, scratch) * 2); assertEquals("reader=" + r + ", field=f" + i + ", doc=" + j, cf.get(j), TestBinaryDocValuesUpdates.getValue(f, j) * 2);
} }
} }
} }

View File

@ -96,11 +96,9 @@ public class TestMultiDocValues extends LuceneTestCase {
BinaryDocValues multi = MultiDocValues.getBinaryValues(ir, "bytes"); BinaryDocValues multi = MultiDocValues.getBinaryValues(ir, "bytes");
BinaryDocValues single = merged.getBinaryDocValues("bytes"); BinaryDocValues single = merged.getBinaryDocValues("bytes");
BytesRef actual = new BytesRef();
BytesRef expected = new BytesRef();
for (int i = 0; i < numDocs; i++) { for (int i = 0; i < numDocs; i++) {
single.get(i, expected); final BytesRef expected = BytesRef.deepCopyOf(single.get(i));
multi.get(i, actual); final BytesRef actual = multi.get(i);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
ir.close(); ir.close();
@ -139,14 +137,12 @@ public class TestMultiDocValues extends LuceneTestCase {
SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes"); SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes");
SortedDocValues single = merged.getSortedDocValues("bytes"); SortedDocValues single = merged.getSortedDocValues("bytes");
assertEquals(single.getValueCount(), multi.getValueCount()); assertEquals(single.getValueCount(), multi.getValueCount());
BytesRef actual = new BytesRef();
BytesRef expected = new BytesRef();
for (int i = 0; i < numDocs; i++) { for (int i = 0; i < numDocs; i++) {
// check ord // check ord
assertEquals(single.getOrd(i), multi.getOrd(i)); assertEquals(single.getOrd(i), multi.getOrd(i));
// check value // check value
single.get(i, expected); final BytesRef expected = BytesRef.deepCopyOf(single.get(i));
multi.get(i, actual); final BytesRef actual = multi.get(i);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
ir.close(); ir.close();
@ -183,14 +179,12 @@ public class TestMultiDocValues extends LuceneTestCase {
SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes"); SortedDocValues multi = MultiDocValues.getSortedValues(ir, "bytes");
SortedDocValues single = merged.getSortedDocValues("bytes"); SortedDocValues single = merged.getSortedDocValues("bytes");
assertEquals(single.getValueCount(), multi.getValueCount()); assertEquals(single.getValueCount(), multi.getValueCount());
BytesRef actual = new BytesRef();
BytesRef expected = new BytesRef();
for (int i = 0; i < numDocs; i++) { for (int i = 0; i < numDocs; i++) {
// check ord // check ord
assertEquals(single.getOrd(i), multi.getOrd(i)); assertEquals(single.getOrd(i), multi.getOrd(i));
// check ord value // check ord value
single.get(i, expected); final BytesRef expected = BytesRef.deepCopyOf(single.get(i));
multi.get(i, actual); final BytesRef actual = multi.get(i);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
ir.close(); ir.close();
@ -230,12 +224,10 @@ public class TestMultiDocValues extends LuceneTestCase {
assertNull(single); assertNull(single);
} else { } else {
assertEquals(single.getValueCount(), multi.getValueCount()); assertEquals(single.getValueCount(), multi.getValueCount());
BytesRef actual = new BytesRef();
BytesRef expected = new BytesRef();
// check values // check values
for (long i = 0; i < single.getValueCount(); i++) { for (long i = 0; i < single.getValueCount(); i++) {
single.lookupOrd(i, expected); final BytesRef expected = BytesRef.deepCopyOf(single.lookupOrd(i));
multi.lookupOrd(i, actual); final BytesRef actual = multi.lookupOrd(i);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
// check ord list // check ord list
@ -295,12 +287,10 @@ public class TestMultiDocValues extends LuceneTestCase {
assertNull(single); assertNull(single);
} else { } else {
assertEquals(single.getValueCount(), multi.getValueCount()); assertEquals(single.getValueCount(), multi.getValueCount());
BytesRef actual = new BytesRef();
BytesRef expected = new BytesRef();
// check values // check values
for (long i = 0; i < single.getValueCount(); i++) { for (long i = 0; i < single.getValueCount(); i++) {
single.lookupOrd(i, expected); final BytesRef expected = BytesRef.deepCopyOf(single.lookupOrd(i));
multi.lookupOrd(i, actual); final BytesRef actual = multi.lookupOrd(i);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
// check ord list // check ord list

View File

@ -356,21 +356,20 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
BinaryDocValues bdv = r.getBinaryDocValues("bdv"); BinaryDocValues bdv = r.getBinaryDocValues("bdv");
SortedDocValues sdv = r.getSortedDocValues("sdv"); SortedDocValues sdv = r.getSortedDocValues("sdv");
SortedSetDocValues ssdv = r.getSortedSetDocValues("ssdv"); SortedSetDocValues ssdv = r.getSortedSetDocValues("ssdv");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(17, ndv.get(i)); assertEquals(17, ndv.get(i));
bdv.get(i, scratch); BytesRef term = bdv.get(i);
assertEquals(new BytesRef(Integer.toString(i)), scratch); assertEquals(new BytesRef(Integer.toString(i)), term);
sdv.get(i, scratch); term = sdv.get(i);
assertEquals(new BytesRef(Integer.toString(i)), scratch); assertEquals(new BytesRef(Integer.toString(i)), term);
ssdv.setDocument(i); ssdv.setDocument(i);
long ord = ssdv.nextOrd(); long ord = ssdv.nextOrd();
ssdv.lookupOrd(ord, scratch); term = ssdv.lookupOrd(ord);
assertEquals(i, Integer.parseInt(scratch.utf8ToString())); assertEquals(i, Integer.parseInt(term.utf8ToString()));
if (i != 0) { if (i != 0) {
ord = ssdv.nextOrd(); ord = ssdv.nextOrd();
ssdv.lookupOrd(ord, scratch); term = ssdv.lookupOrd(ord);
assertEquals(i * 2, Integer.parseInt(scratch.utf8ToString())); assertEquals(i * 2, Integer.parseInt(term.utf8ToString()));
} }
assertEquals(SortedSetDocValues.NO_MORE_ORDS, ssdv.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, ssdv.nextOrd());
} }
@ -504,11 +503,10 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
AtomicReader r = SlowCompositeReaderWrapper.wrap(reader); AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
NumericDocValues ndv = r.getNumericDocValues("ndv"); NumericDocValues ndv = r.getNumericDocValues("ndv");
SortedDocValues sdv = r.getSortedDocValues("sorted"); SortedDocValues sdv = r.getSortedDocValues("sorted");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
assertEquals(17, ndv.get(i)); assertEquals(17, ndv.get(i));
sdv.get(i, scratch); final BytesRef term = sdv.get(i);
assertEquals(new BytesRef("value"), scratch); assertEquals(new BytesRef("value"), term);
} }
reader.close(); reader.close();

View File

@ -148,7 +148,6 @@ class ElevationComparatorSource extends FieldComparatorSource {
SortedDocValues idIndex; SortedDocValues idIndex;
private final int[] values = new int[numHits]; private final int[] values = new int[numHits];
private final BytesRef tempBR = new BytesRef();
int bottomVal; int bottomVal;
@Override @Override
@ -171,8 +170,8 @@ class ElevationComparatorSource extends FieldComparatorSource {
if (ord == -1) { if (ord == -1) {
return 0; return 0;
} else { } else {
idIndex.lookupOrd(ord, tempBR); final BytesRef term = idIndex.lookupOrd(ord);
Integer prio = priority.get(tempBR); Integer prio = priority.get(term);
return prio == null ? 0 : prio.intValue(); return prio == null ? 0 : prio.intValue();
} }
} }

View File

@ -73,17 +73,16 @@ public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesRead
// each term/ord it's assigning as it goes... // each term/ord it's assigning as it goes...
String lastDim = null; String lastDim = null;
int startOrd = -1; int startOrd = -1;
BytesRef spare = new BytesRef();
// TODO: this approach can work for full hierarchy?; // TODO: this approach can work for full hierarchy?;
// TaxoReader can't do this since ords are not in // TaxoReader can't do this since ords are not in
// "sorted order" ... but we should generalize this to // "sorted order" ... but we should generalize this to
// support arbitrary hierarchy: // support arbitrary hierarchy:
for(int ord=0;ord<valueCount;ord++) { for(int ord=0;ord<valueCount;ord++) {
dv.lookupOrd(ord, spare); final BytesRef term = dv.lookupOrd(ord);
String[] components = FacetsConfig.stringToPath(spare.utf8ToString()); String[] components = FacetsConfig.stringToPath(term.utf8ToString());
if (components.length != 2) { if (components.length != 2) {
throw new IllegalArgumentException("this class can only handle 2 level hierarchy (dim/value); got: " + Arrays.toString(components) + " " + spare.utf8ToString()); throw new IllegalArgumentException("this class can only handle 2 level hierarchy (dim/value); got: " + Arrays.toString(components) + " " + term.utf8ToString());
} }
if (!components[0].equals(lastDim)) { if (!components[0].equals(lastDim)) {
if (lastDim != null) { if (lastDim != null) {

View File

@ -131,13 +131,11 @@ public class SortedSetDocValuesFacetCounts extends Facets {
return null; return null;
} }
BytesRef scratch = new BytesRef();
LabelAndValue[] labelValues = new LabelAndValue[q.size()]; LabelAndValue[] labelValues = new LabelAndValue[q.size()];
for(int i=labelValues.length-1;i>=0;i--) { for(int i=labelValues.length-1;i>=0;i--) {
TopOrdAndIntQueue.OrdAndValue ordAndValue = q.pop(); TopOrdAndIntQueue.OrdAndValue ordAndValue = q.pop();
dv.lookupOrd(ordAndValue.ord, scratch); final BytesRef term = dv.lookupOrd(ordAndValue.ord);
String[] parts = FacetsConfig.stringToPath(scratch.utf8ToString()); String[] parts = FacetsConfig.stringToPath(term.utf8ToString());
labelValues[i] = new LabelAndValue(parts[1], ordAndValue.value); labelValues[i] = new LabelAndValue(parts[1], ordAndValue.value);
} }

View File

@ -46,17 +46,15 @@ public class DocValuesOrdinalsReader extends OrdinalsReader {
public OrdinalsSegmentReader getReader(AtomicReaderContext context) throws IOException { public OrdinalsSegmentReader getReader(AtomicReaderContext context) throws IOException {
BinaryDocValues values0 = context.reader().getBinaryDocValues(field); BinaryDocValues values0 = context.reader().getBinaryDocValues(field);
if (values0 == null) { if (values0 == null) {
values0 = DocValues.EMPTY_BINARY; values0 = DocValues.emptyBinary();
} }
final BinaryDocValues values = values0; final BinaryDocValues values = values0;
return new OrdinalsSegmentReader() { return new OrdinalsSegmentReader() {
private final BytesRef bytes = new BytesRef(32);
@Override @Override
public void get(int docID, IntsRef ordinals) throws IOException { public void get(int docID, IntsRef ordinals) throws IOException {
values.get(docID, bytes); final BytesRef bytes = values.get(docID);
decode(bytes, ordinals); decode(bytes, ordinals);
} }
}; };

View File

@ -55,17 +55,16 @@ public class FastTaxonomyFacetCounts extends IntTaxonomyFacets {
if (dv == null) { // this reader does not have DocValues for the requested category list if (dv == null) { // this reader does not have DocValues for the requested category list
continue; continue;
} }
BytesRef scratch = new BytesRef();
DocIdSetIterator docs = hits.bits.iterator(); DocIdSetIterator docs = hits.bits.iterator();
int doc; int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
dv.get(doc, scratch); final BytesRef bytesRef = dv.get(doc);
byte[] bytes = scratch.bytes; byte[] bytes = bytesRef.bytes;
int end = scratch.offset + scratch.length; int end = bytesRef.offset + bytesRef.length;
int ord = 0; int ord = 0;
int offset = scratch.offset; int offset = bytesRef.offset;
int prev = 0; int prev = 0;
while (offset < end) { while (offset < end) {
byte b = bytes[offset++]; byte b = bytes[offset++];

View File

@ -54,8 +54,7 @@ public class TaxonomyFacetSumFloatAssociations extends FloatTaxonomyFacets {
if (dv == null) { // this reader does not have DocValues for the requested category list if (dv == null) { // this reader does not have DocValues for the requested category list
continue; continue;
} }
BytesRef scratch = new BytesRef();
DocIdSetIterator docs = hits.bits.iterator(); DocIdSetIterator docs = hits.bits.iterator();
int doc; int doc;
@ -63,10 +62,10 @@ public class TaxonomyFacetSumFloatAssociations extends FloatTaxonomyFacets {
//System.out.println(" doc=" + doc); //System.out.println(" doc=" + doc);
// TODO: use OrdinalsReader? we'd need to add a // TODO: use OrdinalsReader? we'd need to add a
// BytesRef getAssociation()? // BytesRef getAssociation()?
dv.get(doc, scratch); final BytesRef bytesRef = dv.get(doc);
byte[] bytes = scratch.bytes; byte[] bytes = bytesRef.bytes;
int end = scratch.offset + scratch.length; int end = bytesRef.offset + bytesRef.length;
int offset = scratch.offset; int offset = bytesRef.offset;
while (offset < end) { while (offset < end) {
int ord = ((bytes[offset]&0xFF) << 24) | int ord = ((bytes[offset]&0xFF) << 24) |
((bytes[offset+1]&0xFF) << 16) | ((bytes[offset+1]&0xFF) << 16) |

View File

@ -54,8 +54,7 @@ public class TaxonomyFacetSumIntAssociations extends IntTaxonomyFacets {
if (dv == null) { // this reader does not have DocValues for the requested category list if (dv == null) { // this reader does not have DocValues for the requested category list
continue; continue;
} }
BytesRef scratch = new BytesRef();
DocIdSetIterator docs = hits.bits.iterator(); DocIdSetIterator docs = hits.bits.iterator();
int doc; int doc;
@ -63,10 +62,10 @@ public class TaxonomyFacetSumIntAssociations extends IntTaxonomyFacets {
//System.out.println(" doc=" + doc); //System.out.println(" doc=" + doc);
// TODO: use OrdinalsReader? we'd need to add a // TODO: use OrdinalsReader? we'd need to add a
// BytesRef getAssociation()? // BytesRef getAssociation()?
dv.get(doc, scratch); final BytesRef bytesRef = dv.get(doc);
byte[] bytes = scratch.bytes; byte[] bytes = bytesRef.bytes;
int end = scratch.offset + scratch.length; int end = bytesRef.offset + bytesRef.length;
int offset = scratch.offset; int offset = bytesRef.offset;
while (offset < end) { while (offset < end) {
int ord = ((bytes[offset]&0xFF) << 24) | int ord = ((bytes[offset]&0xFF) << 24) |
((bytes[offset+1]&0xFF) << 16) | ((bytes[offset+1]&0xFF) << 16) |

View File

@ -47,7 +47,6 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
private static final int DEFAULT_INITIAL_SIZE = 128; private static final int DEFAULT_INITIAL_SIZE = 128;
final String groupField; final String groupField;
final BytesRef scratchBytesRef = new BytesRef();
SortedDocValues groupIndex; SortedDocValues groupIndex;
AtomicReaderContext readerContext; AtomicReaderContext readerContext;
@ -134,17 +133,17 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
@Override @Override
protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException { protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException {
final int ord = groupIndex.getOrd(doc); final int ord = groupIndex.getOrd(doc);
final BytesRef groupValue; BytesRef groupValue;
if (ord == -1) { if (ord == -1) {
groupValue = null; groupValue = null;
} else { } else {
groupIndex.lookupOrd(ord, scratchBytesRef); groupValue = groupIndex.lookupOrd(ord);
groupValue = scratchBytesRef;
} }
GroupHead groupHead = groups.get(groupValue); GroupHead groupHead = groups.get(groupValue);
if (groupHead == null) { if (groupHead == null) {
groupValue = groupValue == null ? null : BytesRef.deepCopyOf(groupValue);
groupHead = new GroupHead(groupValue, sortWithinGroup, doc); groupHead = new GroupHead(groupValue, sortWithinGroup, doc);
groups.put(groupValue == null ? null : BytesRef.deepCopyOf(groupValue), groupHead); groups.put(groupValue, groupHead);
temporalResult.stop = true; temporalResult.stop = true;
} else { } else {
temporalResult.stop = false; temporalResult.stop = false;
@ -183,7 +182,6 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
final FieldComparator<?>[] comparators; final FieldComparator<?>[] comparators;
@SuppressWarnings({"unchecked","rawtypes"})
private GroupHead(BytesRef groupValue, Sort sort, int doc) throws IOException { private GroupHead(BytesRef groupValue, Sort sort, int doc) throws IOException {
super(groupValue, doc + readerContext.docBase); super(groupValue, doc + readerContext.docBase);
final SortField[] sortFields = sort.getSort(); final SortField[] sortFields = sort.getSort();
@ -254,12 +252,11 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
GroupHead groupHead; GroupHead groupHead;
if (!ordSet.exists(key)) { if (!ordSet.exists(key)) {
ordSet.put(key); ordSet.put(key);
BytesRef term; final BytesRef term;
if (key == -1) { if (key == -1) {
term = null; term = null;
} else { } else {
term = new BytesRef(); term = BytesRef.deepCopyOf(groupIndex.lookupOrd(key));
groupIndex.lookupOrd(key, term);
} }
groupHead = new GroupHead(doc, term); groupHead = new GroupHead(doc, term);
collectedGroups.add(groupHead); collectedGroups.add(groupHead);
@ -332,7 +329,7 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
sortOrds[i] = sortsIndex[i].getOrd(doc); sortOrds[i] = sortsIndex[i].getOrd(doc);
sortValues[i] = new BytesRef(); sortValues[i] = new BytesRef();
if (sortOrds[i] != -1) { if (sortOrds[i] != -1) {
sortsIndex[i].get(doc, sortValues[i]); sortValues[i].copyBytes(sortsIndex[i].get(doc));
} }
} }
} }
@ -351,12 +348,8 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
} else { } else {
if (sortOrds[compIDX] < 0) { if (sortOrds[compIDX] < 0) {
// The current segment doesn't contain the sort value we encountered before. Therefore the ord is negative. // The current segment doesn't contain the sort value we encountered before. Therefore the ord is negative.
if (sortsIndex[compIDX].getOrd(doc) == -1) { final BytesRef term = sortsIndex[compIDX].get(doc);
scratchBytesRef.length = 0; return sortValues[compIDX].compareTo(term);
} else {
sortsIndex[compIDX].get(doc, scratchBytesRef);
}
return sortValues[compIDX].compareTo(scratchBytesRef);
} else { } else {
return sortOrds[compIDX] - sortsIndex[compIDX].getOrd(doc); return sortOrds[compIDX] - sortsIndex[compIDX].getOrd(doc);
} }
@ -370,11 +363,7 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
scores[i] = scorer.score(); scores[i] = scorer.score();
} else { } else {
sortOrds[i] = sortsIndex[i].getOrd(doc); sortOrds[i] = sortsIndex[i].getOrd(doc);
if (sortOrds[i] == -1) { sortValues[i].copyBytes(sortsIndex[i].get(doc));
sortValues[i].length = 0;
} else {
sortsIndex[i].get(doc, sortValues[i]);
}
} }
} }
this.doc = doc + readerContext.docBase; this.doc = doc + readerContext.docBase;
@ -422,12 +411,11 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
GroupHead groupHead; GroupHead groupHead;
if (!ordSet.exists(key)) { if (!ordSet.exists(key)) {
ordSet.put(key); ordSet.put(key);
BytesRef term; final BytesRef term;
if (key == -1) { if (key == -1) {
term = null; term = null;
} else { } else {
term = new BytesRef(); term = BytesRef.deepCopyOf(groupIndex.lookupOrd(key));
groupIndex.lookupOrd(key, term);
} }
groupHead = new GroupHead(doc, term); groupHead = new GroupHead(doc, term);
collectedGroups.add(groupHead); collectedGroups.add(groupHead);
@ -487,9 +475,7 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
for (int i = 0; i < sortsIndex.length; i++) { for (int i = 0; i < sortsIndex.length; i++) {
sortOrds[i] = sortsIndex[i].getOrd(doc); sortOrds[i] = sortsIndex[i].getOrd(doc);
sortValues[i] = new BytesRef(); sortValues[i] = new BytesRef();
if (sortOrds[i] != -1) { sortValues[i].copyBytes(sortsIndex[i].get(doc));
sortsIndex[i].get(doc, sortValues[i]);
}
} }
} }
@ -497,12 +483,8 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
public int compare(int compIDX, int doc) throws IOException { public int compare(int compIDX, int doc) throws IOException {
if (sortOrds[compIDX] < 0) { if (sortOrds[compIDX] < 0) {
// The current segment doesn't contain the sort value we encountered before. Therefore the ord is negative. // The current segment doesn't contain the sort value we encountered before. Therefore the ord is negative.
if (sortsIndex[compIDX].getOrd(doc) == -1) { final BytesRef term = sortsIndex[compIDX].get(doc);
scratchBytesRef.length = 0; return sortValues[compIDX].compareTo(term);
} else {
sortsIndex[compIDX].get(doc, scratchBytesRef);
}
return sortValues[compIDX].compareTo(scratchBytesRef);
} else { } else {
return sortOrds[compIDX] - sortsIndex[compIDX].getOrd(doc); return sortOrds[compIDX] - sortsIndex[compIDX].getOrd(doc);
} }
@ -512,11 +494,7 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
public void updateDocHead(int doc) throws IOException { public void updateDocHead(int doc) throws IOException {
for (int i = 0; i < sortsIndex.length; i++) { for (int i = 0; i < sortsIndex.length; i++) {
sortOrds[i] = sortsIndex[i].getOrd(doc); sortOrds[i] = sortsIndex[i].getOrd(doc);
if (sortOrds[i] == -1) { sortValues[i].copyBytes(sortsIndex[i].get(doc));
sortValues[i].length = 0;
} else {
sortsIndex[i].lookupOrd(sortOrds[i], sortValues[i]);
}
} }
this.doc = doc + readerContext.docBase; this.doc = doc + readerContext.docBase;
} }
@ -565,12 +543,11 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
GroupHead groupHead; GroupHead groupHead;
if (!ordSet.exists(key)) { if (!ordSet.exists(key)) {
ordSet.put(key); ordSet.put(key);
BytesRef term; final BytesRef term;
if (key == -1) { if (key == -1) {
term = null; term = null;
} else { } else {
term = new BytesRef(); term = BytesRef.deepCopyOf(groupIndex.lookupOrd(key));
groupIndex.lookupOrd(key, term);
} }
groupHead = new GroupHead(doc, term); groupHead = new GroupHead(doc, term);
collectedGroups.add(groupHead); collectedGroups.add(groupHead);

View File

@ -20,7 +20,6 @@ package org.apache.lucene.search.grouping.term;
import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.grouping.AbstractAllGroupsCollector; import org.apache.lucene.search.grouping.AbstractAllGroupsCollector;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SentinelIntSet; import org.apache.lucene.util.SentinelIntSet;
@ -87,12 +86,11 @@ public class TermAllGroupsCollector extends AbstractAllGroupsCollector<BytesRef>
int key = index.getOrd(doc); int key = index.getOrd(doc);
if (!ordSet.exists(key)) { if (!ordSet.exists(key)) {
ordSet.put(key); ordSet.put(key);
BytesRef term; final BytesRef term;
if (key == -1) { if (key == -1) {
term = null; term = null;
} else { } else {
term = new BytesRef(); term = BytesRef.deepCopyOf(index.lookupOrd(key));
index.lookupOrd(key, term);
} }
groups.add(term); groups.add(term);
} }

View File

@ -20,7 +20,6 @@ package org.apache.lucene.search.grouping.term;
import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.grouping.AbstractDistinctValuesCollector; import org.apache.lucene.search.grouping.AbstractDistinctValuesCollector;
import org.apache.lucene.search.grouping.SearchGroup; import org.apache.lucene.search.grouping.SearchGroup;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -80,9 +79,8 @@ public class TermDistinctValuesCollector extends AbstractDistinctValuesCollector
if (countOrd == -1) { if (countOrd == -1) {
gc.uniqueValues.add(null); gc.uniqueValues.add(null);
} else { } else {
BytesRef br = new BytesRef(); BytesRef term = BytesRef.deepCopyOf(countFieldTermIndex.lookupOrd(countOrd));
countFieldTermIndex.lookupOrd(countOrd, br); gc.uniqueValues.add(term);
gc.uniqueValues.add(br);
} }
gc.ords = Arrays.copyOf(gc.ords, gc.ords.length + 1); gc.ords = Arrays.copyOf(gc.ords, gc.ords.length + 1);

View File

@ -22,7 +22,6 @@ import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.search.grouping.AbstractFirstPassGroupingCollector; import org.apache.lucene.search.grouping.AbstractFirstPassGroupingCollector;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -36,7 +35,6 @@ import org.apache.lucene.util.BytesRef;
*/ */
public class TermFirstPassGroupingCollector extends AbstractFirstPassGroupingCollector<BytesRef> { public class TermFirstPassGroupingCollector extends AbstractFirstPassGroupingCollector<BytesRef> {
private final BytesRef scratchBytesRef = new BytesRef();
private SortedDocValues index; private SortedDocValues index;
private String groupField; private String groupField;
@ -68,8 +66,7 @@ public class TermFirstPassGroupingCollector extends AbstractFirstPassGroupingCol
if (ord == -1) { if (ord == -1) {
return null; return null;
} else { } else {
index.lookupOrd(ord, scratchBytesRef); return index.lookupOrd(ord);
return scratchBytesRef;
} }
} }

View File

@ -107,16 +107,14 @@ public abstract class TermGroupFacetCollector extends AbstractGroupFacetCollecto
if (groupOrd == -1) { if (groupOrd == -1) {
groupKey = null; groupKey = null;
} else { } else {
groupKey = new BytesRef(); groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
groupFieldTermsIndex.lookupOrd(groupOrd, groupKey);
} }
BytesRef facetKey; BytesRef facetKey;
if (facetOrd == -1) { if (facetOrd == -1) {
facetKey = null; facetKey = null;
} else { } else {
facetKey = new BytesRef(); facetKey = BytesRef.deepCopyOf(facetFieldTermsIndex.lookupOrd(facetOrd));
facetFieldTermsIndex.lookupOrd(facetOrd, facetKey);
} }
groupedFacetHits.add(new GroupedFacetHit(groupKey, facetKey)); groupedFacetHits.add(new GroupedFacetHit(groupKey, facetKey));
@ -224,8 +222,7 @@ public abstract class TermGroupFacetCollector extends AbstractGroupFacetCollecto
if (groupOrd == -1) { if (groupOrd == -1) {
groupKey = null; groupKey = null;
} else { } else {
groupKey = new BytesRef(); groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
groupFieldTermsIndex.lookupOrd(groupOrd, groupKey);
} }
groupedFacetHits.add(new GroupedFacetHit(groupKey, null)); groupedFacetHits.add(new GroupedFacetHit(groupKey, null));
return; return;
@ -263,16 +260,14 @@ public abstract class TermGroupFacetCollector extends AbstractGroupFacetCollecto
if (groupOrd == -1) { if (groupOrd == -1) {
groupKey = null; groupKey = null;
} else { } else {
groupKey = new BytesRef(); groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
groupFieldTermsIndex.lookupOrd(groupOrd, groupKey);
} }
final BytesRef facetValue; final BytesRef facetValue;
if (facetOrd == facetFieldNumTerms) { if (facetOrd == facetFieldNumTerms) {
facetValue = null; facetValue = null;
} else { } else {
facetFieldDocTermOrds.lookupOrd(facetOrd, scratch); facetValue = BytesRef.deepCopyOf(facetFieldDocTermOrds.lookupOrd(facetOrd));
facetValue = BytesRef.deepCopyOf(scratch); // must we?
} }
groupedFacetHits.add(new GroupedFacetHit(groupKey, facetValue)); groupedFacetHits.add(new GroupedFacetHit(groupKey, facetValue));
} }

View File

@ -23,9 +23,6 @@ import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.BytesRefHash;
@ -78,8 +75,8 @@ abstract class TermsCollector extends SimpleCollector {
docTermOrds.setDocument(doc); docTermOrds.setDocument(doc);
long ord; long ord;
while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
docTermOrds.lookupOrd(ord, scratch); final BytesRef term = docTermOrds.lookupOrd(ord);
collectorTerms.add(scratch); collectorTerms.add(term);
} }
} }
@ -101,8 +98,8 @@ abstract class TermsCollector extends SimpleCollector {
@Override @Override
public void collect(int doc) throws IOException { public void collect(int doc) throws IOException {
fromDocTerms.get(doc, spare); final BytesRef term = fromDocTerms.get(doc);
collectorTerms.add(spare); collectorTerms.add(term);
} }
@Override @Override

View File

@ -93,7 +93,6 @@ abstract class TermsWithScoreCollector extends SimpleCollector {
// impl that works with single value per document // impl that works with single value per document
static class SV extends TermsWithScoreCollector { static class SV extends TermsWithScoreCollector {
final BytesRef spare = new BytesRef();
BinaryDocValues fromDocTerms; BinaryDocValues fromDocTerms;
SV(String field, ScoreMode scoreMode) { SV(String field, ScoreMode scoreMode) {
@ -102,8 +101,7 @@ abstract class TermsWithScoreCollector extends SimpleCollector {
@Override @Override
public void collect(int doc) throws IOException { public void collect(int doc) throws IOException {
fromDocTerms.get(doc, spare); int ord = collectedTerms.add(fromDocTerms.get(doc));
int ord = collectedTerms.add(spare);
if (ord < 0) { if (ord < 0) {
ord = -ord - 1; ord = -ord - 1;
} else { } else {
@ -144,8 +142,7 @@ abstract class TermsWithScoreCollector extends SimpleCollector {
@Override @Override
public void collect(int doc) throws IOException { public void collect(int doc) throws IOException {
fromDocTerms.get(doc, spare); int ord = collectedTerms.add(fromDocTerms.get(doc));
int ord = collectedTerms.add(spare);
if (ord < 0) { if (ord < 0) {
ord = -ord - 1; ord = -ord - 1;
} else { } else {
@ -183,7 +180,6 @@ abstract class TermsWithScoreCollector extends SimpleCollector {
static class MV extends TermsWithScoreCollector { static class MV extends TermsWithScoreCollector {
SortedSetDocValues fromDocTermOrds; SortedSetDocValues fromDocTermOrds;
final BytesRef scratch = new BytesRef();
MV(String field, ScoreMode scoreMode) { MV(String field, ScoreMode scoreMode) {
super(field, scoreMode); super(field, scoreMode);
@ -194,9 +190,7 @@ abstract class TermsWithScoreCollector extends SimpleCollector {
fromDocTermOrds.setDocument(doc); fromDocTermOrds.setDocument(doc);
long ord; long ord;
while ((ord = fromDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = fromDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
fromDocTermOrds.lookupOrd(ord, scratch); int termID = collectedTerms.add(fromDocTermOrds.lookupOrd(ord));
int termID = collectedTerms.add(scratch);
if (termID < 0) { if (termID < 0) {
termID = -termID - 1; termID = -termID - 1;
} else { } else {
@ -233,9 +227,7 @@ abstract class TermsWithScoreCollector extends SimpleCollector {
fromDocTermOrds.setDocument(doc); fromDocTermOrds.setDocument(doc);
long ord; long ord;
while ((ord = fromDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = fromDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
fromDocTermOrds.lookupOrd(ord, scratch); int termID = collectedTerms.add(fromDocTermOrds.lookupOrd(ord));
int termID = collectedTerms.add(scratch);
if (termID < 0) { if (termID < 0) {
termID = -termID - 1; termID = -termID - 1;
} else { } else {

View File

@ -662,14 +662,13 @@ public class TestJoinUtil extends LuceneTestCase {
private Scorer scorer; private Scorer scorer;
private SortedSetDocValues docTermOrds; private SortedSetDocValues docTermOrds;
final BytesRef joinValue = new BytesRef();
@Override @Override
public void collect(int doc) throws IOException { public void collect(int doc) throws IOException {
docTermOrds.setDocument(doc); docTermOrds.setDocument(doc);
long ord; long ord;
while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
docTermOrds.lookupOrd(ord, joinValue); final BytesRef joinValue = docTermOrds.lookupOrd(ord);
JoinScore joinScore = joinValueToJoinScores.get(joinValue); JoinScore joinScore = joinValueToJoinScores.get(joinValue);
if (joinScore == null) { if (joinScore == null) {
joinValueToJoinScores.put(BytesRef.deepCopyOf(joinValue), joinScore = new JoinScore()); joinValueToJoinScores.put(BytesRef.deepCopyOf(joinValue), joinScore = new JoinScore());
@ -699,12 +698,10 @@ public class TestJoinUtil extends LuceneTestCase {
private Scorer scorer; private Scorer scorer;
private BinaryDocValues terms; private BinaryDocValues terms;
private Bits docsWithField; private Bits docsWithField;
private final BytesRef spare = new BytesRef();
@Override @Override
public void collect(int doc) throws IOException { public void collect(int doc) throws IOException {
terms.get(doc, spare); final BytesRef joinValue = terms.get(doc);
BytesRef joinValue = spare;
if (joinValue.length == 0 && !docsWithField.get(doc)) { if (joinValue.length == 0 && !docsWithField.get(doc)) {
return; return;
} }
@ -764,7 +761,6 @@ public class TestJoinUtil extends LuceneTestCase {
toSearcher.search(new MatchAllDocsQuery(), new SimpleCollector() { toSearcher.search(new MatchAllDocsQuery(), new SimpleCollector() {
private SortedSetDocValues docTermOrds; private SortedSetDocValues docTermOrds;
private final BytesRef scratch = new BytesRef();
private int docBase; private int docBase;
@Override @Override
@ -772,8 +768,8 @@ public class TestJoinUtil extends LuceneTestCase {
docTermOrds.setDocument(doc); docTermOrds.setDocument(doc);
long ord; long ord;
while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
docTermOrds.lookupOrd(ord, scratch); final BytesRef joinValue = docTermOrds.lookupOrd(ord);
JoinScore joinScore = joinValueToJoinScores.get(scratch); JoinScore joinScore = joinValueToJoinScores.get(joinValue);
if (joinScore == null) { if (joinScore == null) {
continue; continue;
} }
@ -803,12 +799,11 @@ public class TestJoinUtil extends LuceneTestCase {
private BinaryDocValues terms; private BinaryDocValues terms;
private int docBase; private int docBase;
private final BytesRef spare = new BytesRef();
@Override @Override
public void collect(int doc) { public void collect(int doc) {
terms.get(doc, spare); final BytesRef joinValue = terms.get(doc);
JoinScore joinScore = joinValueToJoinScores.get(spare); JoinScore joinScore = joinValueToJoinScores.get(joinValue);
if (joinScore == null) { if (joinScore == null) {
return; return;
} }

View File

@ -201,8 +201,8 @@ public class SortingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
in.get(docMap.newToOld(docID), result); return in.get(docMap.newToOld(docID));
} }
} }
@ -259,8 +259,8 @@ public class SortingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
in.lookupOrd(ord, result); return in.lookupOrd(ord);
} }
@Override @Override
@ -269,8 +269,8 @@ public class SortingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
in.get(docMap.newToOld(docID), result); return in.get(docMap.newToOld(docID));
} }
@Override @Override
@ -300,8 +300,8 @@ public class SortingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
in.lookupOrd(ord, result); return in.lookupOrd(ord);
} }
@Override @Override

View File

@ -32,7 +32,6 @@ import org.apache.lucene.index.Fields;
import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -773,7 +772,7 @@ public class DocTermOrds {
/** Returns a SortedSetDocValues view of this instance */ /** Returns a SortedSetDocValues view of this instance */
public SortedSetDocValues iterator(AtomicReader reader) throws IOException { public SortedSetDocValues iterator(AtomicReader reader) throws IOException {
if (isEmpty()) { if (isEmpty()) {
return DocValues.EMPTY_SORTED_SET; return DocValues.emptySortedSet();
} else { } else {
return new Iterator(reader); return new Iterator(reader);
} }
@ -874,16 +873,12 @@ public class DocTermOrds {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
BytesRef ref = null;
try { try {
ref = DocTermOrds.this.lookupTerm(te, (int) ord); return DocTermOrds.this.lookupTerm(te, (int) ord);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
result.bytes = ref.bytes;
result.offset = ref.offset;
result.length = ref.length;
} }
@Override @Override

View File

@ -431,11 +431,11 @@ class FieldCacheImpl implements FieldCache {
} else { } else {
final FieldInfo info = reader.getFieldInfos().fieldInfo(field); final FieldInfo info = reader.getFieldInfos().fieldInfo(field);
if (info == null) { if (info == null) {
return DocValues.EMPTY_NUMERIC; return DocValues.emptyNumeric();
} else if (info.hasDocValues()) { } else if (info.hasDocValues()) {
throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType()); throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
} else if (!info.isIndexed()) { } else if (!info.isIndexed()) {
return DocValues.EMPTY_NUMERIC; return DocValues.emptyNumeric();
} }
return (NumericDocValues) caches.get(Long.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField); return (NumericDocValues) caches.get(Long.TYPE).get(reader, new CacheKey(field, parser), setDocsWithField);
} }
@ -523,7 +523,7 @@ class FieldCacheImpl implements FieldCache {
} }
} }
public static class SortedDocValuesImpl extends SortedDocValues { public static class SortedDocValuesImpl {
private final PagedBytes.Reader bytes; private final PagedBytes.Reader bytes;
private final MonotonicAppendingLongBuffer termOrdToBytesOffset; private final MonotonicAppendingLongBuffer termOrdToBytesOffset;
private final PackedInts.Reader docToTermOrd; private final PackedInts.Reader docToTermOrd;
@ -535,26 +535,33 @@ class FieldCacheImpl implements FieldCache {
this.termOrdToBytesOffset = termOrdToBytesOffset; this.termOrdToBytesOffset = termOrdToBytesOffset;
this.numOrd = numOrd; this.numOrd = numOrd;
} }
public SortedDocValues iterator() {
final BytesRef term = new BytesRef();
return new SortedDocValues() {
@Override @Override
public int getValueCount() { public int getValueCount() {
return numOrd; return numOrd;
} }
@Override @Override
public int getOrd(int docID) { public int getOrd(int docID) {
// Subtract 1, matching the 1+ord we did when // Subtract 1, matching the 1+ord we did when
// storing, so that missing values, which are 0 in the // storing, so that missing values, which are 0 in the
// packed ints, are returned as -1 ord: // packed ints, are returned as -1 ord:
return (int) docToTermOrd.get(docID)-1; return (int) docToTermOrd.get(docID)-1;
} }
@Override @Override
public void lookupOrd(int ord, BytesRef ret) { public BytesRef lookupOrd(int ord) {
if (ord < 0) { if (ord < 0) {
throw new IllegalArgumentException("ord must be >=0 (got ord=" + ord + ")"); throw new IllegalArgumentException("ord must be >=0 (got ord=" + ord + ")");
} }
bytes.fill(ret, termOrdToBytesOffset.get(ord)); bytes.fill(term, termOrdToBytesOffset.get(ord));
return term;
}
};
} }
} }
@ -571,15 +578,16 @@ class FieldCacheImpl implements FieldCache {
} else { } else {
final FieldInfo info = reader.getFieldInfos().fieldInfo(field); final FieldInfo info = reader.getFieldInfos().fieldInfo(field);
if (info == null) { if (info == null) {
return DocValues.EMPTY_SORTED; return DocValues.emptySorted();
} else if (info.hasDocValues()) { } else if (info.hasDocValues()) {
// we don't try to build a sorted instance from numeric/binary doc // we don't try to build a sorted instance from numeric/binary doc
// values because dedup can be very costly // values because dedup can be very costly
throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType()); throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
} else if (!info.isIndexed()) { } else if (!info.isIndexed()) {
return DocValues.EMPTY_SORTED; return DocValues.emptySorted();
} }
return (SortedDocValues) caches.get(SortedDocValues.class).get(reader, new CacheKey(field, acceptableOverheadRatio), false); SortedDocValuesImpl impl = (SortedDocValuesImpl) caches.get(SortedDocValues.class).get(reader, new CacheKey(field, acceptableOverheadRatio), false);
return impl.iterator();
} }
} }
@ -674,22 +682,23 @@ class FieldCacheImpl implements FieldCache {
private static class BinaryDocValuesImpl extends BinaryDocValues { private static class BinaryDocValuesImpl extends BinaryDocValues {
private final PagedBytes.Reader bytes; private final PagedBytes.Reader bytes;
private final PackedInts.Reader docToOffset; private final PackedInts.Reader docToOffset;
private final BytesRef term;
public BinaryDocValuesImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset) { public BinaryDocValuesImpl(PagedBytes.Reader bytes, PackedInts.Reader docToOffset) {
this.bytes = bytes; this.bytes = bytes;
this.docToOffset = docToOffset; this.docToOffset = docToOffset;
term = new BytesRef();
} }
@Override @Override
public void get(int docID, BytesRef ret) { public BytesRef get(int docID) {
final int pointer = (int) docToOffset.get(docID); final int pointer = (int) docToOffset.get(docID);
if (pointer == 0) { if (pointer == 0) {
ret.bytes = BytesRef.EMPTY_BYTES; term.length = 0;
ret.offset = 0;
ret.length = 0;
} else { } else {
bytes.fill(ret, pointer); bytes.fill(term, pointer);
} }
return term;
} }
} }
@ -713,11 +722,11 @@ class FieldCacheImpl implements FieldCache {
final FieldInfo info = reader.getFieldInfos().fieldInfo(field); final FieldInfo info = reader.getFieldInfos().fieldInfo(field);
if (info == null) { if (info == null) {
return DocValues.EMPTY_BINARY; return DocValues.emptyBinary();
} else if (info.hasDocValues()) { } else if (info.hasDocValues()) {
throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType()); throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
} else if (!info.isIndexed()) { } else if (!info.isIndexed()) {
return DocValues.EMPTY_BINARY; return DocValues.emptyBinary();
} }
return (BinaryDocValues) caches.get(BinaryDocValues.class).get(reader, new CacheKey(field, acceptableOverheadRatio), setDocsWithField); return (BinaryDocValues) caches.get(BinaryDocValues.class).get(reader, new CacheKey(field, acceptableOverheadRatio), setDocsWithField);
@ -835,18 +844,18 @@ class FieldCacheImpl implements FieldCache {
final FieldInfo info = reader.getFieldInfos().fieldInfo(field); final FieldInfo info = reader.getFieldInfos().fieldInfo(field);
if (info == null) { if (info == null) {
return DocValues.EMPTY_SORTED_SET; return DocValues.emptySortedSet();
} else if (info.hasDocValues()) { } else if (info.hasDocValues()) {
throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType()); throw new IllegalStateException("Type mismatch: " + field + " was indexed as " + info.getDocValuesType());
} else if (!info.isIndexed()) { } else if (!info.isIndexed()) {
return DocValues.EMPTY_SORTED_SET; return DocValues.emptySortedSet();
} }
// ok we need to uninvert. check if we can optimize a bit. // ok we need to uninvert. check if we can optimize a bit.
Terms terms = reader.terms(field); Terms terms = reader.terms(field);
if (terms == null) { if (terms == null) {
return DocValues.EMPTY_SORTED_SET; return DocValues.emptySortedSet();
} else { } else {
// if #postings = #docswithfield we know that the field is "single valued enough". // if #postings = #docswithfield we know that the field is "single valued enough".
// its possible the same term might appear twice in the same document, but SORTED_SET discards frequency. // its possible the same term might appear twice in the same document, but SORTED_SET discards frequency.

View File

@ -239,9 +239,8 @@ public abstract class SorterTestBase extends LuceneTestCase {
@Test @Test
public void testBinaryDocValuesField() throws Exception { public void testBinaryDocValuesField() throws Exception {
BinaryDocValues dv = reader.getBinaryDocValues(BINARY_DV_FIELD); BinaryDocValues dv = reader.getBinaryDocValues(BINARY_DV_FIELD);
BytesRef bytes = new BytesRef();
for (int i = 0; i < reader.maxDoc(); i++) { for (int i = 0; i < reader.maxDoc(); i++) {
dv.get(i, bytes); final BytesRef bytes = dv.get(i);
assertEquals("incorrect binary DocValues for doc " + i, sortedValues[i].toString(), bytes.utf8ToString()); assertEquals("incorrect binary DocValues for doc " + i, sortedValues[i].toString(), bytes.utf8ToString());
} }
} }
@ -367,9 +366,8 @@ public abstract class SorterTestBase extends LuceneTestCase {
public void testSortedDocValuesField() throws Exception { public void testSortedDocValuesField() throws Exception {
SortedDocValues dv = reader.getSortedDocValues(SORTED_DV_FIELD); SortedDocValues dv = reader.getSortedDocValues(SORTED_DV_FIELD);
int maxDoc = reader.maxDoc(); int maxDoc = reader.maxDoc();
BytesRef bytes = new BytesRef();
for (int i = 0; i < maxDoc; i++) { for (int i = 0; i < maxDoc; i++) {
dv.get(i, bytes); final BytesRef bytes = dv.get(i);
assertEquals("incorrect sorted DocValues for doc " + i, sortedValues[i].toString(), bytes.utf8ToString()); assertEquals("incorrect sorted DocValues for doc " + i, sortedValues[i].toString(), bytes.utf8ToString());
} }
} }
@ -379,13 +377,12 @@ public abstract class SorterTestBase extends LuceneTestCase {
assumeTrue("default codec does not support SORTED_SET", defaultCodecSupportsSortedSet()); assumeTrue("default codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
SortedSetDocValues dv = reader.getSortedSetDocValues(SORTED_SET_DV_FIELD); SortedSetDocValues dv = reader.getSortedSetDocValues(SORTED_SET_DV_FIELD);
int maxDoc = reader.maxDoc(); int maxDoc = reader.maxDoc();
BytesRef bytes = new BytesRef();
for (int i = 0; i < maxDoc; i++) { for (int i = 0; i < maxDoc; i++) {
dv.setDocument(i); dv.setDocument(i);
dv.lookupOrd(dv.nextOrd(), bytes); BytesRef bytes = dv.lookupOrd(dv.nextOrd());
int value = sortedValues[i].intValue(); int value = sortedValues[i].intValue();
assertEquals("incorrect sorted-set DocValues for doc " + i, Integer.valueOf(value).toString(), bytes.utf8ToString()); assertEquals("incorrect sorted-set DocValues for doc " + i, Integer.valueOf(value).toString(), bytes.utf8ToString());
dv.lookupOrd(dv.nextOrd(), bytes); bytes = dv.lookupOrd(dv.nextOrd());
assertEquals("incorrect sorted-set DocValues for doc " + i, Integer.valueOf(value + 1).toString(), bytes.utf8ToString()); assertEquals("incorrect sorted-set DocValues for doc " + i, Integer.valueOf(value + 1).toString(), bytes.utf8ToString());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd());
} }

View File

@ -456,11 +456,10 @@ public class TestDocTermOrds extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals(-3, NumericUtils.prefixCodedToInt(value)); assertEquals(-3, NumericUtils.prefixCodedToInt(value));
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals(5, NumericUtils.prefixCodedToInt(value)); assertEquals(5, NumericUtils.prefixCodedToInt(value));
ir.close(); ir.close();
@ -498,11 +497,10 @@ public class TestDocTermOrds extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals(-3, NumericUtils.prefixCodedToLong(value)); assertEquals(-3, NumericUtils.prefixCodedToLong(value));
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals(5, NumericUtils.prefixCodedToLong(value)); assertEquals(5, NumericUtils.prefixCodedToLong(value));
ir.close(); ir.close();
@ -640,11 +638,10 @@ public class TestDocTermOrds extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals("bar", value.utf8ToString()); assertEquals("bar", value.utf8ToString());
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals("baz", value.utf8ToString()); assertEquals("baz", value.utf8ToString());
ir.close(); ir.close();

View File

@ -201,28 +201,23 @@ public class TestFieldCache extends LuceneTestCase {
// getTermsIndex // getTermsIndex
SortedDocValues termsIndex = cache.getTermsIndex(reader, "theRandomUnicodeString"); SortedDocValues termsIndex = cache.getTermsIndex(reader, "theRandomUnicodeString");
assertSame("Second request to cache return same array", termsIndex, cache.getTermsIndex(reader, "theRandomUnicodeString"));
final BytesRef br = new BytesRef();
for (int i = 0; i < NUM_DOCS; i++) { for (int i = 0; i < NUM_DOCS; i++) {
final BytesRef term; final String s;
final int ord = termsIndex.getOrd(i); final int ord = termsIndex.getOrd(i);
if (ord == -1) { if (ord == -1) {
term = null; s = null;
} else { } else {
termsIndex.lookupOrd(ord, br); s = termsIndex.lookupOrd(ord).utf8ToString();
term = br;
} }
final String s = term == null ? null : term.utf8ToString();
assertTrue("for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i], unicodeStrings[i] == null || unicodeStrings[i].equals(s)); assertTrue("for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i], unicodeStrings[i] == null || unicodeStrings[i].equals(s));
} }
int nTerms = termsIndex.getValueCount(); int nTerms = termsIndex.getValueCount();
TermsEnum tenum = termsIndex.termsEnum(); TermsEnum tenum = termsIndex.termsEnum();
BytesRef val = new BytesRef();
for (int i=0; i<nTerms; i++) { for (int i=0; i<nTerms; i++) {
BytesRef val1 = tenum.next(); BytesRef val1 = BytesRef.deepCopyOf(tenum.next());
termsIndex.lookupOrd(i, val); final BytesRef val = termsIndex.lookupOrd(i);
// System.out.println("i="+i); // System.out.println("i="+i);
assertEquals(val, val1); assertEquals(val, val1);
} }
@ -231,13 +226,13 @@ public class TestFieldCache extends LuceneTestCase {
int num = atLeast(100); int num = atLeast(100);
for (int i = 0; i < num; i++) { for (int i = 0; i < num; i++) {
int k = random().nextInt(nTerms); int k = random().nextInt(nTerms);
termsIndex.lookupOrd(k, val); final BytesRef val = BytesRef.deepCopyOf(termsIndex.lookupOrd(k));
assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seekCeil(val)); assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seekCeil(val));
assertEquals(val, tenum.term()); assertEquals(val, tenum.term());
} }
for(int i=0;i<nTerms;i++) { for(int i=0;i<nTerms;i++) {
termsIndex.lookupOrd(i, val); final BytesRef val = BytesRef.deepCopyOf(termsIndex.lookupOrd(i));
assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seekCeil(val)); assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seekCeil(val));
assertEquals(val, tenum.term()); assertEquals(val, tenum.term());
} }
@ -250,14 +245,12 @@ public class TestFieldCache extends LuceneTestCase {
assertSame("Second request to cache return same array", terms, cache.getTerms(reader, "theRandomUnicodeString", true)); assertSame("Second request to cache return same array", terms, cache.getTerms(reader, "theRandomUnicodeString", true));
Bits bits = cache.getDocsWithField(reader, "theRandomUnicodeString"); Bits bits = cache.getDocsWithField(reader, "theRandomUnicodeString");
for (int i = 0; i < NUM_DOCS; i++) { for (int i = 0; i < NUM_DOCS; i++) {
terms.get(i, br); final String s;
final BytesRef term;
if (!bits.get(i)) { if (!bits.get(i)) {
term = null; s = null;
} else { } else {
term = br; s = terms.get(i).utf8ToString();
} }
final String s = term == null ? null : term.utf8ToString();
assertTrue("for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i], unicodeStrings[i] == null || unicodeStrings[i].equals(s)); assertTrue("for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i], unicodeStrings[i] == null || unicodeStrings[i].equals(s));
} }
@ -282,8 +275,7 @@ public class TestFieldCache extends LuceneTestCase {
} }
long ord = termOrds.nextOrd(); long ord = termOrds.nextOrd();
assert ord != SortedSetDocValues.NO_MORE_ORDS; assert ord != SortedSetDocValues.NO_MORE_ORDS;
BytesRef scratch = new BytesRef(); BytesRef scratch = termOrds.lookupOrd(ord);
termOrds.lookupOrd(ord, scratch);
assertEquals(v, scratch); assertEquals(v, scratch);
} }
assertEquals(SortedSetDocValues.NO_MORE_ORDS, termOrds.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, termOrds.nextOrd());
@ -448,8 +440,6 @@ public class TestFieldCache extends LuceneTestCase {
iw.shutdown(); iw.shutdown();
AtomicReader ar = getOnlySegmentReader(ir); AtomicReader ar = getOnlySegmentReader(ir);
BytesRef scratch = new BytesRef();
// Binary type: can be retrieved via getTerms() // Binary type: can be retrieved via getTerms()
try { try {
FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.NUMERIC_UTILS_INT_PARSER, false); FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.NUMERIC_UTILS_INT_PARSER, false);
@ -457,8 +447,8 @@ public class TestFieldCache extends LuceneTestCase {
} catch (IllegalStateException expected) {} } catch (IllegalStateException expected) {}
BinaryDocValues binary = FieldCache.DEFAULT.getTerms(ar, "binary", true); BinaryDocValues binary = FieldCache.DEFAULT.getTerms(ar, "binary", true);
binary.get(0, scratch); final BytesRef term = binary.get(0);
assertEquals("binary value", scratch.utf8ToString()); assertEquals("binary value", term.utf8ToString());
try { try {
FieldCache.DEFAULT.getTermsIndex(ar, "binary"); FieldCache.DEFAULT.getTermsIndex(ar, "binary");
@ -490,13 +480,13 @@ public class TestFieldCache extends LuceneTestCase {
} catch (IllegalStateException expected) {} } catch (IllegalStateException expected) {}
binary = FieldCache.DEFAULT.getTerms(ar, "sorted", true); binary = FieldCache.DEFAULT.getTerms(ar, "sorted", true);
binary.get(0, scratch); BytesRef scratch = binary.get(0);
assertEquals("sorted value", scratch.utf8ToString()); assertEquals("sorted value", scratch.utf8ToString());
SortedDocValues sorted = FieldCache.DEFAULT.getTermsIndex(ar, "sorted"); SortedDocValues sorted = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
assertEquals(0, sorted.getOrd(0)); assertEquals(0, sorted.getOrd(0));
assertEquals(1, sorted.getValueCount()); assertEquals(1, sorted.getValueCount());
sorted.get(0, scratch); scratch = sorted.get(0);
assertEquals("sorted value", scratch.utf8ToString()); assertEquals("sorted value", scratch.utf8ToString());
SortedSetDocValues sortedSet = FieldCache.DEFAULT.getDocTermOrds(ar, "sorted", null); SortedSetDocValues sortedSet = FieldCache.DEFAULT.getDocTermOrds(ar, "sorted", null);
@ -598,14 +588,13 @@ public class TestFieldCache extends LuceneTestCase {
NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true); NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
assertEquals(0, doubles.get(0)); assertEquals(0, doubles.get(0));
BytesRef scratch = new BytesRef();
BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true); BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
binaries.get(0, scratch); BytesRef scratch = binaries.get(0);
assertEquals(0, scratch.length); assertEquals(0, scratch.length);
SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex"); SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex");
assertEquals(-1, sorted.getOrd(0)); assertEquals(-1, sorted.getOrd(0));
sorted.get(0, scratch); scratch = sorted.get(0);
assertEquals(0, scratch.length); assertEquals(0, scratch.length);
SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued", null); SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued", null);
@ -657,14 +646,13 @@ public class TestFieldCache extends LuceneTestCase {
NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true); NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
assertEquals(0, doubles.get(0)); assertEquals(0, doubles.get(0));
BytesRef scratch = new BytesRef();
BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true); BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
binaries.get(0, scratch); BytesRef scratch = binaries.get(0);
assertEquals(0, scratch.length); assertEquals(0, scratch.length);
SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex"); SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex");
assertEquals(-1, sorted.getOrd(0)); assertEquals(-1, sorted.getOrd(0));
sorted.get(0, scratch); scratch = sorted.get(0);
assertEquals(0, scratch.length); assertEquals(0, scratch.length);
SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued", null); SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued", null);

View File

@ -1054,7 +1054,10 @@ public class TestFieldCacheSort extends LuceneTestCase {
// this should not throw AIOOBE or RuntimeEx // this should not throw AIOOBE or RuntimeEx
IndexReader reader = UninvertingReader.wrap(DirectoryReader.open(indexStore), IndexReader reader = UninvertingReader.wrap(DirectoryReader.open(indexStore),
Collections.singletonMap("string", Type.SORTED)); Collections.singletonMap("string", Type.SORTED));
IndexSearcher searcher = newSearcher(reader); // NOTE: we can't wrap this with newSearcher, because when the API is abused in this way,
// the number of ords can exceed the number of documents, and AssertingAtomicReader will get angry,
// rightfully so (its a broken dv)
IndexSearcher searcher = new IndexSearcher(reader);
searcher.search(new MatchAllDocsQuery(), null, 500, sort); searcher.search(new MatchAllDocsQuery(), null, 500, sort);
reader.close(); reader.close();
indexStore.close(); indexStore.close();

View File

@ -0,0 +1,288 @@
package org.apache.lucene.uninverting;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.uninverting.UninvertingReader.Type;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
/**
 * Random sorting tests with uninversion: indexes random (sometimes missing) string
 * values as plain indexed fields, wraps the reader with {@link UninvertingReader}
 * so they appear as SORTED doc values, then verifies randomized sorted searches
 * against an expected ordering computed in-memory.
 */
public class TestFieldCacheSortRandom extends LuceneTestCase {

  /**
   * Builds an index of {@code NUM_DOCS} documents where ~10% are missing the
   * "stringdv" value, then runs many random sorted searches (with a random
   * filter) and asserts the returned sort values match the expected ordering,
   * including missing-first/missing-last semantics.
   */
  public void testRandomStringSort() throws Exception {
    // Derive a private Random from the test's seeded random() so this method's
    // draws are reproducible under the test framework's seed.
    Random random = new Random(random().nextLong());

    final int NUM_DOCS = atLeast(100);
    final Directory dir = newDirectory();
    final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
    final boolean allowDups = random.nextBoolean();
    // 'seen' enforces uniqueness of values when allowDups == false.
    final Set<String> seen = new HashSet<>();
    final int maxLength = TestUtil.nextInt(random, 5, 100);
    if (VERBOSE) {
      System.out.println("TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups);
    }

    int numDocs = 0;
    // docValues[i] holds the value indexed for doc i, or null if doc i is missing it.
    final List<BytesRef> docValues = new ArrayList<>();
    // TODO: deletions
    while (numDocs < NUM_DOCS) {
      final Document doc = new Document();

      // 10% of the time, the document is missing the value:
      // NOTE(review): 'br' is only assigned in the "missing" branch below and is
      // never read; it looks like dead residue (or a dropped assignment in this
      // diff export) — confirm against the committed file.
      final BytesRef br;
      // NOTE(review): uses the framework's random() here rather than the local
      // 'random' used everywhere else in this method — likely intentional but
      // worth confirming.
      if (random().nextInt(10) != 7) {
        final String s;
        if (random.nextBoolean()) {
          s = TestUtil.randomSimpleString(random, maxLength);
        } else {
          s = TestUtil.randomUnicodeString(random, maxLength);
        }

        if (!allowDups) {
          if (seen.contains(s)) {
            // Duplicate value when dups are disallowed: retry without indexing.
            continue;
          }
          seen.add(s);
        }

        if (VERBOSE) {
          System.out.println(" " + numDocs + ": s=" + s);
        }

        // Indexed (not doc-valued) field: UninvertingReader will expose it as SORTED.
        doc.add(new StringField("stringdv", s, Field.Store.NO));
        docValues.add(new BytesRef(s));

      } else {
        br = null;
        if (VERBOSE) {
          System.out.println(" " + numDocs + ": <missing>");
        }
        docValues.add(null);
      }

      doc.add(new IntField("id", numDocs, Field.Store.YES));
      writer.addDocument(doc);
      numDocs++;

      if (random.nextInt(40) == 17) {
        // force flush
        writer.getReader().close();
      }
    }

    // Map the indexed fields to the doc-values types UninvertingReader should synthesize.
    Map<String,UninvertingReader.Type> mapping = new HashMap<>();
    mapping.put("stringdv", Type.SORTED);
    mapping.put("id", Type.INTEGER);
    final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping);
    writer.shutdown();
    if (VERBOSE) {
      System.out.println(" reader=" + r);
    }

    final IndexSearcher s = newSearcher(r, false);
    final int ITERS = atLeast(100);
    for(int iter=0;iter<ITERS;iter++) {
      final boolean reverse = random.nextBoolean();

      final TopFieldDocs hits;
      final SortField sf;
      final boolean sortMissingLast;
      final boolean missingIsNull;
      sf = new SortField("stringdv", SortField.Type.STRING, reverse);
      sortMissingLast = random().nextBoolean();
      missingIsNull = true;

      if (sortMissingLast) {
        sf.setMissingValue(SortField.STRING_LAST);
      }

      final Sort sort;
      if (random.nextBoolean()) {
        sort = new Sort(sf);
      } else {
        // Add a doc-id tiebreak half the time.
        sort = new Sort(sf, SortField.FIELD_DOC);
      }
      final int hitCount = TestUtil.nextInt(random, 1, r.maxDoc() + 20);
      final RandomFilter f = new RandomFilter(random, random.nextFloat(), docValues);
      // Exercise three query shapes to hit different scorer paths.
      int queryType = random.nextInt(3);
      if (queryType == 0) {
        // force out of order
        BooleanQuery bq = new BooleanQuery();
        // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
        // which delegates to BS if there are no mandatory clauses.
        bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
        // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
        // the clause instead of BQ.
        bq.setMinimumNumberShouldMatch(1);
        hits = s.search(bq, f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
      } else if (queryType == 1) {
        hits = s.search(new ConstantScoreQuery(f),
                        null, hitCount, sort, random.nextBoolean(), random.nextBoolean());
      } else {
        hits = s.search(new MatchAllDocsQuery(),
                        f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
      }

      if (VERBOSE) {
        System.out.println("\nTEST: iter=" + iter + " " + hits.totalHits + " hits; topN=" + hitCount + "; reverse=" + reverse + "; sortMissingLast=" + sortMissingLast + " sort=" + sort);
      }

      // Compute expected results:
      // The filter recorded the values of every doc it matched; sort those the
      // same way the searcher should have, honoring missing-last/first.
      Collections.sort(f.matchValues, new Comparator<BytesRef>() {
          @Override
          public int compare(BytesRef a, BytesRef b) {
            if (a == null) {
              if (b == null) {
                return 0;
              }
              if (sortMissingLast) {
                return 1;
              } else {
                return -1;
              }
            } else if (b == null) {
              if (sortMissingLast) {
                return -1;
              } else {
                return 1;
              }
            } else {
              return a.compareTo(b);
            }
          }
        });

      if (reverse) {
        Collections.reverse(f.matchValues);
      }
      final List<BytesRef> expected = f.matchValues;
      if (VERBOSE) {
        System.out.println("  expected:");
        for(int idx=0;idx<expected.size();idx++) {
          BytesRef br = expected.get(idx);
          if (br == null && missingIsNull == false) {
            br = new BytesRef();
          }
          System.out.println(" " + idx + ": " + (br == null ? "<missing>" : br.utf8ToString()));
          if (idx == hitCount-1) {
            break;
          }
        }
      }

      if (VERBOSE) {
        System.out.println("  actual:");
        for(int hitIDX=0;hitIDX<hits.scoreDocs.length;hitIDX++) {
          final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
          BytesRef br = (BytesRef) fd.fields[0];
          System.out.println(" " + hitIDX + ": " + (br == null ? "<missing>" : br.utf8ToString()) + " id=" + s.doc(fd.doc).get("id"));
        }
      }
      // Compare actual hits against expected, position by position.
      for(int hitIDX=0;hitIDX<hits.scoreDocs.length;hitIDX++) {
        final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
        BytesRef br = expected.get(hitIDX);
        if (br == null && missingIsNull == false) {
          br = new BytesRef();
        }

        // Normally, the old codecs (that don't support
        // docsWithField via doc values) will always return
        // an empty BytesRef for the missing case; however,
        // if all docs in a given segment were missing, in
        // that case it will return null!  So we must map
        // null here, too:
        BytesRef br2 = (BytesRef) fd.fields[0];
        if (br2 == null && missingIsNull == false) {
          br2 = new BytesRef();
        }

        assertEquals(br, br2);
      }
    }

    r.close();
    dir.close();
  }

  /**
   * Filter that accepts each doc with probability {@code density}, recording
   * the indexed value (from {@code docValues}, looked up by the stored "id")
   * of every accepted doc into {@link #matchValues} so the test can compute
   * the expected sort order.
   */
  private static class RandomFilter extends Filter {
    private final Random random;
    private float density;
    private final List<BytesRef> docValues;
    // Values of all matched docs across segments; synchronized because searches
    // may be concurrent.
    public final List<BytesRef> matchValues = Collections.synchronizedList(new ArrayList<BytesRef>());

    // density should be 0.0 ... 1.0
    public RandomFilter(Random random, float density, List<BytesRef> docValues) {
      this.random = random;
      this.density = density;
      this.docValues = docValues;
    }

    @Override
    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
      final int maxDoc = context.reader().maxDoc();
      // "id" was uninverted to NUMERIC; it maps segment docID back to the global doc index.
      final NumericDocValues idSource = DocValues.getNumeric(context.reader(), "id");
      assertNotNull(idSource);
      final FixedBitSet bits = new FixedBitSet(maxDoc);
      for(int docID=0;docID<maxDoc;docID++) {
        if (random.nextFloat() <= density && (acceptDocs == null || acceptDocs.get(docID))) {
          bits.set(docID);
          //System.out.println("  acc id=" + idSource.getInt(docID) + " docID=" + docID);
          matchValues.add(docValues.get((int) idSource.get(docID)));
        }
      }

      return bits;
    }
  }
}

View File

@ -200,8 +200,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
BinaryDocValues s = FieldCache.DEFAULT.getTerms(ar, "field", false); BinaryDocValues s = FieldCache.DEFAULT.getTerms(ar, "field", false);
for(int docID=0;docID<docBytes.size();docID++) { for(int docID=0;docID<docBytes.size();docID++) {
StoredDocument doc = ar.document(docID); StoredDocument doc = ar.document(docID);
BytesRef bytes = new BytesRef(); BytesRef bytes = s.get(docID);
s.get(docID, bytes);
byte[] expected = docBytes.get(Integer.parseInt(doc.get("id"))); byte[] expected = docBytes.get(Integer.parseInt(doc.get("id")));
assertEquals(expected.length, bytes.length); assertEquals(expected.length, bytes.length);
assertEquals(new BytesRef(expected), bytes); assertEquals(new BytesRef(expected), bytes);
@ -272,8 +271,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
BinaryDocValues s = FieldCache.DEFAULT.getTerms(ar, "field", false); BinaryDocValues s = FieldCache.DEFAULT.getTerms(ar, "field", false);
for(int docID=0;docID<docBytes.size();docID++) { for(int docID=0;docID<docBytes.size();docID++) {
StoredDocument doc = ar.document(docID); StoredDocument doc = ar.document(docID);
BytesRef bytes = new BytesRef(); BytesRef bytes = s.get(docID);
s.get(docID, bytes);
byte[] expected = docBytes.get(Integer.parseInt(doc.get("id"))); byte[] expected = docBytes.get(Integer.parseInt(doc.get("id")));
assertEquals(expected.length, bytes.length); assertEquals(expected.length, bytes.length);
assertEquals(new BytesRef(expected), bytes); assertEquals(new BytesRef(expected), bytes);
@ -495,7 +493,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
// can be null for the segment if no docs actually had any SortedDocValues // can be null for the segment if no docs actually had any SortedDocValues
// in this case FC.getDocTermsOrds returns EMPTY // in this case FC.getDocTermsOrds returns EMPTY
if (actual == null) { if (actual == null) {
assertEquals(DocValues.EMPTY_SORTED_SET, expected); assertEquals(expected.getValueCount(), 0);
return; return;
} }
assertEquals(expected.getValueCount(), actual.getValueCount()); assertEquals(expected.getValueCount(), actual.getValueCount());
@ -511,11 +509,9 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
} }
// compare ord dictionary // compare ord dictionary
BytesRef expectedBytes = new BytesRef();
BytesRef actualBytes = new BytesRef();
for (long i = 0; i < expected.getValueCount(); i++) { for (long i = 0; i < expected.getValueCount(); i++) {
expected.lookupOrd(i, expectedBytes); final BytesRef expectedBytes = BytesRef.deepCopyOf(expected.lookupOrd(i));
actual.lookupOrd(i, actualBytes); final BytesRef actualBytes = actual.lookupOrd(i);
assertEquals(expectedBytes, actualBytes); assertEquals(expectedBytes, actualBytes);
} }

View File

@ -90,8 +90,6 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted"); SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
startingGun.await(); startingGun.await();
int iters = atLeast(1000); int iters = atLeast(1000);
BytesRef scratch = new BytesRef();
BytesRef scratch2 = new BytesRef();
for(int iter=0;iter<iters;iter++) { for(int iter=0;iter<iters;iter++) {
int docID = threadRandom.nextInt(numDocs); int docID = threadRandom.nextInt(numDocs);
switch(threadRandom.nextInt(4)) { switch(threadRandom.nextInt(4)) {
@ -108,11 +106,10 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false).get(docID)); assertEquals(numbers.get(docID).longValue(), FieldCache.DEFAULT.getNumerics(ar, "number", FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, false).get(docID));
break; break;
} }
bdv.get(docID, scratch); BytesRef term = bdv.get(docID);
assertEquals(binary.get(docID), scratch); assertEquals(binary.get(docID), term);
// Cannot share a single scratch against two "sources": term = sdv.get(docID);
sdv.get(docID, scratch2); assertEquals(sorted.get(docID), term);
assertEquals(sorted.get(docID), scratch2);
} }
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);
@ -207,12 +204,11 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
while(System.currentTimeMillis() < END_TIME) { while(System.currentTimeMillis() < END_TIME) {
final SortedDocValues source; final SortedDocValues source;
source = stringDVDirect; source = stringDVDirect;
final BytesRef scratch = new BytesRef();
for(int iter=0;iter<100;iter++) { for(int iter=0;iter<100;iter++) {
final int docID = random.nextInt(sr.maxDoc()); final int docID = random.nextInt(sr.maxDoc());
source.get(docID, scratch); BytesRef term = source.get(docID);
assertEquals(docValues.get((int) docIDToID.get(docID)), scratch); assertEquals(docValues.get((int) docIDToID.get(docID)), term);
} }
} }
} }

View File

@ -72,11 +72,10 @@ public class TestUninvertingReader extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals(-3, NumericUtils.prefixCodedToInt(value)); assertEquals(-3, NumericUtils.prefixCodedToInt(value));
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals(5, NumericUtils.prefixCodedToInt(value)); assertEquals(5, NumericUtils.prefixCodedToInt(value));
ir.close(); ir.close();
@ -117,11 +116,10 @@ public class TestUninvertingReader extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals(Float.floatToRawIntBits(-3f), NumericUtils.prefixCodedToInt(value)); assertEquals(Float.floatToRawIntBits(-3f), NumericUtils.prefixCodedToInt(value));
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals(Float.floatToRawIntBits(5f), NumericUtils.prefixCodedToInt(value)); assertEquals(Float.floatToRawIntBits(5f), NumericUtils.prefixCodedToInt(value));
ir.close(); ir.close();
@ -161,11 +159,10 @@ public class TestUninvertingReader extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals(-3, NumericUtils.prefixCodedToLong(value)); assertEquals(-3, NumericUtils.prefixCodedToLong(value));
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals(5, NumericUtils.prefixCodedToLong(value)); assertEquals(5, NumericUtils.prefixCodedToLong(value));
ir.close(); ir.close();
@ -205,11 +202,10 @@ public class TestUninvertingReader extends LuceneTestCase {
assertEquals(1, v.nextOrd()); assertEquals(1, v.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd());
BytesRef value = new BytesRef(); BytesRef value = v.lookupOrd(0);
v.lookupOrd(0, value);
assertEquals(Double.doubleToRawLongBits(-3d), NumericUtils.prefixCodedToLong(value)); assertEquals(Double.doubleToRawLongBits(-3d), NumericUtils.prefixCodedToLong(value));
v.lookupOrd(1, value); value = v.lookupOrd(1);
assertEquals(Double.doubleToRawLongBits(5d), NumericUtils.prefixCodedToLong(value)); assertEquals(Double.doubleToRawLongBits(5d), NumericUtils.prefixCodedToLong(value));
ir.close(); ir.close();

View File

@ -40,7 +40,6 @@ public abstract class DocTermsIndexDocValues extends FunctionValues {
protected final SortedDocValues termsIndex; protected final SortedDocValues termsIndex;
protected final ValueSource vs; protected final ValueSource vs;
protected final MutableValueStr val = new MutableValueStr(); protected final MutableValueStr val = new MutableValueStr();
protected final BytesRef spare = new BytesRef();
protected final CharsRef spareChars = new CharsRef(); protected final CharsRef spareChars = new CharsRef();
public DocTermsIndexDocValues(ValueSource vs, AtomicReaderContext context, String field) throws IOException { public DocTermsIndexDocValues(ValueSource vs, AtomicReaderContext context, String field) throws IOException {
@ -71,17 +70,18 @@ public abstract class DocTermsIndexDocValues extends FunctionValues {
@Override @Override
public boolean bytesVal(int doc, BytesRef target) { public boolean bytesVal(int doc, BytesRef target) {
termsIndex.get(doc, target); target.length = 0;
target.copyBytes(termsIndex.get(doc));
return target.length > 0; return target.length > 0;
} }
@Override @Override
public String strVal(int doc) { public String strVal(int doc) {
termsIndex.get(doc, spare); final BytesRef term = termsIndex.get(doc);
if (spare.length == 0) { if (term.length == 0) {
return null; return null;
} }
UnicodeUtil.UTF8toUTF16(spare, spareChars); UnicodeUtil.UTF8toUTF16(term, spareChars);
return spareChars.toString(); return spareChars.toString();
} }
@ -149,14 +149,10 @@ public abstract class DocTermsIndexDocValues extends FunctionValues {
@Override @Override
public void fillValue(int doc) { public void fillValue(int doc) {
int ord = termsIndex.getOrd(doc); int ord = termsIndex.getOrd(doc);
if (ord == -1) { mval.value.length = 0;
mval.value.bytes = BytesRef.EMPTY_BYTES; mval.exists = ord >= 0;
mval.value.offset = 0; if (mval.exists) {
mval.value.length = 0; mval.value.copyBytes(termsIndex.lookupOrd(ord));
mval.exists = false;
} else {
termsIndex.lookupOrd(ord, mval.value);
mval.exists = true;
} }
} }
}; };

View File

@ -59,7 +59,7 @@ public class BytesRefFieldSource extends FieldCacheSource {
@Override @Override
public boolean bytesVal(int doc, BytesRef target) { public boolean bytesVal(int doc, BytesRef target) {
binaryValues.get(doc, target); target.copyBytes(binaryValues.get(doc));
return target.length > 0; return target.length > 0;
} }
@ -93,13 +93,8 @@ public class BytesRefFieldSource extends FieldCacheSource {
@Override @Override
public void fillValue(int doc) { public void fillValue(int doc) {
mval.exists = docsWithField.get(doc); mval.exists = docsWithField.get(doc);
if (mval.exists) { mval.value.length = 0;
binaryValues.get(doc, mval.value); mval.value.copyBytes(binaryValues.get(doc));
} else {
mval.value.bytes = BytesRef.EMPTY_BYTES;
mval.value.offset = 0;
mval.value.length = 0;
}
} }
}; };
} }

View File

@ -62,14 +62,13 @@ public class JoinDocFreqValueSource extends FieldCacheSource {
final TermsEnum termsEnum = t == null ? TermsEnum.EMPTY : t.iterator(null); final TermsEnum termsEnum = t == null ? TermsEnum.EMPTY : t.iterator(null);
return new IntDocValues(this) { return new IntDocValues(this) {
final BytesRef ref = new BytesRef();
@Override @Override
public int intVal(int doc) public int intVal(int doc)
{ {
try { try {
terms.get(doc, ref); final BytesRef term = terms.get(doc);
if (termsEnum.seekExact(ref)) { if (termsEnum.seekExact(term)) {
return termsEnum.docFreq(); return termsEnum.docFreq();
} else { } else {
return 0; return 0;

View File

@ -45,7 +45,6 @@ public final class SlowCollatedStringComparator extends FieldComparator<String>
final Collator collator; final Collator collator;
private String bottom; private String bottom;
private String topValue; private String topValue;
private final BytesRef tempBR = new BytesRef();
public SlowCollatedStringComparator(int numHits, String field, Collator collator) { public SlowCollatedStringComparator(int numHits, String field, Collator collator) {
values = new String[numHits]; values = new String[numHits];
@ -70,8 +69,8 @@ public final class SlowCollatedStringComparator extends FieldComparator<String>
@Override @Override
public int compareBottom(int doc) { public int compareBottom(int doc) {
currentDocTerms.get(doc, tempBR); final BytesRef term = currentDocTerms.get(doc);
final String val2 = tempBR.length == 0 && docsWithField.get(doc) == false ? null : tempBR.utf8ToString(); final String val2 = term.length == 0 && docsWithField.get(doc) == false ? null : term.utf8ToString();
if (bottom == null) { if (bottom == null) {
if (val2 == null) { if (val2 == null) {
return 0; return 0;
@ -85,11 +84,11 @@ public final class SlowCollatedStringComparator extends FieldComparator<String>
@Override @Override
public void copy(int slot, int doc) { public void copy(int slot, int doc) {
currentDocTerms.get(doc, tempBR); final BytesRef term = currentDocTerms.get(doc);
if (tempBR.length == 0 && docsWithField.get(doc) == false) { if (term.length == 0 && docsWithField.get(doc) == false) {
values[slot] = null; values[slot] = null;
} else { } else {
values[slot] = tempBR.utf8ToString(); values[slot] = term.utf8ToString();
} }
} }
@ -131,12 +130,12 @@ public final class SlowCollatedStringComparator extends FieldComparator<String>
@Override @Override
public int compareTop(int doc) { public int compareTop(int doc) {
currentDocTerms.get(doc, tempBR); final BytesRef term = currentDocTerms.get(doc);
final String docValue; final String docValue;
if (tempBR.length == 0 && docsWithField.get(doc) == false) { if (term.length == 0 && docsWithField.get(doc) == false) {
docValue = null; docValue = null;
} else { } else {
docValue = tempBR.utf8ToString(); docValue = term.utf8ToString();
} }
return compareValues(topValue, docValue); return compareValues(topValue, docValue);
} }

View File

@ -215,7 +215,7 @@ public class SerializedDVStrategy extends SpatialStrategy {
boolean fillBytes(int doc) { boolean fillBytes(int doc) {
if (bytesRefDoc != doc) { if (bytesRefDoc != doc) {
docValues.get(doc, bytesRef); bytesRef.copyBytes(docValues.get(doc));
bytesRefDoc = doc; bytesRefDoc = doc;
} }
return bytesRef.length != 0; return bytesRef.length != 0;

View File

@ -488,17 +488,15 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
BinaryDocValues payloadsDV = MultiDocValues.getBinaryValues(searcher.getIndexReader(), "payloads"); BinaryDocValues payloadsDV = MultiDocValues.getBinaryValues(searcher.getIndexReader(), "payloads");
List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves(); List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
List<LookupResult> results = new ArrayList<>(); List<LookupResult> results = new ArrayList<>();
BytesRef scratch = new BytesRef();
for (int i=0;i<hits.scoreDocs.length;i++) { for (int i=0;i<hits.scoreDocs.length;i++) {
FieldDoc fd = (FieldDoc) hits.scoreDocs[i]; FieldDoc fd = (FieldDoc) hits.scoreDocs[i];
textDV.get(fd.doc, scratch); BytesRef term = textDV.get(fd.doc);
String text = scratch.utf8ToString(); String text = term.utf8ToString();
long score = (Long) fd.fields[0]; long score = (Long) fd.fields[0];
BytesRef payload; BytesRef payload;
if (payloadsDV != null) { if (payloadsDV != null) {
payload = new BytesRef(); payload = BytesRef.deepCopyOf(payloadsDV.get(fd.doc));
payloadsDV.get(fd.doc, payload);
} else { } else {
payload = null; payload = null;
} }
@ -512,8 +510,7 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
contextsDV.setDocument(fd.doc - leaves.get(segment).docBase); contextsDV.setDocument(fd.doc - leaves.get(segment).docBase);
long ord; long ord;
while ((ord = contextsDV.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = contextsDV.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
BytesRef context = new BytesRef(); BytesRef context = BytesRef.deepCopyOf(contextsDV.lookupOrd(ord));
contextsDV.lookupOrd(ord, context);
contexts.add(context); contexts.add(context);
} }
} else { } else {

View File

@ -155,18 +155,15 @@ public class BlendedInfixSuggester extends AnalyzingInfixSuggester {
// we reduce the num to the one initially requested // we reduce the num to the one initially requested
int actualNum = num / numFactor; int actualNum = num / numFactor;
BytesRef scratch = new BytesRef();
for (int i = 0; i < hits.scoreDocs.length; i++) { for (int i = 0; i < hits.scoreDocs.length; i++) {
FieldDoc fd = (FieldDoc) hits.scoreDocs[i]; FieldDoc fd = (FieldDoc) hits.scoreDocs[i];
textDV.get(fd.doc, scratch); final String text = textDV.get(fd.doc).utf8ToString();
String text = scratch.utf8ToString();
long weight = (Long) fd.fields[0]; long weight = (Long) fd.fields[0];
BytesRef payload; BytesRef payload;
if (payloadsDV != null) { if (payloadsDV != null) {
payload = new BytesRef(); payload = BytesRef.deepCopyOf(payloadsDV.get(fd.doc));
payloadsDV.get(fd.doc, payload);
} else { } else {
payload = null; payload = null;
} }

View File

@ -449,11 +449,11 @@ public class AssertingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
assert docID >= 0 && docID < maxDoc; assert docID >= 0 && docID < maxDoc;
final BytesRef result = in.get(docID);
assert result.isValid(); assert result.isValid();
in.get(docID, result); return result;
assert result.isValid();
} }
} }
@ -479,11 +479,11 @@ public class AssertingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void lookupOrd(int ord, BytesRef result) { public BytesRef lookupOrd(int ord) {
assert ord >= 0 && ord < valueCount; assert ord >= 0 && ord < valueCount;
final BytesRef result = in.lookupOrd(ord);
assert result.isValid(); assert result.isValid();
in.lookupOrd(ord, result); return result;
assert result.isValid();
} }
@Override @Override
@ -494,11 +494,11 @@ public class AssertingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void get(int docID, BytesRef result) { public BytesRef get(int docID) {
assert docID >= 0 && docID < maxDoc; assert docID >= 0 && docID < maxDoc;
final BytesRef result = in.get(docID);
assert result.isValid(); assert result.isValid();
in.get(docID, result); return result;
assert result.isValid();
} }
@Override @Override
@ -543,11 +543,11 @@ public class AssertingAtomicReader extends FilterAtomicReader {
} }
@Override @Override
public void lookupOrd(long ord, BytesRef result) { public BytesRef lookupOrd(long ord) {
assert ord >= 0 && ord < valueCount; assert ord >= 0 && ord < valueCount;
final BytesRef result = in.lookupOrd(ord);
assert result.isValid(); assert result.isValid();
in.lookupOrd(ord, result); return result;
assert result.isValid();
} }
@Override @Override

View File

@ -206,11 +206,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(text, hitDoc.get("fieldname")); assertEquals(text, hitDoc.get("fieldname"));
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv1"); BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv1");
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.get(hits.scoreDocs[i].doc);
dv.get(hits.scoreDocs[i].doc, scratch);
assertEquals(new BytesRef(longTerm), scratch); assertEquals(new BytesRef(longTerm), scratch);
dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv2"); dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv2");
dv.get(hits.scoreDocs[i].doc, scratch); scratch = dv.get(hits.scoreDocs[i].doc);
assertEquals(new BytesRef(text), scratch); assertEquals(new BytesRef(text), scratch);
} }
@ -238,7 +237,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
Query query = new TermQuery(new Term("fieldname", "text")); Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = isearcher.search(query, null, 1); TopDocs hits = isearcher.search(query, null, 1);
assertEquals(1, hits.totalHits); assertEquals(1, hits.totalHits);
BytesRef scratch = new BytesRef();
// Iterate through the results: // Iterate through the results:
for (int i = 0; i < hits.scoreDocs.length; i++) { for (int i = 0; i < hits.scoreDocs.length; i++) {
StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc); StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
@ -247,7 +245,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv1"); NumericDocValues dv = ireader.leaves().get(0).reader().getNumericDocValues("dv1");
assertEquals(5, dv.get(hits.scoreDocs[i].doc)); assertEquals(5, dv.get(hits.scoreDocs[i].doc));
BinaryDocValues dv2 = ireader.leaves().get(0).reader().getBinaryDocValues("dv2"); BinaryDocValues dv2 = ireader.leaves().get(0).reader().getBinaryDocValues("dv2");
dv2.get(hits.scoreDocs[i].doc, scratch); BytesRef scratch = dv2.get(hits.scoreDocs[i].doc);
assertEquals(new BytesRef("hello world"), scratch); assertEquals(new BytesRef("hello world"), scratch);
} }
@ -276,7 +274,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
Query query = new TermQuery(new Term("fieldname", "text")); Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = isearcher.search(query, null, 1); TopDocs hits = isearcher.search(query, null, 1);
assertEquals(1, hits.totalHits); assertEquals(1, hits.totalHits);
BytesRef scratch = new BytesRef();
// Iterate through the results: // Iterate through the results:
for (int i = 0; i < hits.scoreDocs.length; i++) { for (int i = 0; i < hits.scoreDocs.length; i++) {
StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc); StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
@ -284,12 +281,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv1"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv1");
int ord = dv.getOrd(0); int ord = dv.getOrd(0);
dv.lookupOrd(ord, scratch); BytesRef scratch = dv.lookupOrd(ord);
assertEquals(new BytesRef("hello hello"), scratch); assertEquals(new BytesRef("hello hello"), scratch);
NumericDocValues dv2 = ireader.leaves().get(0).reader().getNumericDocValues("dv2"); NumericDocValues dv2 = ireader.leaves().get(0).reader().getNumericDocValues("dv2");
assertEquals(5, dv2.get(hits.scoreDocs[i].doc)); assertEquals(5, dv2.get(hits.scoreDocs[i].doc));
BinaryDocValues dv3 = ireader.leaves().get(0).reader().getBinaryDocValues("dv3"); BinaryDocValues dv3 = ireader.leaves().get(0).reader().getBinaryDocValues("dv3");
dv3.get(hits.scoreDocs[i].doc, scratch); scratch = dv3.get(hits.scoreDocs[i].doc);
assertEquals(new BytesRef("hello world"), scratch); assertEquals(new BytesRef("hello world"), scratch);
} }
@ -326,12 +323,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv2"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv2");
int ord = dv.getOrd(0); int ord = dv.getOrd(0);
dv.lookupOrd(ord, scratch); scratch = dv.lookupOrd(ord);
assertEquals(new BytesRef("hello hello"), scratch); assertEquals(new BytesRef("hello hello"), scratch);
NumericDocValues dv2 = ireader.leaves().get(0).reader().getNumericDocValues("dv3"); NumericDocValues dv2 = ireader.leaves().get(0).reader().getNumericDocValues("dv3");
assertEquals(5, dv2.get(hits.scoreDocs[i].doc)); assertEquals(5, dv2.get(hits.scoreDocs[i].doc));
BinaryDocValues dv3 = ireader.leaves().get(0).reader().getBinaryDocValues("dv1"); BinaryDocValues dv3 = ireader.leaves().get(0).reader().getBinaryDocValues("dv1");
dv3.get(hits.scoreDocs[i].doc, scratch); scratch = dv3.get(hits.scoreDocs[i].doc);
assertEquals(new BytesRef("hello world"), scratch); assertEquals(new BytesRef("hello world"), scratch);
} }
@ -480,14 +477,13 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
Query query = new TermQuery(new Term("fieldname", "text")); Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = isearcher.search(query, null, 1); TopDocs hits = isearcher.search(query, null, 1);
assertEquals(1, hits.totalHits); assertEquals(1, hits.totalHits);
BytesRef scratch = new BytesRef();
// Iterate through the results: // Iterate through the results:
for (int i = 0; i < hits.scoreDocs.length; i++) { for (int i = 0; i < hits.scoreDocs.length; i++) {
StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc); StoredDocument hitDoc = isearcher.doc(hits.scoreDocs[i].doc);
assertEquals(text, hitDoc.get("fieldname")); assertEquals(text, hitDoc.get("fieldname"));
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
dv.get(hits.scoreDocs[i].doc, scratch); BytesRef scratch = dv.get(hits.scoreDocs[i].doc);
assertEquals(new BytesRef("hello world"), scratch); assertEquals(new BytesRef("hello world"), scratch);
} }
@ -527,7 +523,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
} else { } else {
expected = "hello 2"; expected = "hello 2";
} }
dv.get(i, scratch); scratch = dv.get(i);
assertEquals(expected, scratch.utf8ToString()); assertEquals(expected, scratch.utf8ToString());
} }
@ -564,7 +560,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(text, hitDoc.get("fieldname")); assertEquals(text, hitDoc.get("fieldname"));
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
dv.lookupOrd(dv.getOrd(hits.scoreDocs[i].doc), scratch); scratch = dv.lookupOrd(dv.getOrd(hits.scoreDocs[i].doc));
assertEquals(new BytesRef("hello world"), scratch); assertEquals(new BytesRef("hello world"), scratch);
} }
@ -593,9 +589,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
BytesRef scratch = new BytesRef(); BytesRef scratch = new BytesRef();
dv.lookupOrd(dv.getOrd(0), scratch); scratch = dv.lookupOrd(dv.getOrd(0));
assertEquals("hello world 1", scratch.utf8ToString()); assertEquals("hello world 1", scratch.utf8ToString());
dv.lookupOrd(dv.getOrd(1), scratch); scratch = dv.lookupOrd(dv.getOrd(1));
assertEquals("hello world 2", scratch.utf8ToString()); assertEquals("hello world 2", scratch.utf8ToString());
ireader.close(); ireader.close();
@ -626,12 +622,11 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
assertEquals(2, dv.getValueCount()); assertEquals(2, dv.getValueCount());
BytesRef scratch = new BytesRef();
assertEquals(0, dv.getOrd(0)); assertEquals(0, dv.getOrd(0));
dv.lookupOrd(0, scratch); BytesRef scratch = dv.lookupOrd(0);
assertEquals("hello world 1", scratch.utf8ToString()); assertEquals("hello world 1", scratch.utf8ToString());
assertEquals(1, dv.getOrd(1)); assertEquals(1, dv.getOrd(1));
dv.lookupOrd(1, scratch); scratch = dv.lookupOrd(1);
assertEquals("hello world 2", scratch.utf8ToString()); assertEquals("hello world 2", scratch.utf8ToString());
assertEquals(0, dv.getOrd(2)); assertEquals(0, dv.getOrd(2));
@ -663,10 +658,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
assertEquals(2, dv.getValueCount()); // 2 ords assertEquals(2, dv.getValueCount()); // 2 ords
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.lookupOrd(0);
dv.lookupOrd(0, scratch);
assertEquals(new BytesRef("hello world 1"), scratch); assertEquals(new BytesRef("hello world 1"), scratch);
dv.lookupOrd(1, scratch); scratch = dv.lookupOrd(1);
assertEquals(new BytesRef("hello world 2"), scratch); assertEquals(new BytesRef("hello world 2"), scratch);
for(int i=0;i<2;i++) { for(int i=0;i<2;i++) {
StoredDocument doc2 = ireader.leaves().get(0).reader().document(i); StoredDocument doc2 = ireader.leaves().get(0).reader().document(i);
@ -676,7 +670,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
} else { } else {
expected = "hello world 2"; expected = "hello world 2";
} }
dv.lookupOrd(dv.getOrd(i), scratch); scratch = dv.lookupOrd(dv.getOrd(i));
assertEquals(expected, scratch.utf8ToString()); assertEquals(expected, scratch.utf8ToString());
} }
@ -712,8 +706,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
} else { } else {
assertEquals(0, dv.getOrd(0)); assertEquals(0, dv.getOrd(0));
assertEquals(1, dv.getValueCount()); assertEquals(1, dv.getValueCount());
BytesRef ref = new BytesRef(); BytesRef ref = dv.lookupOrd(0);
dv.lookupOrd(0, ref);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
} }
@ -737,8 +730,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.get(0);
dv.get(0, scratch);
assertEquals(new BytesRef("hello\nworld\r1"), scratch); assertEquals(new BytesRef("hello\nworld\r1"), scratch);
ireader.close(); ireader.close();
@ -763,13 +755,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.lookupOrd(dv.getOrd(0));
dv.lookupOrd(dv.getOrd(0), scratch);
assertEquals(new BytesRef("hello world 2"), scratch); assertEquals(new BytesRef("hello world 2"), scratch);
if (defaultCodecSupportsDocsWithField()) { if (defaultCodecSupportsDocsWithField()) {
assertEquals(-1, dv.getOrd(1)); assertEquals(-1, dv.getOrd(1));
} }
dv.get(1, scratch); scratch = dv.get(1);
assertEquals(new BytesRef(""), scratch); assertEquals(new BytesRef(""), scratch);
ireader.close(); ireader.close();
directory.close(); directory.close();
@ -866,10 +857,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); SortedDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
BytesRef scratch = new BytesRef();
assertEquals(0, dv.getOrd(0)); assertEquals(0, dv.getOrd(0));
assertEquals(0, dv.getOrd(1)); assertEquals(0, dv.getOrd(1));
dv.lookupOrd(dv.getOrd(0), scratch); BytesRef scratch = dv.lookupOrd(dv.getOrd(0));
assertEquals("", scratch.utf8ToString()); assertEquals("", scratch.utf8ToString());
ireader.close(); ireader.close();
@ -896,10 +886,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.get(0);
dv.get(0, scratch);
assertEquals("", scratch.utf8ToString()); assertEquals("", scratch.utf8ToString());
dv.get(1, scratch); scratch = dv.get(1);
assertEquals("", scratch.utf8ToString()); assertEquals("", scratch.utf8ToString());
ireader.close(); ireader.close();
@ -925,8 +914,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.get(0);
dv.get(0, scratch);
assertEquals(new BytesRef(bytes), scratch); assertEquals(new BytesRef(bytes), scratch);
ireader.close(); ireader.close();
@ -952,8 +940,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
BytesRef scratch = new BytesRef(); BytesRef scratch = dv.get(0);
dv.get(0, scratch);
assertEquals(new BytesRef(bytes), scratch); assertEquals(new BytesRef(bytes), scratch);
ireader.close(); ireader.close();
directory.close(); directory.close();
@ -976,8 +963,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
byte mybytes[] = new byte[20]; byte mybytes[] = new byte[20];
BytesRef scratch = new BytesRef(mybytes); BytesRef scratch = dv.get(0);
dv.get(0, scratch);
assertEquals("boo!", scratch.utf8ToString()); assertEquals("boo!", scratch.utf8ToString());
assertFalse(scratch.bytes == mybytes); assertFalse(scratch.bytes == mybytes);
@ -1002,8 +988,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert ireader.leaves().size() == 1; assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv"); BinaryDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
byte mybytes[] = new byte[20]; byte mybytes[] = new byte[20];
BytesRef scratch = new BytesRef(mybytes); BytesRef scratch = dv.get(0);
dv.get(0, scratch);
assertEquals("boo!", scratch.utf8ToString()); assertEquals("boo!", scratch.utf8ToString());
assertFalse(scratch.bytes == mybytes); assertFalse(scratch.bytes == mybytes);
@ -1011,72 +996,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
directory.close(); directory.close();
} }
public void testCodecUsesOwnBytesEachTime() throws IOException {
Analyzer analyzer = new MockAnalyzer(random());
Directory directory = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
conf.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
Document doc = new Document();
doc.add(new BinaryDocValuesField("dv", new BytesRef("foo!")));
iwriter.addDocument(doc);
doc = new Document();
doc.add(new BinaryDocValuesField("dv", new BytesRef("bar!")));
iwriter.addDocument(doc);
iwriter.shutdown();
// Now search the index:
IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
BytesRef scratch = new BytesRef();
dv.get(0, scratch);
assertEquals("foo!", scratch.utf8ToString());
BytesRef scratch2 = new BytesRef();
dv.get(1, scratch2);
assertEquals("bar!", scratch2.utf8ToString());
// check scratch is still valid
assertEquals("foo!", scratch.utf8ToString());
ireader.close();
directory.close();
}
public void testCodecUsesOwnSortedBytesEachTime() throws IOException {
Analyzer analyzer = new MockAnalyzer(random());
Directory directory = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
conf.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
Document doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
iwriter.addDocument(doc);
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("bar!")));
iwriter.addDocument(doc);
iwriter.shutdown();
// Now search the index:
IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getSortedDocValues("dv");
BytesRef scratch = new BytesRef();
dv.get(0, scratch);
assertEquals("foo!", scratch.utf8ToString());
BytesRef scratch2 = new BytesRef();
dv.get(1, scratch2);
assertEquals("bar!", scratch2.utf8ToString());
// check scratch is still valid
assertEquals("foo!", scratch.utf8ToString());
ireader.close();
directory.close();
}
/* /*
* Simple test case to show how to use the API * Simple test case to show how to use the API
*/ */
@ -1181,11 +1100,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
SortedDocValues docValues = MultiDocValues.getSortedValues(reader, "field"); SortedDocValues docValues = MultiDocValues.getSortedValues(reader, "field");
int[] sort = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator()); int[] sort = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
BytesRef expected = new BytesRef(); BytesRef expected = new BytesRef();
BytesRef actual = new BytesRef();
assertEquals(hash.size(), docValues.getValueCount()); assertEquals(hash.size(), docValues.getValueCount());
for (int i = 0; i < hash.size(); i++) { for (int i = 0; i < hash.size(); i++) {
hash.get(sort[i], expected); hash.get(sort[i], expected);
docValues.lookupOrd(i, actual); final BytesRef actual = docValues.lookupOrd(i);
assertEquals(expected.utf8ToString(), actual.utf8ToString()); assertEquals(expected.utf8ToString(), actual.utf8ToString());
int ord = docValues.lookupTerm(expected); int ord = docValues.lookupTerm(expected);
assertEquals(i, ord); assertEquals(i, ord);
@ -1198,7 +1116,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
DocsEnum termDocsEnum = slowR.termDocsEnum(new Term("id", entry.getKey())); DocsEnum termDocsEnum = slowR.termDocsEnum(new Term("id", entry.getKey()));
int docId = termDocsEnum.nextDoc(); int docId = termDocsEnum.nextDoc();
expected = new BytesRef(entry.getValue()); expected = new BytesRef(entry.getValue());
docValues.get(docId, actual); final BytesRef actual = docValues.get(docId);
assertEquals(expected, actual); assertEquals(expected, actual);
} }
@ -1357,8 +1275,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
BinaryDocValues docValues = r.getBinaryDocValues("dv"); BinaryDocValues docValues = r.getBinaryDocValues("dv");
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored"); BytesRef binaryValue = r.document(i).getBinaryValue("stored");
BytesRef scratch = new BytesRef(); BytesRef scratch = docValues.get(i);
docValues.get(i, scratch);
assertEquals(binaryValue, scratch); assertEquals(binaryValue, scratch);
} }
} }
@ -1428,8 +1345,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
BinaryDocValues docValues = r.getSortedDocValues("dv"); BinaryDocValues docValues = r.getSortedDocValues("dv");
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored"); BytesRef binaryValue = r.document(i).getBinaryValue("stored");
BytesRef scratch = new BytesRef(); BytesRef scratch = docValues.get(i);
docValues.get(i, scratch);
assertEquals(binaryValue, scratch); assertEquals(binaryValue, scratch);
} }
} }
@ -1470,8 +1386,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
ireader.close(); ireader.close();
@ -1497,8 +1412,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field2"); dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field2");
@ -1507,7 +1421,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
dv.lookupOrd(0, bytes); bytes = dv.lookupOrd(0);
assertEquals(new BytesRef("world"), bytes); assertEquals(new BytesRef("world"), bytes);
ireader.close(); ireader.close();
@ -1542,15 +1456,14 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
dv.setDocument(1); dv.setDocument(1);
assertEquals(1, dv.nextOrd()); assertEquals(1, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
dv.lookupOrd(1, bytes); bytes = dv.lookupOrd(1);
assertEquals(new BytesRef("world"), bytes); assertEquals(new BytesRef("world"), bytes);
ireader.close(); ireader.close();
@ -1577,11 +1490,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(1, dv.nextOrd()); assertEquals(1, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
dv.lookupOrd(1, bytes); bytes = dv.lookupOrd(1);
assertEquals(new BytesRef("world"), bytes); assertEquals(new BytesRef("world"), bytes);
ireader.close(); ireader.close();
@ -1608,11 +1520,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(1, dv.nextOrd()); assertEquals(1, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
dv.lookupOrd(1, bytes); bytes = dv.lookupOrd(1);
assertEquals(new BytesRef("world"), bytes); assertEquals(new BytesRef("world"), bytes);
ireader.close(); ireader.close();
@ -1655,14 +1566,13 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(1, dv.nextOrd()); assertEquals(1, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("beer"), bytes); assertEquals(new BytesRef("beer"), bytes);
dv.lookupOrd(1, bytes); bytes = dv.lookupOrd(1);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
dv.lookupOrd(2, bytes); bytes = dv.lookupOrd(2);
assertEquals(new BytesRef("world"), bytes); assertEquals(new BytesRef("world"), bytes);
ireader.close(); ireader.close();
@ -1694,8 +1604,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
ireader.close(); ireader.close();
@ -1729,8 +1638,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
ireader.close(); ireader.close();
@ -1763,8 +1671,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
ireader.close(); ireader.close();
@ -1798,8 +1705,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(0, dv.nextOrd()); assertEquals(0, dv.nextOrd());
assertEquals(NO_MORE_ORDS, dv.nextOrd()); assertEquals(NO_MORE_ORDS, dv.nextOrd());
BytesRef bytes = new BytesRef(); BytesRef bytes = dv.lookupOrd(0);
dv.lookupOrd(0, bytes);
assertEquals(new BytesRef("hello"), bytes); assertEquals(new BytesRef("hello"), bytes);
ireader.close(); ireader.close();
@ -1955,7 +1861,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
for (AtomicReaderContext context : ir.leaves()) { for (AtomicReaderContext context : ir.leaves()) {
AtomicReader r = context.reader(); AtomicReader r = context.reader();
SortedSetDocValues docValues = r.getSortedSetDocValues("dv"); SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
BytesRef scratch = new BytesRef();
for (int i = 0; i < r.maxDoc(); i++) { for (int i = 0; i < r.maxDoc(); i++) {
String stringValues[] = r.document(i).getValues("stored"); String stringValues[] = r.document(i).getValues("stored");
if (docValues != null) { if (docValues != null) {
@ -1965,7 +1870,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assert docValues != null; assert docValues != null;
long ord = docValues.nextOrd(); long ord = docValues.nextOrd();
assert ord != NO_MORE_ORDS; assert ord != NO_MORE_ORDS;
docValues.lookupOrd(ord, scratch); BytesRef scratch = docValues.lookupOrd(ord);
assertEquals(stringValues[j], scratch.utf8ToString()); assertEquals(stringValues[j], scratch.utf8ToString());
} }
assert docValues == null || docValues.nextOrd() == NO_MORE_ORDS; assert docValues == null || docValues.nextOrd() == NO_MORE_ORDS;
@ -2158,10 +2063,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(1, ir.leaves().size()); assertEquals(1, ir.leaves().size());
AtomicReader ar = ir.leaves().get(0).reader(); AtomicReader ar = ir.leaves().get(0).reader();
BinaryDocValues dv = ar.getBinaryDocValues("dv1"); BinaryDocValues dv = ar.getBinaryDocValues("dv1");
BytesRef ref = new BytesRef(); BytesRef ref = dv.get(0);
dv.get(0, ref);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
dv.get(1, ref); ref = dv.get(1);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
Bits docsWithField = ar.getDocsWithField("dv1"); Bits docsWithField = ar.getDocsWithField("dv1");
assertTrue(docsWithField.get(0)); assertTrue(docsWithField.get(0));
@ -2191,10 +2095,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(1, ir.leaves().size()); assertEquals(1, ir.leaves().size());
AtomicReader ar = ir.leaves().get(0).reader(); AtomicReader ar = ir.leaves().get(0).reader();
BinaryDocValues dv = ar.getBinaryDocValues("dv1"); BinaryDocValues dv = ar.getBinaryDocValues("dv1");
BytesRef ref = new BytesRef(); BytesRef ref = dv.get(0);
dv.get(0, ref);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
dv.get(1, ref); ref = dv.get(1);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
Bits docsWithField = ar.getDocsWithField("dv1"); Bits docsWithField = ar.getDocsWithField("dv1");
assertTrue(docsWithField.get(0)); assertTrue(docsWithField.get(0));
@ -2228,12 +2131,11 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
assertEquals(1, ir.leaves().size()); assertEquals(1, ir.leaves().size());
AtomicReader ar = ir.leaves().get(0).reader(); AtomicReader ar = ir.leaves().get(0).reader();
BinaryDocValues dv = ar.getBinaryDocValues("dv1"); BinaryDocValues dv = ar.getBinaryDocValues("dv1");
BytesRef ref = new BytesRef(); BytesRef ref = dv.get(0);
dv.get(0, ref);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
dv.get(1, ref); ref = dv.get(1);
assertEquals(new BytesRef(), ref); assertEquals(new BytesRef(), ref);
dv.get(2, ref); ref = dv.get(2);
assertEquals(new BytesRef("boo"), ref); assertEquals(new BytesRef("boo"), ref);
Bits docsWithField = ar.getDocsWithField("dv1"); Bits docsWithField = ar.getDocsWithField("dv1");
assertTrue(docsWithField.get(0)); assertTrue(docsWithField.get(0));
@ -2308,10 +2210,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
NumericDocValues numerics = r.getNumericDocValues("dvNum"); NumericDocValues numerics = r.getNumericDocValues("dvNum");
for (int j = 0; j < r.maxDoc(); j++) { for (int j = 0; j < r.maxDoc(); j++) {
BytesRef binaryValue = r.document(j).getBinaryValue("storedBin"); BytesRef binaryValue = r.document(j).getBinaryValue("storedBin");
BytesRef scratch = new BytesRef(); BytesRef scratch = binaries.get(j);
binaries.get(j, scratch);
assertEquals(binaryValue, scratch); assertEquals(binaryValue, scratch);
sorted.get(j, scratch); scratch = sorted.get(j);
assertEquals(binaryValue, scratch); assertEquals(binaryValue, scratch);
String expected = r.document(j).get("storedNum"); String expected = r.document(j).get("storedNum");
assertEquals(Long.parseLong(expected), numerics.get(j)); assertEquals(Long.parseLong(expected), numerics.get(j));
@ -2420,10 +2321,9 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
BytesRef binaryValue = r.document(j).getBinaryValue("storedBin"); BytesRef binaryValue = r.document(j).getBinaryValue("storedBin");
if (binaryValue != null) { if (binaryValue != null) {
if (binaries != null) { if (binaries != null) {
BytesRef scratch = new BytesRef(); BytesRef scratch = binaries.get(j);
binaries.get(j, scratch);
assertEquals(binaryValue, scratch); assertEquals(binaryValue, scratch);
sorted.get(j, scratch); scratch = sorted.get(j);
assertEquals(binaryValue, scratch); assertEquals(binaryValue, scratch);
assertTrue(binaryBits.get(j)); assertTrue(binaryBits.get(j));
assertTrue(sortedBits.get(j)); assertTrue(sortedBits.get(j));
@ -2451,8 +2351,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
for (int k = 0; k < values.length; k++) { for (int k = 0; k < values.length; k++) {
long ord = sortedSet.nextOrd(); long ord = sortedSet.nextOrd();
assertTrue(ord != SortedSetDocValues.NO_MORE_ORDS); assertTrue(ord != SortedSetDocValues.NO_MORE_ORDS);
BytesRef value = new BytesRef(); BytesRef value = sortedSet.lookupOrd(ord);
sortedSet.lookupOrd(ord, value);
assertEquals(values[k], value.utf8ToString()); assertEquals(values[k], value.utf8ToString());
} }
assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
@ -2507,9 +2406,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
AtomicReader ar = SlowCompositeReaderWrapper.wrap(r); AtomicReader ar = SlowCompositeReaderWrapper.wrap(r);
BinaryDocValues values = ar.getBinaryDocValues("field"); BinaryDocValues values = ar.getBinaryDocValues("field");
BytesRef result = new BytesRef();
for(int j=0;j<5;j++) { for(int j=0;j<5;j++) {
values.get(0, result); BytesRef result = values.get(0);
assertTrue(result.length == 0 || result.length == 1<<i); assertTrue(result.length == 0 || result.length == 1<<i);
} }
ar.close(); ar.close();

View File

@ -2223,12 +2223,10 @@ public abstract class LuceneTestCase extends Assert {
BinaryDocValues leftValues = MultiDocValues.getBinaryValues(leftReader, field); BinaryDocValues leftValues = MultiDocValues.getBinaryValues(leftReader, field);
BinaryDocValues rightValues = MultiDocValues.getBinaryValues(rightReader, field); BinaryDocValues rightValues = MultiDocValues.getBinaryValues(rightReader, field);
if (leftValues != null && rightValues != null) { if (leftValues != null && rightValues != null) {
BytesRef scratchLeft = new BytesRef();
BytesRef scratchRight = new BytesRef();
for(int docID=0;docID<leftReader.maxDoc();docID++) { for(int docID=0;docID<leftReader.maxDoc();docID++) {
leftValues.get(docID, scratchLeft); final BytesRef left = BytesRef.deepCopyOf(leftValues.get(docID));
rightValues.get(docID, scratchRight); final BytesRef right = rightValues.get(docID);
assertEquals(info, scratchLeft, scratchRight); assertEquals(info, left, right);
} }
} else { } else {
assertNull(info, leftValues); assertNull(info, leftValues);
@ -2246,15 +2244,15 @@ public abstract class LuceneTestCase extends Assert {
BytesRef scratchLeft = new BytesRef(); BytesRef scratchLeft = new BytesRef();
BytesRef scratchRight = new BytesRef(); BytesRef scratchRight = new BytesRef();
for (int i = 0; i < leftValues.getValueCount(); i++) { for (int i = 0; i < leftValues.getValueCount(); i++) {
leftValues.lookupOrd(i, scratchLeft); final BytesRef left = BytesRef.deepCopyOf(leftValues.lookupOrd(i));
rightValues.lookupOrd(i, scratchRight); final BytesRef right = rightValues.lookupOrd(i);
assertEquals(info, scratchLeft, scratchRight); assertEquals(info, left, right);
} }
// bytes // bytes
for(int docID=0;docID<leftReader.maxDoc();docID++) { for(int docID=0;docID<leftReader.maxDoc();docID++) {
leftValues.get(docID, scratchLeft); final BytesRef left = BytesRef.deepCopyOf(leftValues.get(docID));
rightValues.get(docID, scratchRight); final BytesRef right = rightValues.get(docID);
assertEquals(info, scratchLeft, scratchRight); assertEquals(info, left, right);
} }
} else { } else {
assertNull(info, leftValues); assertNull(info, leftValues);
@ -2269,12 +2267,10 @@ public abstract class LuceneTestCase extends Assert {
// numOrds // numOrds
assertEquals(info, leftValues.getValueCount(), rightValues.getValueCount()); assertEquals(info, leftValues.getValueCount(), rightValues.getValueCount());
// ords // ords
BytesRef scratchLeft = new BytesRef();
BytesRef scratchRight = new BytesRef();
for (int i = 0; i < leftValues.getValueCount(); i++) { for (int i = 0; i < leftValues.getValueCount(); i++) {
leftValues.lookupOrd(i, scratchLeft); final BytesRef left = BytesRef.deepCopyOf(leftValues.lookupOrd(i));
rightValues.lookupOrd(i, scratchRight); final BytesRef right = rightValues.lookupOrd(i);
assertEquals(info, scratchLeft, scratchRight); assertEquals(info, left, right);
} }
// ord lists // ord lists
for(int docID=0;docID<leftReader.maxDoc();docID++) { for(int docID=0;docID<leftReader.maxDoc();docID++) {

View File

@ -50,7 +50,6 @@ public class FieldFacetAccumulator extends ValueAccumulator {
protected final boolean multiValued; protected final boolean multiValued;
protected final boolean numField; protected final boolean numField;
protected final boolean dateField; protected final boolean dateField;
protected final BytesRef value;
protected SortedSetDocValues setValues; protected SortedSetDocValues setValues;
protected SortedDocValues sortValues; protected SortedDocValues sortValues;
protected NumericDocValues numValues; protected NumericDocValues numValues;
@ -70,7 +69,6 @@ public class FieldFacetAccumulator extends ValueAccumulator {
this.numField = schemaField.getType().getNumericType()!=null; this.numField = schemaField.getType().getNumericType()!=null;
this.dateField = schemaField.getType().getClass().equals(TrieDateField.class); this.dateField = schemaField.getType().getClass().equals(TrieDateField.class);
this.parent = parent; this.parent = parent;
this.value = new BytesRef();
this.parser = AnalyticsParsers.getParser(schemaField.getType().getClass()); this.parser = AnalyticsParsers.getParser(schemaField.getType().getClass());
} }
@ -108,7 +106,7 @@ public class FieldFacetAccumulator extends ValueAccumulator {
int term; int term;
while ((term = (int)setValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((term = (int)setValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
exists = true; exists = true;
setValues.lookupOrd(term, value); final BytesRef value = setValues.lookupOrd(term);
parent.collectField(doc, name, parser.parse(value) ); parent.collectField(doc, name, parser.parse(value) );
} }
} }
@ -129,11 +127,11 @@ public class FieldFacetAccumulator extends ValueAccumulator {
} }
} else { } else {
if(sortValues != null) { if(sortValues != null) {
sortValues.get(doc,value); final int ord = sortValues.getOrd(doc);
if( BytesRef.EMPTY_BYTES == value.bytes ){ if (ord < 0) {
parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE ); parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE );
} else { } else {
parent.collectField(doc, name, parser.parse(value) ); parent.collectField(doc, name, parser.parse(sortValues.lookupOrd(ord)) );
} }
} else { } else {
parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE ); parent.collectField(doc, name, FacetingAccumulator.MISSING_VALUE );

View File

@ -218,7 +218,6 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
searcher.search(query, pfilter.filter, collector); searcher.search(query, pfilter.filter, collector);
IntObjectMap groups = groupExpandCollector.getGroups(); IntObjectMap groups = groupExpandCollector.getGroups();
Map<String, DocSlice> outMap = new HashMap<>(); Map<String, DocSlice> outMap = new HashMap<>();
BytesRef bytesRef = new BytesRef();
CharsRef charsRef = new CharsRef(); CharsRef charsRef = new CharsRef();
FieldType fieldType = searcher.getSchema().getField(field).getType(); FieldType fieldType = searcher.getSchema().getField(field).getType();
for (IntObjectCursor cursor : (Iterable<IntObjectCursor>) groups) { for (IntObjectCursor cursor : (Iterable<IntObjectCursor>) groups) {
@ -235,7 +234,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
scores[i] = scoreDoc.score; scores[i] = scoreDoc.score;
} }
DocSlice slice = new DocSlice(0, docs.length, docs, scores, topDocs.totalHits, topDocs.getMaxScore()); DocSlice slice = new DocSlice(0, docs.length, docs, scores, topDocs.totalHits, topDocs.getMaxScore());
values.lookupOrd(ord, bytesRef); final BytesRef bytesRef = values.lookupOrd(ord);
fieldType.indexedToReadable(bytesRef, charsRef); fieldType.indexedToReadable(bytesRef, charsRef);
String group = charsRef.toString(); String group = charsRef.toString();
outMap.put(group, slice); outMap.put(group, slice);

View File

@ -62,8 +62,6 @@ public class FieldFacetStats {
SortedDocValues topLevelSortedValues = null; SortedDocValues topLevelSortedValues = null;
private final BytesRef tempBR = new BytesRef();
public FieldFacetStats(SolrIndexSearcher searcher, String name, SchemaField field_sf, SchemaField facet_sf, boolean calcDistinct) { public FieldFacetStats(SolrIndexSearcher searcher, String name, SchemaField field_sf, SchemaField facet_sf, boolean calcDistinct) {
this.name = name; this.name = name;
this.field_sf = field_sf; this.field_sf = field_sf;
@ -106,14 +104,12 @@ public class FieldFacetStats {
int term = topLevelSortedValues.getOrd(docID); int term = topLevelSortedValues.getOrd(docID);
int arrIdx = term; int arrIdx = term;
if (arrIdx >= 0 && arrIdx < topLevelSortedValues.getValueCount()) { if (arrIdx >= 0 && arrIdx < topLevelSortedValues.getValueCount()) {
final BytesRef br; final String key;
if (term == -1) { if (term == -1) {
br = null; key = null;
} else { } else {
br = tempBR; key = topLevelSortedValues.lookupOrd(term).utf8ToString();
topLevelSortedValues.lookupOrd(term, tempBR);
} }
String key = br == null ? null : br.utf8ToString();
while (facetStatsTerms.size() <= statsTermNum) { while (facetStatsTerms.size() <= statsTermNum) {
facetStatsTerms.add(new HashMap<String, Integer>()); facetStatsTerms.add(new HashMap<String, Integer>());
} }

View File

@ -85,8 +85,6 @@ public class DocValuesFacets {
throw new UnsupportedOperationException("Currently this faceting method is limited to " + Integer.MAX_VALUE + " unique terms"); throw new UnsupportedOperationException("Currently this faceting method is limited to " + Integer.MAX_VALUE + " unique terms");
} }
final BytesRef br = new BytesRef();
final BytesRef prefixRef; final BytesRef prefixRef;
if (prefix == null) { if (prefix == null) {
prefixRef = null; prefixRef = null;
@ -132,7 +130,7 @@ public class DocValuesFacets {
if (multiValued) { if (multiValued) {
SortedSetDocValues sub = leaf.reader().getSortedSetDocValues(fieldName); SortedSetDocValues sub = leaf.reader().getSortedSetDocValues(fieldName);
if (sub == null) { if (sub == null) {
sub = DocValues.EMPTY_SORTED_SET; sub = DocValues.emptySortedSet();
} }
final SortedDocValues singleton = DocValues.unwrapSingleton(sub); final SortedDocValues singleton = DocValues.unwrapSingleton(sub);
if (singleton != null) { if (singleton != null) {
@ -144,7 +142,7 @@ public class DocValuesFacets {
} else { } else {
SortedDocValues sub = leaf.reader().getSortedDocValues(fieldName); SortedDocValues sub = leaf.reader().getSortedDocValues(fieldName);
if (sub == null) { if (sub == null) {
sub = DocValues.EMPTY_SORTED; sub = DocValues.emptySorted();
} }
accumSingle(counts, startTermIndex, sub, disi, subIndex, ordinalMap); accumSingle(counts, startTermIndex, sub, disi, subIndex, ordinalMap);
} }
@ -194,8 +192,8 @@ public class DocValuesFacets {
long pair = sorted[i]; long pair = sorted[i];
int c = (int)(pair >>> 32); int c = (int)(pair >>> 32);
int tnum = Integer.MAX_VALUE - (int)pair; int tnum = Integer.MAX_VALUE - (int)pair;
si.lookupOrd(startTermIndex+tnum, br); final BytesRef term = si.lookupOrd(startTermIndex+tnum);
ft.indexedToReadable(br, charsRef); ft.indexedToReadable(term, charsRef);
res.add(charsRef.toString(), c); res.add(charsRef.toString(), c);
} }
@ -213,8 +211,8 @@ public class DocValuesFacets {
int c = counts[i]; int c = counts[i];
if (c<mincount || --off>=0) continue; if (c<mincount || --off>=0) continue;
if (--lim<0) break; if (--lim<0) break;
si.lookupOrd(startTermIndex+i, br); final BytesRef term = si.lookupOrd(startTermIndex+i);
ft.indexedToReadable(br, charsRef); ft.indexedToReadable(term, charsRef);
res.add(charsRef.toString(), c); res.add(charsRef.toString(), c);
} }
} }

View File

@ -84,7 +84,7 @@ public class DocValuesStats {
} }
} }
if (si == null) { if (si == null) {
si = DocValues.EMPTY_SORTED_SET; si = DocValues.emptySortedSet();
} }
if (si.getValueCount() >= Integer.MAX_VALUE) { if (si.getValueCount() >= Integer.MAX_VALUE) {
throw new UnsupportedOperationException("Currently this stats method is limited to " + Integer.MAX_VALUE + " unique terms"); throw new UnsupportedOperationException("Currently this stats method is limited to " + Integer.MAX_VALUE + " unique terms");
@ -112,7 +112,7 @@ public class DocValuesStats {
if (multiValued) { if (multiValued) {
SortedSetDocValues sub = leaf.reader().getSortedSetDocValues(fieldName); SortedSetDocValues sub = leaf.reader().getSortedSetDocValues(fieldName);
if (sub == null) { if (sub == null) {
sub = DocValues.EMPTY_SORTED_SET; sub = DocValues.emptySortedSet();
} }
final SortedDocValues singleton = DocValues.unwrapSingleton(sub); final SortedDocValues singleton = DocValues.unwrapSingleton(sub);
if (singleton != null) { if (singleton != null) {
@ -124,7 +124,7 @@ public class DocValuesStats {
} else { } else {
SortedDocValues sub = leaf.reader().getSortedDocValues(fieldName); SortedDocValues sub = leaf.reader().getSortedDocValues(fieldName);
if (sub == null) { if (sub == null) {
sub = DocValues.EMPTY_SORTED; sub = DocValues.emptySorted();
} }
accumSingle(counts, docBase, facetStats, sub, disi, subIndex, ordinalMap); accumSingle(counts, docBase, facetStats, sub, disi, subIndex, ordinalMap);
} }
@ -132,11 +132,10 @@ public class DocValuesStats {
} }
// add results in index order // add results in index order
BytesRef value = new BytesRef();
for (int ord = 0; ord < counts.length; ord++) { for (int ord = 0; ord < counts.length; ord++) {
int count = counts[ord]; int count = counts[ord];
if (count > 0) { if (count > 0) {
si.lookupOrd(ord, value); final BytesRef value = si.lookupOrd(ord);
res.accumulate(value, count); res.accumulate(value, count);
for (FieldFacetStats f : facetStats) { for (FieldFacetStats f : facetStats) {
f.accumulateTermNum(ord, value); f.accumulateTermNum(ord, value);

View File

@ -193,11 +193,10 @@ class BoolFieldSource extends ValueSource {
// figure out what ord maps to true // figure out what ord maps to true
int nord = sindex.getValueCount(); int nord = sindex.getValueCount();
BytesRef br = new BytesRef();
// if no values in the segment, default trueOrd to something other then -1 (missing) // if no values in the segment, default trueOrd to something other then -1 (missing)
int tord = -2; int tord = -2;
for (int i=0; i<nord; i++) { for (int i=0; i<nord; i++) {
sindex.lookupOrd(i, br); final BytesRef br = sindex.lookupOrd(i);
if (br.length==1 && br.bytes[br.offset]=='T') { if (br.length==1 && br.bytes[br.offset]=='T') {
tord = i; tord = i;
break; break;

View File

@ -923,61 +923,9 @@ public class TestFaceting extends SolrTestCaseJ4 {
, "*[count(//lst[@name='facet_fields']/lst)=50]" , "*[count(//lst[@name='facet_fields']/lst)=50]"
, "*[count(//lst[@name='facet_fields']/lst/int)=100]" , "*[count(//lst[@name='facet_fields']/lst/int)=100]"
); );
// Now, are all the UnInvertedFields still the same? Meaning they weren't re-fetched even when a bunch were
// requested at the same time?
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui0, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f0_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui1, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f1_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui2, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f2_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui3, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f3_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui4, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f4_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui5, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f5_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui6, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f6_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui7, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f7_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui8, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f8_ws"));
assertEquals("UnInvertedField coming back from the seacher should not have changed! ",
ui9, DocValues.getSortedSet(currentSearcher.getAtomicReader(), "f9_ws"));
} finally { } finally {
currentSearcherRef.decref(); currentSearcherRef.decref();
} }
} }
// assert same instance: either same object, or both wrapping same single-valued object
private void assertEquals(String msg, SortedSetDocValues dv1, SortedSetDocValues dv2) {
SortedDocValues singleton1 = DocValues.unwrapSingleton(dv1);
SortedDocValues singleton2 = DocValues.unwrapSingleton(dv2);
if (singleton1 == null || singleton2 == null) {
// actually a multi-valued field
if (dv1 instanceof MultiDocValues.MultiSortedSetDocValues) {
// if we produced more than one segment, ensure the core ordinal map is the same object
assertTrue(dv2 instanceof MultiDocValues.MultiSortedSetDocValues);
assertSame(((MultiDocValues.MultiSortedSetDocValues) dv1).mapping,
((MultiDocValues.MultiSortedSetDocValues) dv2).mapping);
} else {
// otherwise, same atomic instance
assertSame(dv1, dv2);
}
} else {
// just wrapping a field that is actually single-valued
if (singleton1 instanceof MultiDocValues.MultiSortedDocValues) {
// if we produced more than one segment, ensure the core ordinal map is the same object
assertTrue(singleton2 instanceof MultiDocValues.MultiSortedDocValues);
assertSame(((MultiDocValues.MultiSortedDocValues) singleton1).mapping,
((MultiDocValues.MultiSortedDocValues) singleton2).mapping);
} else {
// otherwise, same atomic instance
assertSame(singleton1, singleton2);
}
}
}
} }