mirror of https://github.com/apache/lucene.git
LUCENE-8834: Cache the SortedNumericDocValues.docValueCount() value whenever it is used in a loop (#698)
commit 97ca9df7ef (parent fe58b6f3a2)
Changed paths:
  lucene/join/src/java/org/apache/lucene/search/join
  solr/core/src/java/org/apache/solr
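
The change is the same in every hunk below: a per-document loop that re-evaluated SortedNumericDocValues.docValueCount() in its loop condition now reads the count into a local once, since the count cannot change while the iterator stays positioned on one document. A minimal sketch of the resulting access pattern (the helper class and method names are illustrative, not part of the commit):

    // Minimal sketch (not from the commit): read a doc's values with the count hoisted out of the loop.
    import java.io.IOException;

    import org.apache.lucene.index.SortedNumericDocValues;

    final class DocValueCountCachingSketch {

      /** Returns every value of {@code doc}, or an empty array if the doc has no values. */
      static long[] valuesForDoc(SortedNumericDocValues dv, int doc) throws IOException {
        if (!dv.advanceExact(doc)) {
          return new long[0];
        }
        final int count = dv.docValueCount(); // fixed once the iterator is positioned on the doc
        final long[] values = new long[count];
        for (int i = 0; i < count; i++) {
          values[i] = dv.nextValue(); // values are returned in non-decreasing order
        }
        return values;
      }
    }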
@@ -200,7 +200,7 @@ public final class JoinUtil {
     @Override
     public void collect(int doc) throws IOException {
       if (sortedNumericDocValues.advanceExact(doc)) {
-        for (int i = 0; i < sortedNumericDocValues.docValueCount(); i++) {
+        for (int i = 0, count = sortedNumericDocValues.docValueCount(); i < count; i++) {
           long value = sortedNumericDocValues.nextValue();
           joinValues.add(value);
           if (needsScore) {
@@ -49,7 +49,7 @@ public class SortedDateStatsValues implements StatsValues {
     if (!sndv.advanceExact(docId)) {
       missing();
     } else {
-      for (int i = 0 ; i < sndv.docValueCount(); i++) {
+      for (int i = 0, count = sndv.docValueCount(); i < count; i++) {
         dsv.accumulate(new Date(sndv.nextValue()), 1);
       }
     }
@@ -51,7 +51,7 @@ public class SortedNumericStatsValues implements StatsValues {
     if (!sndv.advanceExact(docId)) {
       missing();
     } else {
-      for (int i = 0 ; i < sndv.docValueCount(); i++) {
+      for (int i = 0, count = sndv.docValueCount(); i < count; i++) {
        nsv.accumulate(toCorrectType(sndv.nextValue()), 1);
       }
     }
@@ -60,7 +60,7 @@ class MultiFieldWriter extends FieldWriter {
     if (!vals.advanceExact(sortDoc.docId)) return false;
     out.put(this.field,
         (IteratorWriter) w -> {
-          for (int i = 0; i < vals.docValueCount(); i++) {
+          for (int i = 0, count = vals.docValueCount(); i < count; i++) {
             w.add(bitsToValue.apply(vals.nextValue()));
           }
         });
@@ -310,9 +310,10 @@ public class IntervalFacets implements Iterable<FacetInterval> {
   private void accumIntervalWithMultipleValues(SortedNumericDocValues longs) throws IOException {
     // longs should be already positioned to the correct doc
     assert longs.docID() != -1;
-    assert longs.docValueCount() > 0: "Should have at least one value for this document";
+    final int docValueCount = longs.docValueCount();
+    assert docValueCount > 0: "Should have at least one value for this document";
     int currentInterval = 0;
-    for (int i = 0; i < longs.docValueCount(); i++) {
+    for (int i = 0; i < docValueCount; i++) {
       boolean evaluateNextInterval = true;
       long value = longs.nextValue();
       while (evaluateNextInterval && currentInterval < intervals.length) {
@@ -436,7 +436,7 @@ final class NumericFacets {
     if (valuesDocID == doc - ctx.docBase) {
       long l = longs.nextValue(); // This document must have at least one value
       hashTable.add(l, 1);
-      for (int i = 1; i < longs.docValueCount(); i++) {
+      for (int i = 1, count = longs.docValueCount(); i < count; i++) {
        long lnew = longs.nextValue();
        if (lnew > l) { // Skip the value if it's equal to the last one, we don't want to double-count it
          hashTable.add(lnew, 1);
@@ -564,8 +564,9 @@ public class SolrDocumentFetcher {
       case SORTED_NUMERIC:
         final SortedNumericDocValues numericDv = leafReader.getSortedNumericDocValues(fieldName);
         if (numericDv != null && numericDv.advance(localId) == localId) {
-          final List<Object> outValues = new ArrayList<>(numericDv.docValueCount());
-          for (int i = 0; i < numericDv.docValueCount(); i++) {
+          final int docValueCount = numericDv.docValueCount();
+          final List<Object> outValues = new ArrayList<>(docValueCount);
+          for (int i = 0; i < docValueCount; i++) {
             long number = numericDv.nextValue();
             Object value = decodeNumberFromDV(schemaField, number, true);
             // return immediately if the number is not decodable, hence won't return an empty list.
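
In the SolrDocumentFetcher hunk the cached count does double duty: it sizes the output ArrayList and bounds the loop, so docValueCount() is called once instead of once per iteration plus once for the constructor. A sketch of that shape, with a generic LongFunction standing in for decodeNumberFromDV (which is not reproduced in this diff):

    // Sketch (not the Solr code): the cached count both sizes the list and bounds the loop.
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.LongFunction;

    import org.apache.lucene.index.SortedNumericDocValues;

    final class PresizedValuesSketch {

      /** Decodes every value of the already-positioned document into a pre-sized list. */
      static List<Object> readValues(SortedNumericDocValues dv, LongFunction<Object> decode) throws IOException {
        final int docValueCount = dv.docValueCount();             // read once
        final List<Object> out = new ArrayList<>(docValueCount);  // reused to size the list
        for (int i = 0; i < docValueCount; i++) {                 // and to bound the loop
          out.add(decode.apply(dv.nextValue()));
        }
        return out;
      }
    }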
@@ -387,7 +387,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
     if (values.advanceExact(segDoc)) {
       long l = values.nextValue(); // This document must have at least one value
       collectValFirstPhase(segDoc, l);
-      for (int i = 1; i < values.docValueCount(); i++) {
+      for (int i = 1, count = values.docValueCount(); i < count; i++) {
        long lnew = values.nextValue();
        if (lnew != l) { // Skip the value if it's equal to the last one, we don't want to double-count it
          collectValFirstPhase(segDoc, lnew);
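
The two faceting hunks (NumericFacets and FacetFieldProcessorByHashDV above) consume the first value before the loop, start at i = 1, and skip values that repeat, relying on the fact that a document's values come back sorted. A sketch of that duplicate-skipping idiom, under the assumption that the previous value is tracked as the loop advances (the diff windows above do not show the full loop bodies):

    // Sketch (not the Solr code): skip adjacent duplicates while consuming a doc's sorted values.
    import java.io.IOException;
    import java.util.function.LongConsumer;

    import org.apache.lucene.index.SortedNumericDocValues;

    final class DistinctValuesSketch {

      /** Feeds each distinct value of the already-positioned document to {@code accumulate}. */
      static void collectDistinct(SortedNumericDocValues dv, LongConsumer accumulate) throws IOException {
        final int count = dv.docValueCount(); // hoisted, as in the commit
        long prev = dv.nextValue();           // a positioned doc has at least one value
        accumulate.accept(prev);
        for (int i = 1; i < count; i++) {
          long next = dv.nextValue();
          if (next != prev) {                 // values are sorted, so duplicates are adjacent
            accumulate.accept(next);
            prev = next;
          }
        }
      }
    }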
@@ -237,7 +237,7 @@ public class HLLAgg extends StrAggValueSource {
 
     @Override
     protected void collectValues(int doc, HLL hll) throws IOException {
-      for (int i = 0; i < values.docValueCount(); i++) {
+      for (int i = 0, count = values.docValueCount(); i < count; i++) {
        // duplicates may be produced for a single doc, but won't matter here.
        long val = values.nextValue();
        long hash = Hash.fmix64(val);
@@ -254,7 +254,7 @@ public class UniqueAgg extends StrAggValueSource {
 
     @Override
     protected void collectValues(int doc, LongSet set) throws IOException {
-      for (int i = 0; i < values.docValueCount(); i++) {
+      for (int i = 0, count = values.docValueCount(); i < count; i++) {
        // duplicates may be produced for a single doc, but won't matter here.
        set.add(values.nextValue());
       }
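
The HLLAgg and UniqueAgg hunks accumulate into structures that deduplicate on their own (an HLL sketch and a long set), which is why their comment notes that per-document duplicates do not matter. A sketch of the same shape using java.util.Set in place of Solr's LongSet (not shown in this diff); here the cached count sits in the loop header, as in most hunks of this commit:

    // Sketch (not the Solr code): a self-deduplicating sink makes per-doc duplicates harmless.
    import java.io.IOException;
    import java.util.Set;

    import org.apache.lucene.index.SortedNumericDocValues;

    final class UniqueValuesSketch {

      /** Adds every value of the already-positioned document to {@code seen}; the set absorbs duplicates. */
      static void collectValues(SortedNumericDocValues dv, Set<Long> seen) throws IOException {
        for (int i = 0, count = dv.docValueCount(); i < count; i++) { // count cached in the loop header
          seen.add(dv.nextValue());
        }
      }
    }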