mirror of https://github.com/apache/lucene.git
LUCENE-5767: OrdinalMap optimizations.
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1602997 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 5755634db6
commit e7f81b4d6c
@@ -282,6 +282,9 @@ Optimizations
 
 * LUCENE-5751: Speed up MemoryDocValues. (Adrien Grand, Robert Muir)
 
+* LUCENE-5767: OrdinalMap optimizations, that mostly help on low cardinalities.
+  (Martijn van Groningen, Adrien Grand)
+
 Bug fixes
 
 * LUCENE-5738: Ensure NativeFSLock prevents opening the file channel for the
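Everything below follows from one API change in MultiDocValues.OrdinalMap: the per-lookup getGlobalOrd(int segmentIndex, long segmentOrd) method is replaced by getGlobalOrds(int segmentIndex), which returns an org.apache.lucene.util.LongValues that callers resolve once per segment and then query per ordinal. A minimal sketch of the new calling convention (map, segmentIndex and segmentOrd are placeholder names, not identifiers from this commit):

import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.util.LongValues;

final class GlobalOrdLookupSketch {
  // Before this commit: map.getGlobalOrd(segmentIndex, segmentOrd) on every lookup.
  // After this commit: fetch the per-segment mapping once, then reuse it.
  static long toGlobalOrd(OrdinalMap map, int segmentIndex, long segmentOrd) {
    final LongValues toGlobal = map.getGlobalOrds(segmentIndex); // hoist out of hot loops in real code
    return toGlobal.get(segmentOrd);
  }
}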
@@ -39,6 +39,7 @@ import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LongBitSet;
+import org.apache.lucene.util.LongValues;
 
 /**
  * Abstract API that consumes numeric, binary and
@@ -505,6 +506,7 @@ public abstract class DocValuesConsumer implements Closeable {
          int nextValue;
          AtomicReader currentReader;
          Bits currentLiveDocs;
+         LongValues currentMap;
          boolean nextIsSet;

          @Override
@@ -539,6 +541,7 @@ public abstract class DocValuesConsumer implements Closeable {
              if (readerUpto < readers.length) {
                currentReader = readers[readerUpto];
                currentLiveDocs = currentReader.getLiveDocs();
+               currentMap = map.getGlobalOrds(readerUpto);
              }
              docIDUpto = 0;
              continue;
@@ -547,7 +550,7 @@ public abstract class DocValuesConsumer implements Closeable {
            if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
              nextIsSet = true;
              int segOrd = dvs[readerUpto].getOrd(docIDUpto);
-             nextValue = segOrd == -1 ? -1 : (int) map.getGlobalOrd(readerUpto, segOrd);
+             nextValue = segOrd == -1 ? -1 : (int) currentMap.get(segOrd);
              docIDUpto++;
              return true;
            }
@@ -707,6 +710,7 @@ public abstract class DocValuesConsumer implements Closeable {
          long nextValue;
          AtomicReader currentReader;
          Bits currentLiveDocs;
+         LongValues currentMap;
          boolean nextIsSet;
          long ords[] = new long[8];
          int ordUpto;
@@ -751,6 +755,7 @@ public abstract class DocValuesConsumer implements Closeable {
              if (readerUpto < readers.length) {
                currentReader = readers[readerUpto];
                currentLiveDocs = currentReader.getLiveDocs();
+               currentMap = map.getGlobalOrds(readerUpto);
              }
              docIDUpto = 0;
              continue;
@@ -766,7 +771,7 @@ public abstract class DocValuesConsumer implements Closeable {
                if (ordLength == ords.length) {
                  ords = ArrayUtil.grow(ords, ordLength+1);
                }
-               ords[ordLength] = map.getGlobalOrd(readerUpto, ord);
+               ords[ordLength] = currentMap.get(ord);
                ordLength++;
              }
              docIDUpto++;
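The DocValuesConsumer change above is pure hoisting: instead of calling back into the OrdinalMap for every merged document, the iterator asks for the segment's LongValues once, when it advances to a new reader, and then uses it for every ordinal in that segment. A sketch of that pattern in isolation (map, dv, maxDoc and out are hypothetical inputs, not the actual DocValuesConsumer fields):

import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.LongValues;

final class SegmentRemapSketch {
  static void remapOrds(OrdinalMap map, int segmentIndex, SortedDocValues dv, int maxDoc, long[] out) {
    // resolved once per segment, not once per document
    final LongValues currentMap = map.getGlobalOrds(segmentIndex);
    for (int doc = 0; doc < maxDoc; ++doc) {
      final int segOrd = dv.getOrd(doc);
      out[doc] = segOrd == -1 ? -1 : currentMap.get(segOrd); // -1 means "no value", as in the diff above
    }
  }
}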
@@ -25,6 +25,7 @@ import org.apache.lucene.index.MultiTermsEnum.TermsEnumWithSlice;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.packed.AppendingPackedLongBuffer;
 import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
@@ -373,7 +374,7 @@ public class MultiDocValues {
       return new MultiSortedSetDocValues(values, starts, mapping);
     }
   }
 
   /** maps per-segment ordinals to/from global ordinal space */
   // TODO: use more efficient packed ints structures?
   // TODO: pull this out? its pretty generic (maps between N ord()-enabled TermsEnums)
@@ -387,8 +388,10 @@ public class MultiDocValues {
     final MonotonicAppendingLongBuffer globalOrdDeltas;
     // globalOrd -> first segment container
     final AppendingPackedLongBuffer firstSegments;
-    // for every segment, segmentOrd -> (globalOrd - segmentOrd)
-    final MonotonicAppendingLongBuffer ordDeltas[];
+    // for every segment, segmentOrd -> globalOrd
+    final LongValues segmentToGlobalOrds[];
+    // ram usage
+    final long ramBytesUsed;
 
     /**
      * Creates an ordinal map that allows mapping ords to/from a merged
@@ -398,16 +401,20 @@ public class MultiDocValues {
      * not be dense (e.g. can be FilteredTermsEnums}.
      * @throws IOException if an I/O error occurred.
      */
-    public OrdinalMap(Object owner, TermsEnum subs[]) throws IOException {
+    public OrdinalMap(Object owner, TermsEnum subs[], float acceptableOverheadRatio) throws IOException {
       // create the ordinal mappings by pulling a termsenum over each sub's
       // unique terms, and walking a multitermsenum over those
       this.owner = owner;
+      // even though we accept an overhead ratio, we keep these ones with COMPACT
+      // since they are only used to resolve values given a global ord, which is
+      // slow anyway
       globalOrdDeltas = new MonotonicAppendingLongBuffer(PackedInts.COMPACT);
       firstSegments = new AppendingPackedLongBuffer(PackedInts.COMPACT);
-      ordDeltas = new MonotonicAppendingLongBuffer[subs.length];
+      final MonotonicAppendingLongBuffer[] ordDeltas = new MonotonicAppendingLongBuffer[subs.length];
       for (int i = 0; i < ordDeltas.length; i++) {
-        ordDeltas[i] = new MonotonicAppendingLongBuffer();
+        ordDeltas[i] = new MonotonicAppendingLongBuffer(acceptableOverheadRatio);
       }
+      long[] ordDeltaBits = new long[subs.length];
       long segmentOrds[] = new long[subs.length];
       ReaderSlice slices[] = new ReaderSlice[subs.length];
       TermsEnumIndex indexes[] = new TermsEnumIndex[slices.length];
@@ -431,6 +438,7 @@ public class MultiDocValues {
         }
         // for each per-segment ord, map it back to the global term.
         while (segmentOrds[segmentIndex] <= segmentOrd) {
+          ordDeltaBits[segmentIndex] |= delta;
           ordDeltas[segmentIndex].add(delta);
           segmentOrds[segmentIndex]++;
         }
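The ordDeltaBits accumulator added above is a small trick worth spelling out: while the constructor streams each delta (globalOrd - segmentOrd) into the per-segment buffer, it also ORs the delta into one long per segment. A zero mask means every delta was zero, so segment ordinals already equal global ordinals; otherwise the mask's highest set bit bounds the width needed to store any delta in a plain packed array. A standalone illustration in plain Java (no Lucene types):

final class DeltaMaskSketch {
  /** Returns 0 if all deltas are 0 (identity mapping), otherwise the number of
   *  bits needed to store the largest delta (64 if any delta is negative). */
  static int bitsRequired(long[] deltas) {
    long mask = 0L;
    for (long delta : deltas) {
      mask |= delta; // one OR per value is enough to know the eventual bit width
    }
    if (mask == 0L) {
      return 0;  // segment ords == global ords
    } else if (mask < 0L) {
      return 64; // mirrors the "ordDeltaBits[i] < 0 ? 64 : ..." check in the diff
    } else {
      return 64 - Long.numberOfLeadingZeros(mask);
    }
  }
}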
@@ -442,14 +450,63 @@
       for (int i = 0; i < ordDeltas.length; ++i) {
         ordDeltas[i].freeze();
       }
+      // ordDeltas is typically the bottleneck, so let's see what we can do to make it faster
+      segmentToGlobalOrds = new LongValues[subs.length];
+      long ramBytesUsed = BASE_RAM_BYTES_USED + globalOrdDeltas.ramBytesUsed() + firstSegments.ramBytesUsed() + RamUsageEstimator.shallowSizeOf(segmentToGlobalOrds);
+      for (int i = 0; i < ordDeltas.length; ++i) {
+        final MonotonicAppendingLongBuffer deltas = ordDeltas[i];
+        if (ordDeltaBits[i] == 0L) {
+          // segment ords perfectly match global ordinals
+          // likely in case of low cardinalities and large segments
+          segmentToGlobalOrds[i] = LongValues.IDENTITY;
+        } else {
+          final int bitsRequired = ordDeltaBits[i] < 0 ? 64 : PackedInts.bitsRequired(ordDeltaBits[i]);
+          final long monotonicBits = deltas.ramBytesUsed() * 8;
+          final long packedBits = bitsRequired * deltas.size();
+          if (deltas.size() <= Integer.MAX_VALUE
+              && packedBits <= monotonicBits * (1 + acceptableOverheadRatio)) {
+            // monotonic compression mostly adds overhead, let's keep the mapping in plain packed ints
+            final int size = (int) deltas.size();
+            final PackedInts.Mutable newDeltas = PackedInts.getMutable(size, bitsRequired, acceptableOverheadRatio);
+            final MonotonicAppendingLongBuffer.Iterator it = deltas.iterator();
+            for (int ord = 0; ord < size; ++ord) {
+              newDeltas.set(ord, it.next());
+            }
+            assert !it.hasNext();
+            segmentToGlobalOrds[i] = new LongValues() {
+              @Override
+              public long get(long ord) {
+                return ord + newDeltas.get((int) ord);
+              }
+            };
+            ramBytesUsed += newDeltas.ramBytesUsed();
+          } else {
+            segmentToGlobalOrds[i] = new LongValues() {
+              @Override
+              public long get(long ord) {
+                return ord + deltas.get((int) ord);
+              }
+            };
+            ramBytesUsed += deltas.ramBytesUsed();
+          }
+          ramBytesUsed += RamUsageEstimator.shallowSizeOf(segmentToGlobalOrds[i]);
+        }
+      }
+      this.ramBytesUsed = ramBytesUsed;
     }
 
+    /** Create an {@link OrdinalMap} with the default overhead ratio.
+     *  @see #OrdinalMap(Object, TermsEnum[], float) */
+    public OrdinalMap(Object owner, TermsEnum subs[]) throws IOException {
+      this(owner, subs, PackedInts.DEFAULT);
+    }
+
     /**
-     * Given a segment number and segment ordinal, returns
-     * the corresponding global ordinal.
+     * Given a segment number, return a {@link LongValues} instance that maps
+     * segment ordinals to global ordinals.
      */
-    public long getGlobalOrd(int segmentIndex, long segmentOrd) {
-      return segmentOrd + ordDeltas[segmentIndex].get(segmentOrd);
+    public LongValues getGlobalOrds(int segmentIndex) {
+      return segmentToGlobalOrds[segmentIndex];
     }
 
     /**
@@ -477,11 +534,7 @@
 
     @Override
     public long ramBytesUsed() {
-      long size = BASE_RAM_BYTES_USED + globalOrdDeltas.ramBytesUsed() + firstSegments.ramBytesUsed() + RamUsageEstimator.shallowSizeOf(ordDeltas);
-      for (int i = 0; i < ordDeltas.length; i++) {
-        size += ordDeltas[i].ramBytesUsed();
-      }
-      return size;
+      return ramBytesUsed;
     }
   }
 
@@ -499,7 +552,7 @@
 
     /** Creates a new MultiSortedDocValues over <code>values</code> */
     MultiSortedDocValues(SortedDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
-      assert values.length == mapping.ordDeltas.length;
+      assert values.length == mapping.segmentToGlobalOrds.length;
       assert docStarts.length == values.length + 1;
       this.values = values;
       this.docStarts = docStarts;
@@ -510,7 +563,7 @@
     public int getOrd(int docID) {
       int subIndex = ReaderUtil.subIndex(docID, docStarts);
       int segmentOrd = values[subIndex].getOrd(docID - docStarts[subIndex]);
-      return segmentOrd == -1 ? segmentOrd : (int) mapping.getGlobalOrd(subIndex, segmentOrd);
+      return segmentOrd == -1 ? segmentOrd : (int) mapping.segmentToGlobalOrds[subIndex].get(segmentOrd);
     }
 
     @Override
@@ -541,7 +594,7 @@
 
     /** Creates a new MultiSortedSetDocValues over <code>values</code> */
     MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
-      assert values.length == mapping.ordDeltas.length;
+      assert values.length == mapping.segmentToGlobalOrds.length;
       assert docStarts.length == values.length + 1;
       this.values = values;
       this.docStarts = docStarts;
@@ -554,7 +607,7 @@
       if (segmentOrd == NO_MORE_ORDS) {
         return segmentOrd;
       } else {
-        return mapping.getGlobalOrd(currentSubIndex, segmentOrd);
+        return mapping.segmentToGlobalOrds[currentSubIndex].get(segmentOrd);
       }
     }
 
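Two things in the OrdinalMap hunks above deserve a usage note: callers can now trade memory for lookup speed through acceptableOverheadRatio, and the old two-argument constructor survives as a convenience that delegates with PackedInts.DEFAULT. A hedged sketch of how a caller might choose (subs stands for the per-segment TermsEnum array the caller already has; the PackedInts constants are the standard speed/space presets):

import java.io.IOException;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.packed.PackedInts;

final class OrdinalMapConstructionSketch {
  static OrdinalMap build(Object owner, TermsEnum[] subs, boolean favorSpeed) throws IOException {
    if (favorSpeed) {
      // accept more memory overhead so per-segment mappings can stay in faster packed structures
      return new OrdinalMap(owner, subs, PackedInts.FAST);
    }
    // equivalent to the two-argument convenience constructor added in this commit
    return new OrdinalMap(owner, subs, PackedInts.DEFAULT);
  }
}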
@@ -27,6 +27,16 @@ import org.apache.lucene.util.packed.PackedInts;
  * @lucene.internal */
 public abstract class LongValues extends NumericDocValues {
 
+  /** An instance that returns the provided value. */
+  public static final LongValues IDENTITY = new LongValues() {
+
+    @Override
+    public long get(long index) {
+      return index;
+    }
+
+  };
+
   /** Get value at <code>index</code>. */
   public abstract long get(long index);
 
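LongValues.IDENTITY is what makes the low-cardinality case nearly free: when every segment ordinal already equals its global ordinal, OrdinalMap hands out this shared singleton instead of allocating a per-segment structure, and a lookup reduces to returning its argument. A tiny illustrative check (names are placeholders):

import org.apache.lucene.util.LongValues;

final class IdentityMappingSketch {
  static boolean demo() {
    // what getGlobalOrds(i) returns when ordDeltaBits[i] == 0
    final LongValues toGlobal = LongValues.IDENTITY;
    return toGlobal.get(0) == 0 && toGlobal.get(42) == 42; // no storage, no arithmetic
  }
}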
@@ -30,6 +30,7 @@ import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
 import org.apache.lucene.index.MultiDocValues.OrdinalMap;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.RamUsageTester;
 import org.apache.lucene.util.TestUtil;
@@ -44,6 +45,10 @@ public class TestOrdinalMap extends LuceneTestCase {
       }
       return true;
     }
+
+    public boolean accept(Object o) {
+      return o != LongValues.IDENTITY;
+    }
   };
 
   public void testRamBytesUsed() throws IOException {
@@ -40,6 +40,7 @@ import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongValues;
 
 /** Compute facets counts from previously
  *  indexed {@link SortedSetDocValuesFacetField},
@@ -188,7 +189,8 @@ public class SortedSetDocValuesFacetCounts extends Facets {
     // temp ram req'ts (sum of number of ords across all
     // segs)
     if (ordinalMap != null) {
-      int segOrd = hits.context.ord;
+      final int segOrd = hits.context.ord;
+      final LongValues ordMap = ordinalMap.getGlobalOrds(segOrd);
 
       int numSegOrds = (int) segValues.getValueCount();
 
@@ -202,7 +204,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
           int term = (int) segValues.nextOrd();
           while (term != SortedSetDocValues.NO_MORE_ORDS) {
             //System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
-            counts[(int) ordinalMap.getGlobalOrd(segOrd, term)]++;
+            counts[(int) ordMap.get(term)]++;
             term = (int) segValues.nextOrd();
           }
         }
@@ -228,7 +230,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
           int count = segCounts[ord];
           if (count != 0) {
             //System.out.println(" migrate segOrd=" + segOrd + " ord=" + ord + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, ord));
-            counts[(int) ordinalMap.getGlobalOrd(segOrd, ord)] += count;
+            counts[(int) ordMap.get(ord)] += count;
           }
         }
       }
@@ -39,6 +39,10 @@ public final class RamUsageTester {
       return true;
     }
 
+    public boolean accept(Object o) {
+      return true;
+    }
+
   };
 
   /** A filter that allows to decide on what to take into account when measuring RAM usage. */
@@ -47,6 +51,9 @@ public final class RamUsageTester {
     /** Whether the provided field should be taken into account when measuring RAM usage. */
     boolean accept(Field field);
 
+    /** Whether the provided field value should be taken into account when measuring RAM usage. */
+    boolean accept(Object o);
+
   }
 
   /**
@@ -119,7 +126,7 @@ public final class RamUsageTester {
         // Push refs for traversal later.
         for (int i = len; --i >= 0 ;) {
           final Object o = Array.get(ob, i);
-          if (o != null && !seen.contains(o)) {
+          if (o != null && !seen.contains(o) && filter.accept(o)) {
            stack.add(o);
          }
        }
@@ -141,7 +148,7 @@ public final class RamUsageTester {
          if (filter.accept(f)) {
            // Fast path to eliminate redundancies.
            final Object o = f.get(ob);
-           if (o != null && !seen.contains(o)) {
+           if (o != null && !seen.contains(o) && filter.accept(o)) {
              stack.add(o);
            }
          }
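The new accept(Object) hook lets RAM-usage accounting skip whole object graphs rather than just fields; the TestOrdinalMap hunk earlier uses it to ignore the shared LongValues.IDENTITY singleton so measured and expected sizes line up. A sketch of a filter built on the extended interface, assuming it is the nested RamUsageTester.Filter interface whose methods are shown above:

import java.lang.reflect.Field;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.RamUsageTester;

final class RamFilterSketch {
  // assumed name: RamUsageTester.Filter, the interface extended in this commit
  static final RamUsageTester.Filter SKIP_SHARED_SINGLETONS = new RamUsageTester.Filter() {
    @Override
    public boolean accept(Field field) {
      return true; // account for every field...
    }
    @Override
    public boolean accept(Object o) {
      return o != LongValues.IDENTITY; // ...but never follow the shared identity mapping
    }
  };
}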
@@ -32,6 +32,7 @@ import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
+import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.UnicodeUtil;
 import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.util.NamedList;
@@ -247,11 +248,12 @@ public class DocValuesFacets {
 
   /** accumulates per-segment single-valued facet counts, mapping to global ordinal space on-the-fly */
   static void accumSingleGeneric(int counts[], int startTermIndex, SortedDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
+    final LongValues ordmap = map == null ? null : map.getGlobalOrds(subIndex);
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
       int term = si.getOrd(doc);
       if (map != null && term >= 0) {
-        term = (int) map.getGlobalOrd(subIndex, term);
+        term = (int) ordmap.get(term);
       }
       int arrIdx = term-startTermIndex;
       if (arrIdx>=0 && arrIdx<counts.length) counts[arrIdx]++;
@@ -293,6 +295,7 @@ public class DocValuesFacets {
 
   /** accumulates per-segment multi-valued facet counts, mapping to global ordinal space on-the-fly */
   static void accumMultiGeneric(int counts[], int startTermIndex, SortedSetDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
+    final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
       si.setDocument(doc);
@@ -307,7 +310,7 @@ public class DocValuesFacets {
 
       do {
         if (map != null) {
-          term = (int) map.getGlobalOrd(subIndex, term);
+          term = (int) ordMap.get(term);
         }
         int arrIdx = term-startTermIndex;
         if (arrIdx>=0 && arrIdx<counts.length) counts[arrIdx]++;
@@ -346,6 +349,7 @@ public class DocValuesFacets {
 
   /** folds counts in segment ordinal space (segCounts) into global ordinal space (counts) */
   static void migrateGlobal(int counts[], int segCounts[], int subIndex, OrdinalMap map) {
+    final LongValues ordMap = map.getGlobalOrds(subIndex);
     // missing count
     counts[0] += segCounts[0];
 
@@ -353,7 +357,7 @@ public class DocValuesFacets {
     for (int ord = 1; ord < segCounts.length; ord++) {
       int count = segCounts[ord];
       if (count != 0) {
-        counts[1+(int) map.getGlobalOrd(subIndex, ord-1)] += count;
+        counts[1+(int) ordMap.get(ord-1)] += count;
       }
     }
   }
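migrateGlobal above is the second half of a two-phase pattern the faceting code uses: count hits against cheap per-segment ordinals first, then make a single pass over the (usually small) segment ordinal space to fold those counts into the global array through the segment's LongValues. A standalone sketch of the fold step (plain arrays and hypothetical names; the real methods also handle a missing-count slot and offset bookkeeping):

import org.apache.lucene.util.LongValues;

final class CountMigrationSketch {
  /** Adds per-segment counts into the global-ordinal counts array. */
  static void fold(int[] globalCounts, int[] segCounts, LongValues segToGlobal) {
    for (int segOrd = 0; segOrd < segCounts.length; ++segOrd) {
      final int count = segCounts[segOrd];
      if (count != 0) { // most entries are zero, skip them
        globalCounts[(int) segToGlobal.get(segOrd)] += count;
      }
    }
  }
}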
@@ -35,6 +35,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongValues;
 import org.apache.solr.handler.component.FieldFacetStats;
 import org.apache.solr.handler.component.StatsValues;
 import org.apache.solr.handler.component.StatsValuesFactory;
@@ -161,12 +162,13 @@ public class DocValuesStats {
 
   /** accumulates per-segment single-valued stats */
   static void accumSingle(int counts[], int docBase, FieldFacetStats[] facetStats, SortedDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
+    final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
       int term = si.getOrd(doc);
       if (term >= 0) {
         if (map != null) {
-          term = (int) map.getGlobalOrd(subIndex, term);
+          term = (int) ordMap.get(term);
         }
         counts[term]++;
         for (FieldFacetStats f : facetStats) {
@@ -178,6 +180,7 @@ public class DocValuesStats {
 
   /** accumulates per-segment multi-valued stats */
   static void accumMulti(int counts[], int docBase, FieldFacetStats[] facetStats, SortedSetDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
+    final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
     int doc;
     while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
       si.setDocument(doc);
@@ -185,7 +188,7 @@ public class DocValuesStats {
       while ((ord = si.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
         int term = (int) ord;
         if (map != null) {
-          term = (int) map.getGlobalOrd(subIndex, term);
+          term = (int) ordMap.get(term);
         }
         counts[term]++;
         for (FieldFacetStats f : facetStats) {