LUCENE-5752: merge trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5752@1603107 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2014-06-17 09:41:54 +00:00
commit a135294eda
17 changed files with 236 additions and 44 deletions

View File

@ -282,6 +282,12 @@ Optimizations
* LUCENE-5751: Speed up MemoryDocValues. (Adrien Grand, Robert Muir) * LUCENE-5751: Speed up MemoryDocValues. (Adrien Grand, Robert Muir)
* LUCENE-5767: OrdinalMap optimizations, that mostly help on low cardinalities.
(Martijn van Groningen, Adrien Grand)
* LUCENE-5769: SingletonSortedSetDocValues now supports random access ordinals.
(Robert Muir)
Bug fixes Bug fixes
* LUCENE-5738: Ensure NativeFSLock prevents opening the file channel for the * LUCENE-5738: Ensure NativeFSLock prevents opening the file channel for the
@ -327,6 +333,9 @@ Bug fixes
* LUCENE-5747: Project-specific settings for the eclipse development * LUCENE-5747: Project-specific settings for the eclipse development
environment will prevent automatic code reformatting. (Shawn Heisey) environment will prevent automatic code reformatting. (Shawn Heisey)
* LUCENE-5768: Hunspell condition checks containing character classes
were buggy. (Clinton Gormley, Robert Muir)
Test Framework Test Framework
* LUCENE-5622: Fail tests if they print over the given limit of bytes to * LUCENE-5622: Fail tests if they print over the given limit of bytes to

View File

@ -420,7 +420,7 @@ public class Dictionary {
String condition = ruleArgs.length > 4 ? ruleArgs[4] : "."; String condition = ruleArgs.length > 4 ? ruleArgs[4] : ".";
// at least the gascon affix file has this issue // at least the gascon affix file has this issue
if (condition.startsWith("[") && !condition.endsWith("]")) { if (condition.startsWith("[") && condition.indexOf(']') == -1) {
condition = condition + "]"; condition = condition + "]";
} }
// "dash hasn't got special meaning" (we must escape it) // "dash hasn't got special meaning" (we must escape it)

View File

@ -0,0 +1,32 @@
package org.apache.lucene.analysis.hunspell;
import org.junit.BeforeClass;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Regression test for LUCENE-5768: Hunspell condition checks containing
 * character classes (e.g. {@code [^aeiou]y}) were buggy.
 * Uses the condition2.aff / condition2.dic fixtures added alongside this test.
 */
public class TestCondition2 extends StemmerTestBase {
@BeforeClass
public static void beforeClass() throws Exception {
// load the affix and dictionary fixture files once for the whole class
init("condition2.aff", "condition2.dic");
}
public void testStemming() {
// the SFX S rule (y -> ies when preceded by a consonant, per the
// "[^aeiou]y" condition in condition2.aff) must match here
assertStemsTo("monopolies", "monopoly");
}
}

View File

@ -0,0 +1,5 @@
SET ISO8859-1
TRY esianrtolcdugmphbyfvkwzESIANRTOLCDUGMPHBYFVKWZ'
SFX S Y 1
SFX S y ies [^aeiou]y

View File

@ -0,0 +1,2 @@
1
monopoly/S

View File

@ -39,6 +39,7 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet; import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.LongValues;
/** /**
* Abstract API that consumes numeric, binary and * Abstract API that consumes numeric, binary and
@ -505,6 +506,7 @@ public abstract class DocValuesConsumer implements Closeable {
int nextValue; int nextValue;
AtomicReader currentReader; AtomicReader currentReader;
Bits currentLiveDocs; Bits currentLiveDocs;
LongValues currentMap;
boolean nextIsSet; boolean nextIsSet;
@Override @Override
@ -539,6 +541,7 @@ public abstract class DocValuesConsumer implements Closeable {
if (readerUpto < readers.length) { if (readerUpto < readers.length) {
currentReader = readers[readerUpto]; currentReader = readers[readerUpto];
currentLiveDocs = currentReader.getLiveDocs(); currentLiveDocs = currentReader.getLiveDocs();
currentMap = map.getGlobalOrds(readerUpto);
} }
docIDUpto = 0; docIDUpto = 0;
continue; continue;
@ -547,7 +550,7 @@ public abstract class DocValuesConsumer implements Closeable {
if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) { if (currentLiveDocs == null || currentLiveDocs.get(docIDUpto)) {
nextIsSet = true; nextIsSet = true;
int segOrd = dvs[readerUpto].getOrd(docIDUpto); int segOrd = dvs[readerUpto].getOrd(docIDUpto);
nextValue = segOrd == -1 ? -1 : (int) map.getGlobalOrd(readerUpto, segOrd); nextValue = segOrd == -1 ? -1 : (int) currentMap.get(segOrd);
docIDUpto++; docIDUpto++;
return true; return true;
} }
@ -707,6 +710,7 @@ public abstract class DocValuesConsumer implements Closeable {
long nextValue; long nextValue;
AtomicReader currentReader; AtomicReader currentReader;
Bits currentLiveDocs; Bits currentLiveDocs;
LongValues currentMap;
boolean nextIsSet; boolean nextIsSet;
long ords[] = new long[8]; long ords[] = new long[8];
int ordUpto; int ordUpto;
@ -751,6 +755,7 @@ public abstract class DocValuesConsumer implements Closeable {
if (readerUpto < readers.length) { if (readerUpto < readers.length) {
currentReader = readers[readerUpto]; currentReader = readers[readerUpto];
currentLiveDocs = currentReader.getLiveDocs(); currentLiveDocs = currentReader.getLiveDocs();
currentMap = map.getGlobalOrds(readerUpto);
} }
docIDUpto = 0; docIDUpto = 0;
continue; continue;
@ -766,7 +771,7 @@ public abstract class DocValuesConsumer implements Closeable {
if (ordLength == ords.length) { if (ordLength == ords.length) {
ords = ArrayUtil.grow(ords, ordLength+1); ords = ArrayUtil.grow(ords, ordLength+1);
} }
ords[ordLength] = map.getGlobalOrd(readerUpto, ord); ords[ordLength] = currentMap.get(ord);
ordLength++; ordLength++;
} }
docIDUpto++; docIDUpto++;

View File

@ -25,6 +25,7 @@ import org.apache.lucene.index.MultiTermsEnum.TermsEnumWithSlice;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.AppendingPackedLongBuffer; import org.apache.lucene.util.packed.AppendingPackedLongBuffer;
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer; import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
@ -373,7 +374,7 @@ public class MultiDocValues {
return new MultiSortedSetDocValues(values, starts, mapping); return new MultiSortedSetDocValues(values, starts, mapping);
} }
} }
/** maps per-segment ordinals to/from global ordinal space */ /** maps per-segment ordinals to/from global ordinal space */
// TODO: use more efficient packed ints structures? // TODO: use more efficient packed ints structures?
// TODO: pull this out? its pretty generic (maps between N ord()-enabled TermsEnums) // TODO: pull this out? its pretty generic (maps between N ord()-enabled TermsEnums)
@ -387,8 +388,10 @@ public class MultiDocValues {
final MonotonicAppendingLongBuffer globalOrdDeltas; final MonotonicAppendingLongBuffer globalOrdDeltas;
// globalOrd -> first segment container // globalOrd -> first segment container
final AppendingPackedLongBuffer firstSegments; final AppendingPackedLongBuffer firstSegments;
// for every segment, segmentOrd -> (globalOrd - segmentOrd) // for every segment, segmentOrd -> globalOrd
final MonotonicAppendingLongBuffer ordDeltas[]; final LongValues segmentToGlobalOrds[];
// ram usage
final long ramBytesUsed;
/** /**
* Creates an ordinal map that allows mapping ords to/from a merged * Creates an ordinal map that allows mapping ords to/from a merged
@ -398,16 +401,20 @@ public class MultiDocValues {
* not be dense (e.g. can be FilteredTermsEnums}. * not be dense (e.g. can be FilteredTermsEnums}.
* @throws IOException if an I/O error occurred. * @throws IOException if an I/O error occurred.
*/ */
public OrdinalMap(Object owner, TermsEnum subs[]) throws IOException { public OrdinalMap(Object owner, TermsEnum subs[], float acceptableOverheadRatio) throws IOException {
// create the ordinal mappings by pulling a termsenum over each sub's // create the ordinal mappings by pulling a termsenum over each sub's
// unique terms, and walking a multitermsenum over those // unique terms, and walking a multitermsenum over those
this.owner = owner; this.owner = owner;
// even though we accept an overhead ratio, we keep these ones with COMPACT
// since they are only used to resolve values given a global ord, which is
// slow anyway
globalOrdDeltas = new MonotonicAppendingLongBuffer(PackedInts.COMPACT); globalOrdDeltas = new MonotonicAppendingLongBuffer(PackedInts.COMPACT);
firstSegments = new AppendingPackedLongBuffer(PackedInts.COMPACT); firstSegments = new AppendingPackedLongBuffer(PackedInts.COMPACT);
ordDeltas = new MonotonicAppendingLongBuffer[subs.length]; final MonotonicAppendingLongBuffer[] ordDeltas = new MonotonicAppendingLongBuffer[subs.length];
for (int i = 0; i < ordDeltas.length; i++) { for (int i = 0; i < ordDeltas.length; i++) {
ordDeltas[i] = new MonotonicAppendingLongBuffer(); ordDeltas[i] = new MonotonicAppendingLongBuffer(acceptableOverheadRatio);
} }
long[] ordDeltaBits = new long[subs.length];
long segmentOrds[] = new long[subs.length]; long segmentOrds[] = new long[subs.length];
ReaderSlice slices[] = new ReaderSlice[subs.length]; ReaderSlice slices[] = new ReaderSlice[subs.length];
TermsEnumIndex indexes[] = new TermsEnumIndex[slices.length]; TermsEnumIndex indexes[] = new TermsEnumIndex[slices.length];
@ -431,6 +438,7 @@ public class MultiDocValues {
} }
// for each per-segment ord, map it back to the global term. // for each per-segment ord, map it back to the global term.
while (segmentOrds[segmentIndex] <= segmentOrd) { while (segmentOrds[segmentIndex] <= segmentOrd) {
ordDeltaBits[segmentIndex] |= delta;
ordDeltas[segmentIndex].add(delta); ordDeltas[segmentIndex].add(delta);
segmentOrds[segmentIndex]++; segmentOrds[segmentIndex]++;
} }
@ -442,14 +450,62 @@ public class MultiDocValues {
for (int i = 0; i < ordDeltas.length; ++i) { for (int i = 0; i < ordDeltas.length; ++i) {
ordDeltas[i].freeze(); ordDeltas[i].freeze();
} }
// ordDeltas is typically the bottleneck, so let's see what we can do to make it faster
segmentToGlobalOrds = new LongValues[subs.length];
long ramBytesUsed = BASE_RAM_BYTES_USED + globalOrdDeltas.ramBytesUsed() + firstSegments.ramBytesUsed() + RamUsageEstimator.shallowSizeOf(segmentToGlobalOrds);
for (int i = 0; i < ordDeltas.length; ++i) {
final MonotonicAppendingLongBuffer deltas = ordDeltas[i];
if (ordDeltaBits[i] == 0L) {
// segment ords perfectly match global ordinals
// likely in case of low cardinalities and large segments
segmentToGlobalOrds[i] = LongValues.IDENTITY;
} else {
final int bitsRequired = ordDeltaBits[i] < 0 ? 64 : PackedInts.bitsRequired(ordDeltaBits[i]);
final long monotonicBits = deltas.ramBytesUsed() * 8;
final long packedBits = bitsRequired * deltas.size();
if (deltas.size() <= Integer.MAX_VALUE
&& packedBits <= monotonicBits * (1 + acceptableOverheadRatio)) {
// monotonic compression mostly adds overhead, let's keep the mapping in plain packed ints
final int size = (int) deltas.size();
final PackedInts.Mutable newDeltas = PackedInts.getMutable(size, bitsRequired, acceptableOverheadRatio);
final MonotonicAppendingLongBuffer.Iterator it = deltas.iterator();
for (int ord = 0; ord < size; ++ord) {
newDeltas.set(ord, it.next());
}
assert !it.hasNext();
segmentToGlobalOrds[i] = new LongValues() {
@Override
public long get(long ord) {
return ord + newDeltas.get((int) ord);
}
};
ramBytesUsed += newDeltas.ramBytesUsed();
} else {
segmentToGlobalOrds[i] = new LongValues() {
@Override
public long get(long ord) {
return ord + deltas.get((int) ord);
}
};
ramBytesUsed += deltas.ramBytesUsed();
}
ramBytesUsed += RamUsageEstimator.shallowSizeOf(segmentToGlobalOrds[i]);
}
}
this.ramBytesUsed = ramBytesUsed;
} }
/** Create an {@link OrdinalMap} with the default overhead ratio. */
public OrdinalMap(Object owner, TermsEnum subs[]) throws IOException {
this(owner, subs, PackedInts.DEFAULT);
}
/** /**
* Given a segment number and segment ordinal, returns * Given a segment number, return a {@link LongValues} instance that maps
* the corresponding global ordinal. * segment ordinals to global ordinals.
*/ */
public long getGlobalOrd(int segmentIndex, long segmentOrd) { public LongValues getGlobalOrds(int segmentIndex) {
return segmentOrd + ordDeltas[segmentIndex].get(segmentOrd); return segmentToGlobalOrds[segmentIndex];
} }
/** /**
@ -477,11 +533,7 @@ public class MultiDocValues {
@Override @Override
public long ramBytesUsed() { public long ramBytesUsed() {
long size = BASE_RAM_BYTES_USED + globalOrdDeltas.ramBytesUsed() + firstSegments.ramBytesUsed() + RamUsageEstimator.shallowSizeOf(ordDeltas); return ramBytesUsed;
for (int i = 0; i < ordDeltas.length; i++) {
size += ordDeltas[i].ramBytesUsed();
}
return size;
} }
} }
@ -499,7 +551,7 @@ public class MultiDocValues {
/** Creates a new MultiSortedDocValues over <code>values</code> */ /** Creates a new MultiSortedDocValues over <code>values</code> */
MultiSortedDocValues(SortedDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException { MultiSortedDocValues(SortedDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
assert values.length == mapping.ordDeltas.length; assert values.length == mapping.segmentToGlobalOrds.length;
assert docStarts.length == values.length + 1; assert docStarts.length == values.length + 1;
this.values = values; this.values = values;
this.docStarts = docStarts; this.docStarts = docStarts;
@ -510,7 +562,7 @@ public class MultiDocValues {
public int getOrd(int docID) { public int getOrd(int docID) {
int subIndex = ReaderUtil.subIndex(docID, docStarts); int subIndex = ReaderUtil.subIndex(docID, docStarts);
int segmentOrd = values[subIndex].getOrd(docID - docStarts[subIndex]); int segmentOrd = values[subIndex].getOrd(docID - docStarts[subIndex]);
return segmentOrd == -1 ? segmentOrd : (int) mapping.getGlobalOrd(subIndex, segmentOrd); return segmentOrd == -1 ? segmentOrd : (int) mapping.segmentToGlobalOrds[subIndex].get(segmentOrd);
} }
@Override @Override
@ -541,7 +593,7 @@ public class MultiDocValues {
/** Creates a new MultiSortedSetDocValues over <code>values</code> */ /** Creates a new MultiSortedSetDocValues over <code>values</code> */
MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException { MultiSortedSetDocValues(SortedSetDocValues values[], int docStarts[], OrdinalMap mapping) throws IOException {
assert values.length == mapping.ordDeltas.length; assert values.length == mapping.segmentToGlobalOrds.length;
assert docStarts.length == values.length + 1; assert docStarts.length == values.length + 1;
this.values = values; this.values = values;
this.docStarts = docStarts; this.docStarts = docStarts;
@ -554,7 +606,7 @@ public class MultiDocValues {
if (segmentOrd == NO_MORE_ORDS) { if (segmentOrd == NO_MORE_ORDS) {
return segmentOrd; return segmentOrd;
} else { } else {
return mapping.getGlobalOrd(currentSubIndex, segmentOrd); return mapping.segmentToGlobalOrds[currentSubIndex].get(segmentOrd);
} }
} }

View File

@ -25,10 +25,10 @@ import org.apache.lucene.util.BytesRef;
* This can be used if you want to have one multi-valued implementation * This can be used if you want to have one multi-valued implementation
* that works for single or multi-valued types. * that works for single or multi-valued types.
*/ */
final class SingletonSortedSetDocValues extends SortedSetDocValues { final class SingletonSortedSetDocValues extends RandomAccessOrds {
private final SortedDocValues in; private final SortedDocValues in;
private int docID; private long currentOrd;
private boolean set; private long ord;
/** Creates a multi-valued view over the provided SortedDocValues */ /** Creates a multi-valued view over the provided SortedDocValues */
public SingletonSortedSetDocValues(SortedDocValues in) { public SingletonSortedSetDocValues(SortedDocValues in) {
@ -43,18 +43,14 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
@Override @Override
public long nextOrd() { public long nextOrd() {
if (set) { long v = currentOrd;
return NO_MORE_ORDS; currentOrd = NO_MORE_ORDS;
} else { return v;
set = true;
return in.getOrd(docID);
}
} }
@Override @Override
public void setDocument(int docID) { public void setDocument(int docID) {
this.docID = docID; currentOrd = ord = in.getOrd(docID);
set = false;
} }
@Override @Override
@ -72,4 +68,19 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
public long lookupTerm(BytesRef key) { public long lookupTerm(BytesRef key) {
return in.lookupTerm(key); return in.lookupTerm(key);
} }
@Override
public long ordAt(int index) {
return ord;
}
@Override
public int cardinality() {
return (int) (ord >>> 63) ^ 1;
}
@Override
public TermsEnum termsEnum() {
return in.termsEnum();
}
} }

View File

@ -27,6 +27,16 @@ import org.apache.lucene.util.packed.PackedInts;
* @lucene.internal */ * @lucene.internal */
public abstract class LongValues extends NumericDocValues { public abstract class LongValues extends NumericDocValues {
/** An instance that returns the provided value. */
public static final LongValues IDENTITY = new LongValues() {
@Override
public long get(long index) {
return index;
}
};
/** Get value at <code>index</code>. */ /** Get value at <code>index</code>. */
public abstract long get(long index); public abstract long get(long index);

View File

@ -30,6 +30,7 @@ import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
import org.apache.lucene.index.MultiDocValues.OrdinalMap; import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.RamUsageTester; import org.apache.lucene.util.RamUsageTester;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
@ -44,6 +45,10 @@ public class TestOrdinalMap extends LuceneTestCase {
} }
return true; return true;
} }
public boolean accept(Object o) {
return o != LongValues.IDENTITY;
}
}; };
public void testRamBytesUsed() throws IOException { public void testRamBytesUsed() throws IOException {

View File

@ -40,6 +40,7 @@ import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
/** Compute facets counts from previously /** Compute facets counts from previously
* indexed {@link SortedSetDocValuesFacetField}, * indexed {@link SortedSetDocValuesFacetField},
@ -188,7 +189,8 @@ public class SortedSetDocValuesFacetCounts extends Facets {
// temp ram req'ts (sum of number of ords across all // temp ram req'ts (sum of number of ords across all
// segs) // segs)
if (ordinalMap != null) { if (ordinalMap != null) {
int segOrd = hits.context.ord; final int segOrd = hits.context.ord;
final LongValues ordMap = ordinalMap.getGlobalOrds(segOrd);
int numSegOrds = (int) segValues.getValueCount(); int numSegOrds = (int) segValues.getValueCount();
@ -202,7 +204,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
int term = (int) segValues.nextOrd(); int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) { while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term)); //System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
counts[(int) ordinalMap.getGlobalOrd(segOrd, term)]++; counts[(int) ordMap.get(term)]++;
term = (int) segValues.nextOrd(); term = (int) segValues.nextOrd();
} }
} }
@ -228,7 +230,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
int count = segCounts[ord]; int count = segCounts[ord];
if (count != 0) { if (count != 0) {
//System.out.println(" migrate segOrd=" + segOrd + " ord=" + ord + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, ord)); //System.out.println(" migrate segOrd=" + segOrd + " ord=" + ord + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, ord));
counts[(int) ordinalMap.getGlobalOrd(segOrd, ord)] += count; counts[(int) ordMap.get(ord)] += count;
} }
} }
} }

View File

@ -39,6 +39,10 @@ public final class RamUsageTester {
return true; return true;
} }
public boolean accept(Object o) {
return true;
}
}; };
/** A filter that allows to decide on what to take into account when measuring RAM usage. */ /** A filter that allows to decide on what to take into account when measuring RAM usage. */
@ -47,6 +51,9 @@ public final class RamUsageTester {
/** Whether the provided field should be taken into account when measuring RAM usage. */ /** Whether the provided field should be taken into account when measuring RAM usage. */
boolean accept(Field field); boolean accept(Field field);
/** Whether the provided field value should be taken into account when measuring RAM usage. */
boolean accept(Object o);
} }
/** /**
@ -119,7 +126,7 @@ public final class RamUsageTester {
// Push refs for traversal later. // Push refs for traversal later.
for (int i = len; --i >= 0 ;) { for (int i = len; --i >= 0 ;) {
final Object o = Array.get(ob, i); final Object o = Array.get(ob, i);
if (o != null && !seen.contains(o)) { if (o != null && !seen.contains(o) && filter.accept(o)) {
stack.add(o); stack.add(o);
} }
} }
@ -141,7 +148,7 @@ public final class RamUsageTester {
if (filter.accept(f)) { if (filter.accept(f)) {
// Fast path to eliminate redundancies. // Fast path to eliminate redundancies.
final Object o = f.get(ob); final Object o = f.get(ob);
if (o != null && !seen.contains(o)) { if (o != null && !seen.contains(o) && filter.accept(o)) {
stack.add(o); stack.add(o);
} }
} }

View File

@ -184,6 +184,9 @@ Bug Fixes
* SOLR-5426: Fixed a bug in ReverseWildCardFilter that could cause * SOLR-5426: Fixed a bug in ReverseWildCardFilter that could cause
InvalidTokenOffsetsException when highlighting. (Uwe Schindler, Arun Kumar, via hossman) InvalidTokenOffsetsException when highlighting. (Uwe Schindler, Arun Kumar, via hossman)
* SOLR-6175: DebugComponent throws NPE on shard exceptions when using shards.tolerant.
(Tomás Fernández Löbbe via shalin)
Other Changes Other Changes
--------------------- ---------------------

View File

@ -206,6 +206,11 @@ public class DebugComponent extends SearchComponent
for (ShardRequest sreq : rb.finished) { for (ShardRequest sreq : rb.finished) {
for (ShardResponse srsp : sreq.responses) { for (ShardResponse srsp : sreq.responses) {
if (srsp.getException() != null) {
// can't expect the debug content if there was an exception for this request
// this should only happen when using shards.tolerant=true
continue;
}
NamedList sdebug = (NamedList)srsp.getSolrResponse().getResponse().get("debug"); NamedList sdebug = (NamedList)srsp.getSolrResponse().getResponse().get("debug");
info = (NamedList)merge(sdebug, info, EXCLUDE_SET); info = (NamedList)merge(sdebug, info, EXCLUDE_SET);
if ((sreq.purpose & ShardRequest.PURPOSE_GET_DEBUG) != 0) { if ((sreq.purpose & ShardRequest.PURPOSE_GET_DEBUG) != 0) {
@ -257,6 +262,10 @@ public class DebugComponent extends SearchComponent
private NamedList<String> getTrackResponse(ShardResponse shardResponse) { private NamedList<String> getTrackResponse(ShardResponse shardResponse) {
NamedList<String> namedList = new NamedList<>(); NamedList<String> namedList = new NamedList<>();
if (shardResponse.getException() != null) {
namedList.add("Exception", shardResponse.getException().getMessage());
return namedList;
}
NamedList<Object> responseNL = shardResponse.getSolrResponse().getResponse(); NamedList<Object> responseNL = shardResponse.getSolrResponse().getResponse();
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
NamedList<Object> responseHeader = (NamedList<Object>)responseNL.get("responseHeader"); NamedList<Object> responseHeader = (NamedList<Object>)responseNL.get("responseHeader");

View File

@ -32,6 +32,7 @@ import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.UnicodeUtil;
import org.apache.solr.common.params.FacetParams; import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.NamedList;
@ -247,11 +248,12 @@ public class DocValuesFacets {
/** accumulates per-segment single-valued facet counts, mapping to global ordinal space on-the-fly */ /** accumulates per-segment single-valued facet counts, mapping to global ordinal space on-the-fly */
static void accumSingleGeneric(int counts[], int startTermIndex, SortedDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException { static void accumSingleGeneric(int counts[], int startTermIndex, SortedDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
final LongValues ordmap = map == null ? null : map.getGlobalOrds(subIndex);
int doc; int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
int term = si.getOrd(doc); int term = si.getOrd(doc);
if (map != null && term >= 0) { if (map != null && term >= 0) {
term = (int) map.getGlobalOrd(subIndex, term); term = (int) ordmap.get(term);
} }
int arrIdx = term-startTermIndex; int arrIdx = term-startTermIndex;
if (arrIdx>=0 && arrIdx<counts.length) counts[arrIdx]++; if (arrIdx>=0 && arrIdx<counts.length) counts[arrIdx]++;
@ -293,6 +295,7 @@ public class DocValuesFacets {
/** accumulates per-segment multi-valued facet counts, mapping to global ordinal space on-the-fly */ /** accumulates per-segment multi-valued facet counts, mapping to global ordinal space on-the-fly */
static void accumMultiGeneric(int counts[], int startTermIndex, SortedSetDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException { static void accumMultiGeneric(int counts[], int startTermIndex, SortedSetDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
int doc; int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
si.setDocument(doc); si.setDocument(doc);
@ -307,7 +310,7 @@ public class DocValuesFacets {
do { do {
if (map != null) { if (map != null) {
term = (int) map.getGlobalOrd(subIndex, term); term = (int) ordMap.get(term);
} }
int arrIdx = term-startTermIndex; int arrIdx = term-startTermIndex;
if (arrIdx>=0 && arrIdx<counts.length) counts[arrIdx]++; if (arrIdx>=0 && arrIdx<counts.length) counts[arrIdx]++;
@ -346,6 +349,7 @@ public class DocValuesFacets {
/** folds counts in segment ordinal space (segCounts) into global ordinal space (counts) */ /** folds counts in segment ordinal space (segCounts) into global ordinal space (counts) */
static void migrateGlobal(int counts[], int segCounts[], int subIndex, OrdinalMap map) { static void migrateGlobal(int counts[], int segCounts[], int subIndex, OrdinalMap map) {
final LongValues ordMap = map.getGlobalOrds(subIndex);
// missing count // missing count
counts[0] += segCounts[0]; counts[0] += segCounts[0];
@ -353,7 +357,7 @@ public class DocValuesFacets {
for (int ord = 1; ord < segCounts.length; ord++) { for (int ord = 1; ord < segCounts.length; ord++) {
int count = segCounts[ord]; int count = segCounts[ord];
if (count != 0) { if (count != 0) {
counts[1+(int) map.getGlobalOrd(subIndex, ord-1)] += count; counts[1+(int) ordMap.get(ord-1)] += count;
} }
} }
} }

View File

@ -35,6 +35,7 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import org.apache.solr.handler.component.FieldFacetStats; import org.apache.solr.handler.component.FieldFacetStats;
import org.apache.solr.handler.component.StatsValues; import org.apache.solr.handler.component.StatsValues;
import org.apache.solr.handler.component.StatsValuesFactory; import org.apache.solr.handler.component.StatsValuesFactory;
@ -161,12 +162,13 @@ public class DocValuesStats {
/** accumulates per-segment single-valued stats */ /** accumulates per-segment single-valued stats */
static void accumSingle(int counts[], int docBase, FieldFacetStats[] facetStats, SortedDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException { static void accumSingle(int counts[], int docBase, FieldFacetStats[] facetStats, SortedDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
int doc; int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
int term = si.getOrd(doc); int term = si.getOrd(doc);
if (term >= 0) { if (term >= 0) {
if (map != null) { if (map != null) {
term = (int) map.getGlobalOrd(subIndex, term); term = (int) ordMap.get(term);
} }
counts[term]++; counts[term]++;
for (FieldFacetStats f : facetStats) { for (FieldFacetStats f : facetStats) {
@ -178,6 +180,7 @@ public class DocValuesStats {
/** accumulates per-segment multi-valued stats */ /** accumulates per-segment multi-valued stats */
static void accumMulti(int counts[], int docBase, FieldFacetStats[] facetStats, SortedSetDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException { static void accumMulti(int counts[], int docBase, FieldFacetStats[] facetStats, SortedSetDocValues si, DocIdSetIterator disi, int subIndex, OrdinalMap map) throws IOException {
final LongValues ordMap = map == null ? null : map.getGlobalOrds(subIndex);
int doc; int doc;
while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
si.setDocument(doc); si.setDocument(doc);
@ -185,7 +188,7 @@ public class DocValuesStats {
while ((ord = si.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { while ((ord = si.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
int term = (int) ord; int term = (int) ord;
if (map != null) { if (map != null) {
term = (int) map.getGlobalOrd(subIndex, term); term = (int) ordMap.get(term);
} }
counts[term]++; counts[term]++;
for (FieldFacetStats f : facetStats) { for (FieldFacetStats f : facetStats) {

View File

@ -7,6 +7,7 @@ import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
@ -18,7 +19,9 @@ import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.NamedList;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -96,6 +99,7 @@ public class DistributedDebugComponentTest extends SolrJettyTestBase {
collection2 = null; collection2 = null;
jetty.stop(); jetty.stop();
jetty=null; jetty=null;
resetExceptionIgnores();
} }
@Test @Test
@ -367,6 +371,35 @@ public class DistributedDebugComponentTest extends SolrJettyTestBase {
assertSameKeys((NamedList<?>)nonDistribResponse.getDebugMap().get("timing"), (NamedList<?>)distribResponse.getDebugMap().get("timing")); assertSameKeys((NamedList<?>)nonDistribResponse.getDebugMap().get("timing"), (NamedList<?>)distribResponse.getDebugMap().get("timing"));
} }
public void testTolerantSearch() throws SolrServerException {
String badShard = "[ff01::0083]:3334";
SolrQuery query = new SolrQuery();
query.setQuery("*:*");
query.set("debug", "true");
query.set("distrib", "true");
query.setFields("id", "text");
query.set("shards", shard1 + "," + shard2 + "," + badShard);
try {
ignoreException("Server refused connection");
// verify that the request would fail if shards.tolerant=false
collection1.query(query);
fail("Expecting exception");
} catch (SolrException e) {
//expected
}
query.set(ShardParams.SHARDS_TOLERANT, "true");
QueryResponse response = collection1.query(query);
assertTrue((Boolean)response.getResponseHeader().get("partialResults"));
@SuppressWarnings("unchecked")
NamedList<String> badShardTrack = (NamedList<String>) ((NamedList<NamedList<String>>)
((NamedList<NamedList<NamedList<String>>>)response.getDebugMap().get("track")).get("EXECUTE_QUERY")).get(badShard);
assertEquals("Unexpected response size for shard", 1, badShardTrack.size());
Entry<String, String> exception = badShardTrack.iterator().next();
assertEquals("Expected key 'Exception' not found", "Exception", exception.getKey());
assertTrue("Unexpected exception message", exception.getValue().contains("Server refused connection"));
unIgnoreException("Server refused connection");
}
/** /**
* Compares the same section on the two query responses * Compares the same section on the two query responses
*/ */