Merge branch 'apache-https-master' into jira/solr-8593

This commit is contained in:
Kevin Risden 2016-10-27 15:08:46 -05:00
commit 1cd8da9863
204 changed files with 6623 additions and 1566 deletions

View File

@ -28,6 +28,7 @@
</orderEntry>
<orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
<orderEntry type="module" scope="TEST" module-name="solr-test-framework" />
<orderEntry type="module" module-name="lucene-core" />
<orderEntry type="module" module-name="solr-core" />
<orderEntry type="module" module-name="solrj" />
</component>

View File

@ -45,6 +45,9 @@ Optimizations
that have a facet value, so sparse faceting works as expected
(Adrien Grand via Mike McCandless)
* LUCENE-7519: Add optimized APIs to compute browse-only top level
facets (Mike McCandless)
Other
* LUCENE-7328: Remove LegacyNumericEncoding from GeoPointField. (Nick Knize)
@ -104,6 +107,9 @@ Bug Fixes
allTermsRequired is false and context filters are specified (Mike
McCandless)
* LUCENE-7429: AnalyzerWrapper can now modify the normalization chain too and
DelegatingAnalyzerWrapper does the right thing automatically. (Adrien Grand)
Improvements
* LUCENE-7439: FuzzyQuery now matches all terms within the specified
@ -111,6 +117,10 @@ Improvements
* LUCENE-7496: Better toString for SweetSpotSimilarity (janhoy)
* LUCENE-7520: Highlighter's WeightedSpanTermExtractor shouldn't attempt to expand a MultiTermQuery
when its field doesn't match the field the extraction is scoped to.
(Cao Manh Dat via David Smiley)
Optimizations
* LUCENE-7501: BKDReader should not store the split dimension explicitly in the
@ -118,6 +128,8 @@ Optimizations
Other
* LUCENE-7513: Upgrade randomizedtesting to 2.4.0. (Dawid Weiss)
* LUCENE-7452: Block join query exception suggests how to find a doc, which
violates orthogonality requirement. (Mikhail Khludnev)

View File

@ -131,7 +131,7 @@ public final class CustomAnalyzer extends Analyzer {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
final Tokenizer tk = tokenizer.create(attributeFactory());
final Tokenizer tk = tokenizer.create(attributeFactory(fieldName));
TokenStream ts = tk;
for (final TokenFilterFactory filter : tokenFilters) {
ts = filter.create(ts);

View File

@ -85,7 +85,7 @@ public final class CollationKeyAnalyzer extends Analyzer {
}
@Override
protected AttributeFactory attributeFactory() {
protected AttributeFactory attributeFactory(String fieldName) {
return factory;
}

View File

@ -221,6 +221,12 @@ class Lucene53NormsProducer extends NormsProducer {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return true;
}
@Override
public long cost() {
// TODO

View File

@ -476,6 +476,12 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return true;
}
@Override
public long cost() {
// TODO
@ -523,6 +529,13 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
value = values.get(doc);
return value != 0 || docsWithField.get(doc);
}
@Override
public long cost() {
return maxDoc;
@ -695,6 +708,16 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
return doc = hiDoc;
}
@Override
public boolean advanceExact(int target) throws IOException {
if (advance(target) == target) {
return true;
}
--index;
doc = target;
return false;
}
@Override
public long longValue() {
assert index >= 0;
@ -890,6 +913,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
return sparseValues.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return sparseValues.advanceExact(target);
}
@Override
public long cost() {
return sparseValues.cost();
@ -934,6 +962,13 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
ord = (int) ordinals.get(target);
return ord != -1;
}
@Override
public int ordValue() {
return ord;
@ -1016,6 +1051,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
return sparseValues.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return sparseValues.advanceExact(target);
}
@Override
public long cost() {
return sparseValues.cost();
@ -1060,6 +1100,12 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return docsWithField.get(docID);
}
@Override
public long cost() {
// TODO
@ -1121,6 +1167,14 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
startOffset = ordIndex.get(docID);
endOffset = ordIndex.get(docID+1L);
return endOffset > startOffset;
}
@Override
public long cost() {
// TODO
@ -1184,6 +1238,15 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
}
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
int ord = (int) ordinals.get(docID);
startOffset = offsets[ord];
endOffset = offsets[ord+1];
return endOffset > startOffset;
}
@Override
public long cost() {
// TODO

View File

@ -106,7 +106,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(atLeast(300), 1, 32766);
doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
}
}
@ -114,7 +114,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthManyVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1, 500);
doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1d, 1, 500);
}
}
@ -201,6 +201,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
}
final IndexReader indexReader = writer.getReader();
TestUtil.checkReader(indexReader);
writer.close();
for (LeafReaderContext context : indexReader.leaves()) {

View File

@ -144,7 +144,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
if (values == null) {
return null;
} else {
DocIdSetIterator docsWithField = getNumericDocsWithField(fieldInfo);
DocValuesIterator docsWithField = getNumericDocsWithField(fieldInfo);
return new NumericDocValues() {
@Override
@ -167,6 +167,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return docsWithField.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return docsWithField.advanceExact(target);
}
@Override
public long longValue() throws IOException {
return values.apply(docsWithField.docID());
@ -215,11 +220,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
};
}
private DocIdSetIterator getNumericDocsWithField(FieldInfo fieldInfo) throws IOException {
private static abstract class DocValuesIterator extends DocIdSetIterator {
abstract boolean advanceExact(int target) throws IOException;
}
private DocValuesIterator getNumericDocsWithField(FieldInfo fieldInfo) throws IOException {
final OneField field = fields.get(fieldInfo.name);
final IndexInput in = data.clone();
final BytesRefBuilder scratch = new BytesRefBuilder();
return new DocIdSetIterator() {
return new DocValuesIterator() {
int doc = -1;
@ -250,6 +259,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
return doc = NO_MORE_DOCS;
}
@Override
boolean advanceExact(int target) throws IOException {
this.doc = target;
in.seek(field.dataStartFilePointer + (1+field.pattern.length()+2)*target);
SimpleTextUtil.readLine(in, scratch); // data
SimpleTextUtil.readLine(in, scratch); // 'T' or 'F'
return scratch.byteAt(0) == (byte) 'T';
}
};
}
@ -265,7 +283,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
final BytesRefBuilder scratch = new BytesRefBuilder();
final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
DocIdSetIterator docsWithField = getBinaryDocsWithField(fieldInfo);
DocValuesIterator docsWithField = getBinaryDocsWithField(fieldInfo);
IntFunction<BytesRef> values = new IntFunction<BytesRef>() {
final BytesRefBuilder term = new BytesRefBuilder();
@ -316,6 +334,11 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return docsWithField.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return docsWithField.advanceExact(target);
}
@Override
public BytesRef binaryValue() throws IOException {
return values.apply(docsWithField.docID());
@ -323,13 +346,13 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
};
}
private DocIdSetIterator getBinaryDocsWithField(FieldInfo fieldInfo) throws IOException {
private DocValuesIterator getBinaryDocsWithField(FieldInfo fieldInfo) throws IOException {
final OneField field = fields.get(fieldInfo.name);
final IndexInput in = data.clone();
final BytesRefBuilder scratch = new BytesRefBuilder();
final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
return new DocIdSetIterator() {
return new DocValuesIterator() {
int doc = -1;
@ -371,6 +394,26 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
}
return doc = NO_MORE_DOCS;
}
@Override
boolean advanceExact(int target) throws IOException {
this.doc = target;
in.seek(field.dataStartFilePointer + (9+field.pattern.length() + field.maxLength+2)*target);
SimpleTextUtil.readLine(in, scratch);
assert StringHelper.startsWith(scratch.get(), LENGTH);
int len;
try {
len = decoder.parse(new String(scratch.bytes(), LENGTH.length, scratch.length() - LENGTH.length, StandardCharsets.UTF_8)).intValue();
} catch (ParseException pe) {
throw new CorruptIndexException("failed to parse int length", in, pe);
}
// skip past bytes
byte bytes[] = new byte[len];
in.readBytes(bytes, 0, len);
SimpleTextUtil.readLine(in, scratch); // newline
SimpleTextUtil.readLine(in, scratch); // 'T' or 'F'
return scratch.byteAt(0) == (byte) 'T';
}
};
}
@ -425,6 +468,19 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
this.doc = target;
in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + target * (1 + field.ordPattern.length()));
SimpleTextUtil.readLine(in, scratch);
try {
ord = (int) ordDecoder.parse(scratch.get().utf8ToString()).longValue()-1;
} catch (ParseException pe) {
throw new CorruptIndexException("failed to parse ord", in, pe);
}
return ord >= 0;
}
@Override
public int ordValue() {
return ord;
@ -488,6 +544,15 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return doc;
}
@Override
public boolean advanceExact(int target) throws IOException {
if (binary.advanceExact(target)) {
setCurrentDoc();
return true;
}
return false;
}
long values[];
int index;
@ -569,6 +634,20 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
in.seek(field.dataStartFilePointer + field.numValues * (9 + field.pattern.length() + field.maxLength) + target * (1 + field.ordPattern.length()));
SimpleTextUtil.readLine(in, scratch);
String ordList = scratch.get().utf8ToString().trim();
doc = target;
if (ordList.isEmpty() == false) {
currentOrds = ordList.split(",");
currentIndex = 0;
return true;
}
return false;
}
@Override
public long nextOrd() throws IOException {
if (currentIndex == currentOrds.length) {

View File

@ -342,6 +342,15 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
return doc;
}
@Override
public boolean advanceExact(int target) throws IOException {
if (values.advanceExact(target)) {
setCurrentDoc();
return true;
}
return false;
}
final StringBuilder builder = new StringBuilder();
BytesRef binaryValue;

View File

@ -238,7 +238,7 @@ public abstract class Analyzer implements Closeable {
throw new IllegalStateException("Normalization threw an unexpected exeption", e);
}
final AttributeFactory attributeFactory = attributeFactory();
final AttributeFactory attributeFactory = attributeFactory(fieldName);
try (TokenStream ts = normalize(fieldName,
new StringTokenStream(attributeFactory, filteredText, text.length()))) {
final TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
@ -286,9 +286,10 @@ public abstract class Analyzer implements Closeable {
/** Return the {@link AttributeFactory} to be used for
* {@link #tokenStream analysis} and
* {@link #normalize(String, String) normalization}. The default
* implementation returns {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
protected AttributeFactory attributeFactory() {
* {@link #normalize(String, String) normalization} on the given
* {@code FieldName}. The default implementation returns
* {@link TokenStream#DEFAULT_TOKEN_ATTRIBUTE_FACTORY}. */
protected AttributeFactory attributeFactory(String fieldName) {
return TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY;
}

View File

@ -19,6 +19,8 @@ package org.apache.lucene.analysis;
import java.io.Reader;
import org.apache.lucene.util.AttributeFactory;
/**
* Extension to {@link Analyzer} suitable for Analyzers which wrap
* other Analyzers.
@ -81,6 +83,22 @@ public abstract class AnalyzerWrapper extends Analyzer {
return components;
}
/**
* Wraps / alters the given TokenStream for normalization purposes, taken
* from the wrapped Analyzer, to form new components. It is through this
* method that new TokenFilters can be added by AnalyzerWrappers. By default,
* the given token stream are returned.
*
* @param fieldName
* Name of the field which is to be analyzed
* @param in
* TokenStream taken from the wrapped Analyzer
* @return Wrapped / altered TokenStreamComponents.
*/
protected TokenStream wrapTokenStreamForNormalization(String fieldName, TokenStream in) {
return in;
}
/**
* Wraps / alters the given Reader. Through this method AnalyzerWrappers can
* implement {@link #initReader(String, Reader)}. By default, the given reader
@ -96,11 +114,31 @@ public abstract class AnalyzerWrapper extends Analyzer {
return reader;
}
/**
* Wraps / alters the given Reader. Through this method AnalyzerWrappers can
* implement {@link #initReaderForNormalization(String, Reader)}. By default,
* the given reader is returned.
*
* @param fieldName
* name of the field which is to be analyzed
* @param reader
* the reader to wrap
* @return the wrapped reader
*/
protected Reader wrapReaderForNormalization(String fieldName, Reader reader) {
return reader;
}
@Override
protected final TokenStreamComponents createComponents(String fieldName) {
return wrapComponents(fieldName, getWrappedAnalyzer(fieldName).createComponents(fieldName));
}
@Override
protected final TokenStream normalize(String fieldName, TokenStream in) {
return wrapTokenStreamForNormalization(fieldName, getWrappedAnalyzer(fieldName).normalize(fieldName, in));
}
@Override
public int getPositionIncrementGap(String fieldName) {
return getWrappedAnalyzer(fieldName).getPositionIncrementGap(fieldName);
@ -115,4 +153,14 @@ public abstract class AnalyzerWrapper extends Analyzer {
public final Reader initReader(String fieldName, Reader reader) {
return getWrappedAnalyzer(fieldName).initReader(fieldName, wrapReader(fieldName, reader));
}
@Override
protected final Reader initReaderForNormalization(String fieldName, Reader reader) {
return getWrappedAnalyzer(fieldName).initReaderForNormalization(fieldName, wrapReaderForNormalization(fieldName, reader));
}
@Override
protected final AttributeFactory attributeFactory(String fieldName) {
return getWrappedAnalyzer(fieldName).attributeFactory(fieldName);
}
}

View File

@ -55,11 +55,21 @@ public abstract class DelegatingAnalyzerWrapper extends AnalyzerWrapper {
return super.wrapComponents(fieldName, components);
}
@Override
protected final TokenStream wrapTokenStreamForNormalization(String fieldName, TokenStream in) {
return super.wrapTokenStreamForNormalization(fieldName, in);
}
@Override
protected final Reader wrapReader(String fieldName, Reader reader) {
return super.wrapReader(fieldName, reader);
}
@Override
protected final Reader wrapReaderForNormalization(String fieldName, Reader reader) {
return super.wrapReaderForNormalization(fieldName, reader);
}
private static final class DelegatingReuseStrategy extends ReuseStrategy {
DelegatingAnalyzerWrapper wrapper;
private final ReuseStrategy fallbackStrategy;

View File

@ -227,6 +227,11 @@ public abstract class DocValuesConsumer implements Closeable {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return finalCost;
@ -319,6 +324,11 @@ public abstract class DocValuesConsumer implements Closeable {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return finalCost;
@ -416,6 +426,11 @@ public abstract class DocValuesConsumer implements Closeable {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int docValueCount() {
return currentSub.values.docValueCount();
@ -574,6 +589,11 @@ public abstract class DocValuesConsumer implements Closeable {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return finalCost;
@ -731,6 +751,11 @@ public abstract class DocValuesConsumer implements Closeable {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long nextOrd() throws IOException {
long subOrd = currentSub.values.nextOrd();

View File

@ -157,6 +157,11 @@ public abstract class NormsConsumer implements Closeable {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return 0;

View File

@ -112,6 +112,9 @@ final class IndexedDISI extends DocIdSetIterator {
private int doc = -1;
private int index = -1;
// SPARSE variables
boolean exists;
// DENSE variables
private long word;
private int wordIndex = -1;
@ -129,7 +132,7 @@ final class IndexedDISI extends DocIdSetIterator {
@Override
public int advance(int target) throws IOException {
final int targetBlock = target & 0xFFFF0000;
if (block != targetBlock) {
if (block < targetBlock) {
advanceBlock(targetBlock);
}
if (block == targetBlock) {
@ -138,7 +141,19 @@ final class IndexedDISI extends DocIdSetIterator {
}
readBlockHeader();
}
return doc = method.readFirstDoc(this);
boolean found = method.advanceWithinBlock(this, block);
assert found;
return doc;
}
public boolean advanceExact(int target) throws IOException {
final int targetBlock = target & 0xFFFF0000;
if (block < targetBlock) {
advanceBlock(targetBlock);
}
boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target);
this.doc = target;
return found;
}
private void advanceBlock(int targetBlock) throws IOException {
@ -185,11 +200,6 @@ final class IndexedDISI extends DocIdSetIterator {
enum Method {
SPARSE {
@Override
int readFirstDoc(IndexedDISI disi) throws IOException {
disi.index++;
return disi.block | Short.toUnsignedInt(disi.slice.readShort());
}
@Override
boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
final int targetInBlock = target & 0xFFFF;
@ -199,23 +209,37 @@ final class IndexedDISI extends DocIdSetIterator {
disi.index++;
if (doc >= targetInBlock) {
disi.doc = disi.block | doc;
disi.exists = true;
return true;
}
}
return false;
}
@Override
boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
final int targetInBlock = target & 0xFFFF;
// TODO: binary search
if (target == disi.doc) {
return disi.exists;
}
for (; disi.index < disi.nextBlockIndex;) {
int doc = Short.toUnsignedInt(disi.slice.readShort());
disi.index++;
if (doc >= targetInBlock) {
if (doc != targetInBlock) {
disi.index--;
disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES);
break;
}
disi.exists = true;
return true;
}
}
disi.exists = false;
return false;
}
},
DENSE {
@Override
int readFirstDoc(IndexedDISI disi) throws IOException {
do {
disi.word = disi.slice.readLong();
disi.wordIndex++;
} while (disi.word == 0L);
disi.index = disi.numberOfOnes;
disi.numberOfOnes += Long.bitCount(disi.word);
return disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word);
}
@Override
boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
final int targetInBlock = target & 0xFFFF;
@ -244,26 +268,42 @@ final class IndexedDISI extends DocIdSetIterator {
}
return false;
}
@Override
boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
final int targetInBlock = target & 0xFFFF;
final int targetWordIndex = targetInBlock >>> 6;
for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) {
disi.word = disi.slice.readLong();
disi.numberOfOnes += Long.bitCount(disi.word);
}
disi.wordIndex = targetWordIndex;
long leftBits = disi.word >>> target;
disi.index = disi.numberOfOnes - Long.bitCount(leftBits);
return (leftBits & 1L) != 0;
}
},
ALL {
@Override
int readFirstDoc(IndexedDISI disi) {
return disi.block;
}
@Override
boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException {
disi.doc = target;
disi.index = target - disi.gap;
return true;
}
@Override
boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException {
disi.index = target - disi.gap;
return true;
}
};
/** Read the first document of the current block. */
abstract int readFirstDoc(IndexedDISI disi) throws IOException;
/** Advance to the first doc from the block that is equal to or greater than {@code target}.
* Return true if there is such a doc and false otherwise. */
abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException;
/** Advance the iterator exactly to the position corresponding to the given {@code target}
* and return whether this document exists. */
abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException;
}
}

View File

@ -340,77 +340,147 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return getNumeric(entry);
}
private static abstract class DenseNumericDocValues extends NumericDocValues {
final int maxDoc;
int doc = -1;
DenseNumericDocValues(int maxDoc) {
this.maxDoc = maxDoc;
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
}
return doc = target;
}
@Override
public boolean advanceExact(int target) {
doc = target;
return true;
}
@Override
public long cost() {
return maxDoc;
}
}
private static abstract class SparseNumericDocValues extends NumericDocValues {
final IndexedDISI disi;
SparseNumericDocValues(IndexedDISI disi) {
this.disi = disi;
}
@Override
public int advance(int target) throws IOException {
return disi.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return disi.advanceExact(target);
}
@Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@Override
public int docID() {
return disi.docID();
}
@Override
public long cost() {
return disi.cost();
}
}
private NumericDocValues getNumeric(NumericEntry entry) throws IOException {
if (entry.docsWithFieldOffset == -2) {
// empty
return DocValues.emptyNumeric();
} else if (entry.docsWithFieldOffset == -1) {
// dense
final LongValues normValues = getNumericValues(entry);
return new NumericDocValues() {
int doc = -1;
@Override
public long longValue() throws IOException {
return normValues.get(doc);
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
if (entry.bitsPerValue == 0) {
return new DenseNumericDocValues(maxDoc) {
@Override
public long longValue() throws IOException {
return entry.minValue;
}
return doc = target;
};
} else {
final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength);
final LongValues values = DirectReader.getInstance(slice, entry.bitsPerValue);
if (entry.table != null) {
final long[] table = entry.table;
return new DenseNumericDocValues(maxDoc) {
@Override
public long longValue() throws IOException {
return table[(int) values.get(doc)];
}
};
} else {
final long mul = entry.gcd;
final long delta = entry.minValue;
return new DenseNumericDocValues(maxDoc) {
@Override
public long longValue() throws IOException {
return mul * values.get(doc) + delta;
}
};
}
@Override
public long cost() {
return maxDoc;
}
};
}
} else {
// sparse
final LongValues values = getNumericValues(entry);
final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues);
return new NumericDocValues() {
@Override
public int advance(int target) throws IOException {
return disi.advance(target);
if (entry.bitsPerValue == 0) {
return new SparseNumericDocValues(disi) {
@Override
public long longValue() throws IOException {
return entry.minValue;
}
};
} else {
final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength);
final LongValues values = DirectReader.getInstance(slice, entry.bitsPerValue);
if (entry.table != null) {
final long[] table = entry.table;
return new SparseNumericDocValues(disi) {
@Override
public long longValue() throws IOException {
return table[(int) values.get(disi.index())];
}
};
} else {
final long mul = entry.gcd;
final long delta = entry.minValue;
return new SparseNumericDocValues(disi) {
@Override
public long longValue() throws IOException {
return mul * values.get(disi.index()) + delta;
}
};
}
@Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@Override
public int docID() {
return disi.docID();
}
@Override
public long cost() {
return disi.cost();
}
@Override
public long longValue() throws IOException {
return values.get(disi.index());
}
};
}
}
}
@ -456,6 +526,79 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
}
}
private static abstract class DenseBinaryDocValues extends BinaryDocValues {
final int maxDoc;
int doc = -1;
DenseBinaryDocValues(int maxDoc) {
this.maxDoc = maxDoc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int docID() {
return doc;
}
@Override
public long cost() {
return maxDoc;
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
}
return doc = target;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
return true;
}
}
private static abstract class SparseBinaryDocValues extends BinaryDocValues {
final IndexedDISI disi;
SparseBinaryDocValues(IndexedDISI disi) {
this.disi = disi;
}
@Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@Override
public int docID() {
return disi.docID();
}
@Override
public long cost() {
return disi.cost();
}
@Override
public int advance(int target) throws IOException {
return disi.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return disi.advanceExact(target);
}
}
@Override
public BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryEntry entry = binaries.get(field.name);
@ -463,105 +606,77 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return DocValues.emptyBinary();
}
IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength);
BytesRefs bytesRefs;
if (entry.minLength == entry.maxLength) {
bytesRefs = new BytesRefs() {
BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength);
@Override
public BytesRef get(int index) throws IOException {
bytesSlice.seek((long) index * bytes.length);
bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
return bytes;
}
};
} else {
final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength);
final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesData);
bytesRefs = new BytesRefs() {
BytesRef bytes = new BytesRef(entry.maxLength);
@Override
BytesRef get(int index) throws IOException {
long startOffset = addresses.get(index);
bytes.length = (int) (addresses.get(index + 1L) - startOffset);
bytesSlice.seek(startOffset);
bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
return bytes;
}
};
}
final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength);
if (entry.docsWithFieldOffset == -1) {
// dense
return new BinaryDocValues() {
if (entry.minLength == entry.maxLength) {
// fixed length
final int length = entry.maxLength;
return new DenseBinaryDocValues(maxDoc) {
final BytesRef bytes = new BytesRef(new byte[length], 0, length);
int doc = -1;
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int docID() {
return doc;
}
@Override
public long cost() {
return maxDoc;
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
@Override
public BytesRef binaryValue() throws IOException {
bytesSlice.seek((long) doc * length);
bytesSlice.readBytes(bytes.bytes, 0, length);
return bytes;
}
return doc = target;
}
};
} else {
// variable length
final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength);
final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesData);
return new DenseBinaryDocValues(maxDoc) {
final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength);
@Override
public BytesRef binaryValue() throws IOException {
return bytesRefs.get(doc);
}
};
@Override
public BytesRef binaryValue() throws IOException {
long startOffset = addresses.get(doc);
bytes.length = (int) (addresses.get(doc + 1L) - startOffset);
bytesSlice.seek(startOffset);
bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
return bytes;
}
};
}
} else {
// sparse
final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField);
return new BinaryDocValues() {
if (entry.minLength == entry.maxLength) {
// fixed length
final int length = entry.maxLength;
return new SparseBinaryDocValues(disi) {
final BytesRef bytes = new BytesRef(new byte[length], 0, length);
@Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@Override
public BytesRef binaryValue() throws IOException {
bytesSlice.seek((long) disi.index() * length);
bytesSlice.readBytes(bytes.bytes, 0, length);
return bytes;
}
};
} else {
// variable length
final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength);
final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesData);
return new SparseBinaryDocValues(disi) {
final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength);
@Override
public int docID() {
return disi.docID();
}
@Override
public long cost() {
return disi.cost();
}
@Override
public int advance(int target) throws IOException {
return disi.advance(target);
}
@Override
public BytesRef binaryValue() throws IOException {
return bytesRefs.get(disi.index());
}
};
@Override
public BytesRef binaryValue() throws IOException {
final int index = disi.index();
long startOffset = addresses.get(index);
bytes.length = (int) (addresses.get(index + 1L) - startOffset);
bytesSlice.seek(startOffset);
bytesSlice.readBytes(bytes.bytes, 0, bytes.length);
return bytes;
}
};
}
}
}
private static abstract class BytesRefs {
abstract BytesRef get(int index) throws IOException;
}
@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
SortedEntry entry = sorted.get(field.name);
@ -615,6 +730,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return doc = target;
}
@Override
public boolean advanceExact(int target) {
doc = target;
return true;
}
@Override
public int ordValue() {
return (int) ords.get(doc);
@ -645,6 +766,11 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return disi.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return disi.advanceExact(target);
}
@Override
public int ordValue() {
return (int) ords.get(disi.index());
@ -959,6 +1085,15 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return doc = target;
}
@Override
public boolean advanceExact(int target) throws IOException {
start = addresses.get(target);
end = addresses.get(target + 1L);
count = (int) (end - start);
doc = target;
return true;
}
@Override
public long nextValue() throws IOException {
return values.get(start++);
@ -1000,6 +1135,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return disi.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
set = false;
return disi.advanceExact(target);
}
@Override
public long nextValue() throws IOException {
set();
@ -1072,6 +1213,14 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return doc = target;
}
@Override
public boolean advanceExact(int target) throws IOException {
start = addresses.get(target);
end = addresses.get(target + 1L);
doc = target;
return true;
}
@Override
public long nextOrd() throws IOException {
if (start == end) {
@ -1112,6 +1261,12 @@ final class Lucene70DocValuesProducer extends DocValuesProducer implements Close
return disi.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
set = false;
return disi.advanceExact(target);
}
@Override
public long nextOrd() throws IOException {
if (set == false) {

View File

@ -95,8 +95,78 @@ final class Lucene70NormsProducer extends NormsProducer {
long normsOffset;
}
static abstract class LongValues {
abstract long get(int index) throws IOException;
static abstract class DenseNormsIterator extends NumericDocValues {
final int maxDoc;
int doc = -1;
DenseNormsIterator(int maxDoc) {
this.maxDoc = maxDoc;
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
}
return doc = target;
}
@Override
public boolean advanceExact(int target) throws IOException {
this.doc = target;
return true;
}
@Override
public long cost() {
return maxDoc;
}
}
static abstract class SparseNormsIterator extends NumericDocValues {
final IndexedDISI disi;
SparseNormsIterator(IndexedDISI disi) {
this.disi = disi;
}
@Override
public int docID() {
return disi.docID();
}
@Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return disi.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return disi.advanceExact(target);
}
@Override
public long cost() {
return disi.cost();
}
}
private void readFields(IndexInput meta, FieldInfos infos) throws IOException {
@ -131,111 +201,87 @@ final class Lucene70NormsProducer extends NormsProducer {
return DocValues.emptyNumeric();
} else if (entry.docsWithFieldOffset == -1) {
// dense
final LongValues normValues = getNormValues(entry);
return new NumericDocValues() {
int doc = -1;
@Override
public long longValue() throws IOException {
return normValues.get(doc);
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
if (entry.bytesPerNorm == 0) {
return new DenseNormsIterator(maxDoc) {
@Override
public long longValue() throws IOException {
return entry.normsOffset;
}
return doc = target;
}
@Override
public long cost() {
return maxDoc;
}
};
} else {
// sparse
final LongValues normValues = getNormValues(entry);
final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField);
return new NumericDocValues() {
@Override
public int advance(int target) throws IOException {
return disi.advance(target);
}
@Override
public int nextDoc() throws IOException {
return disi.nextDoc();
}
@Override
public int docID() {
return disi.docID();
}
@Override
public long cost() {
return entry.numDocsWithField;
}
@Override
public long longValue() throws IOException {
return normValues.get(disi.index());
}
};
}
}
private LongValues getNormValues(NormsEntry entry) throws IOException {
if (entry.bytesPerNorm == 0) {
return new LongValues() {
@Override
long get(int index) {
return entry.normsOffset;
}
};
} else {
};
}
final RandomAccessInput slice = data.randomAccessSlice(entry.normsOffset, entry.numDocsWithField * (long) entry.bytesPerNorm);
switch (entry.bytesPerNorm) {
case 1:
return new LongValues() {
return new DenseNormsIterator(maxDoc) {
@Override
long get(int index) throws IOException {
return slice.readByte(index);
public long longValue() throws IOException {
return slice.readByte(doc);
}
};
case 2:
return new LongValues() {
return new DenseNormsIterator(maxDoc) {
@Override
long get(int index) throws IOException {
return slice.readShort(((long) index) << 1);
public long longValue() throws IOException {
return slice.readShort(((long) doc) << 1);
}
};
case 4:
return new LongValues() {
return new DenseNormsIterator(maxDoc) {
@Override
long get(int index) throws IOException {
return slice.readInt(((long) index) << 2);
public long longValue() throws IOException {
return slice.readInt(((long) doc) << 2);
}
};
case 8:
return new LongValues() {
return new DenseNormsIterator(maxDoc) {
@Override
long get(int index) throws IOException {
return slice.readLong(((long) index) << 3);
public long longValue() throws IOException {
return slice.readLong(((long) doc) << 3);
}
};
default:
// should not happen, we already validate bytesPerNorm in readFields
throw new AssertionError();
}
} else {
// sparse
final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField);
if (entry.bytesPerNorm == 0) {
return new SparseNormsIterator(disi) {
@Override
public long longValue() throws IOException {
return entry.normsOffset;
}
};
}
final RandomAccessInput slice = data.randomAccessSlice(entry.normsOffset, entry.numDocsWithField * (long) entry.bytesPerNorm);
switch (entry.bytesPerNorm) {
case 1:
return new SparseNormsIterator(disi) {
@Override
public long longValue() throws IOException {
return slice.readByte(disi.index());
}
};
case 2:
return new SparseNormsIterator(disi) {
@Override
public long longValue() throws IOException {
return slice.readShort(((long) disi.index()) << 1);
}
};
case 4:
return new SparseNormsIterator(disi) {
@Override
public long longValue() throws IOException {
return slice.readInt(((long) disi.index()) << 2);
}
};
case 8:
return new SparseNormsIterator(disi) {
@Override
public long longValue() throws IOException {
return slice.readLong(((long) disi.index()) << 3);
}
};
default:

View File

@ -19,13 +19,12 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
/**
* A per-document numeric value.
*/
public abstract class BinaryDocValues extends DocIdSetIterator {
public abstract class BinaryDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@ -33,6 +32,8 @@ public abstract class BinaryDocValues extends DocIdSetIterator {
/**
* Returns the binary value for the current document ID.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
* @return binary value
*/
public abstract BytesRef binaryValue() throws IOException;

View File

@ -153,6 +153,11 @@ class BinaryDocValuesWriter extends DocValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return docsWithField.cost();

View File

@ -2062,13 +2062,83 @@ public final class CheckIndex implements Closeable {
return status;
}
@FunctionalInterface
private static interface DocValuesIteratorSupplier {
DocValuesIterator get(FieldInfo fi) throws IOException;
}
private static void checkDVIterator(FieldInfo fi, int maxDoc, DocValuesIteratorSupplier producer) throws IOException {
String field = fi.name;
// Check advance
DocValuesIterator it1 = producer.get(fi);
DocValuesIterator it2 = producer.get(fi);
int i = 0;
for (int doc = it1.nextDoc(); ; doc = it1.nextDoc()) {
if (i++ % 10 == 1) {
int doc2 = it2.advance(doc - 1);
if (doc2 < doc - 1) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " went backwords (got: " + doc2 + ")");
}
if (doc2 == doc - 1) {
doc2 = it2.nextDoc();
}
if (doc2 != doc) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " was not found through advance() (got: " + doc2 + ")");
}
if (it2.docID() != doc) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " reports wrong doc ID (got: " + it2.docID() + ")");
}
}
if (doc == NO_MORE_DOCS) {
break;
}
}
// Check advanceExact
it1 = producer.get(fi);
it2 = producer.get(fi);
i = 0;
int lastDoc = -1;
for (int doc = it1.nextDoc(); doc != NO_MORE_DOCS ; doc = it1.nextDoc()) {
if (i++ % 13 == 1) {
boolean found = it2.advanceExact(doc - 1);
if ((doc - 1 == lastDoc) != found) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " disagrees about whether document exists (got: " + found + ")");
}
if (it2.docID() != doc - 1) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " reports wrong doc ID (got: " + it2.docID() + ")");
}
boolean found2 = it2.advanceExact(doc - 1);
if (found != found2) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + (doc-1) + " has unstable advanceExact");
}
if (i % 1 == 0) {
int doc2 = it2.nextDoc();
if (doc != doc2) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " was not found through advance() (got: " + doc2 + ")");
}
if (it2.docID() != doc) {
throw new RuntimeException("dv iterator field=" + field + ": doc=" + doc + " reports wrong doc ID (got: " + it2.docID() + ")");
}
}
}
lastDoc = doc;
}
}
private static void checkBinaryDocValues(String fieldName, int maxDoc, BinaryDocValues bdv) throws IOException {
int doc;
if (bdv.docID() != -1) {
throw new RuntimeException("binary dv iterator for field: " + fieldName + " should start at docID=-1, but got " + bdv.docID());
}
// TODO: we could add stats to DVs, e.g. total doc count w/ a value for this field
// TODO: check advance too
while ((doc = bdv.nextDoc()) != NO_MORE_DOCS) {
BytesRef value = bdv.binaryValue();
value.isValid();
@ -2083,7 +2153,6 @@ public final class CheckIndex implements Closeable {
FixedBitSet seenOrds = new FixedBitSet(dv.getValueCount());
int maxOrd2 = -1;
int docID;
// TODO: check advance too
while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
int ord = dv.ordValue();
if (ord == -1) {
@ -2119,7 +2188,6 @@ public final class CheckIndex implements Closeable {
LongBitSet seenOrds = new LongBitSet(dv.getValueCount());
long maxOrd2 = -1;
int docID;
// TODO: check advance too
while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
long lastOrd = -1;
long ord;
@ -2164,7 +2232,6 @@ public final class CheckIndex implements Closeable {
if (ndv.docID() != -1) {
throw new RuntimeException("dv iterator for field: " + fieldName + " should start at docID=-1, but got " + ndv.docID());
}
// TODO: check advance too
while (true) {
int docID = ndv.nextDoc();
if (docID == NO_MORE_DOCS) {
@ -2191,7 +2258,6 @@ public final class CheckIndex implements Closeable {
throw new RuntimeException("dv iterator for field: " + fieldName + " should start at docID=-1, but got " + ndv.docID());
}
// TODO: we could add stats to DVs, e.g. total doc count w/ a value for this field
// TODO: check advance too
while ((doc = ndv.nextDoc()) != NO_MORE_DOCS) {
ndv.longValue();
}
@ -2201,23 +2267,28 @@ public final class CheckIndex implements Closeable {
switch(fi.getDocValuesType()) {
case SORTED:
status.totalSortedFields++;
checkDVIterator(fi, maxDoc, dvReader::getSorted);
checkBinaryDocValues(fi.name, maxDoc, dvReader.getSorted(fi));
checkSortedDocValues(fi.name, maxDoc, dvReader.getSorted(fi));
break;
case SORTED_NUMERIC:
status.totalSortedNumericFields++;
checkDVIterator(fi, maxDoc, dvReader::getSortedNumeric);
checkSortedNumericDocValues(fi.name, maxDoc, dvReader.getSortedNumeric(fi));
break;
case SORTED_SET:
status.totalSortedSetFields++;
checkDVIterator(fi, maxDoc, dvReader::getSortedSet);
checkSortedSetDocValues(fi.name, maxDoc, dvReader.getSortedSet(fi));
break;
case BINARY:
status.totalBinaryFields++;
checkDVIterator(fi, maxDoc, dvReader::getBinary);
checkBinaryDocValues(fi.name, maxDoc, dvReader.getBinary(fi));
break;
case NUMERIC:
status.totalNumericFields++;
checkDVIterator(fi, maxDoc, dvReader::getNumeric);
checkNumericDocValues(fi.name, dvReader.getNumeric(fi));
break;
default:

View File

@ -35,26 +35,27 @@ public final class DocValues {
*/
public static final BinaryDocValues emptyBinary() {
return new BinaryDocValues() {
private boolean exhausted = false;
private int doc = -1;
@Override
public int advance(int target) {
assert exhausted == false;
assert target >= 0;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
return false;
}
@Override
public int docID() {
return exhausted ? NO_MORE_DOCS : -1;
return doc;
}
@Override
public int nextDoc() {
assert exhausted == false;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
@ -75,26 +76,27 @@ public final class DocValues {
*/
public static final NumericDocValues emptyNumeric() {
return new NumericDocValues() {
private boolean exhausted = false;
private int doc = -1;
@Override
public int advance(int target) {
assert exhausted == false;
assert target >= 0;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
return false;
}
@Override
public int docID() {
return exhausted ? NO_MORE_DOCS : -1;
return doc;
}
@Override
public int nextDoc() {
assert exhausted == false;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
@ -140,26 +142,27 @@ public final class DocValues {
final BytesRef empty = new BytesRef();
return new SortedDocValues() {
private boolean exhausted = false;
private int doc = -1;
@Override
public int advance(int target) {
assert exhausted == false;
assert target >= 0;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
return false;
}
@Override
public int docID() {
return exhausted ? NO_MORE_DOCS : -1;
return doc;
}
@Override
public int nextDoc() {
assert exhausted == false;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
@ -191,26 +194,27 @@ public final class DocValues {
public static final SortedNumericDocValues emptySortedNumeric(int maxDoc) {
return new SortedNumericDocValues() {
private boolean exhausted = false;
private int doc = -1;
@Override
public int advance(int target) {
assert exhausted == false;
assert target >= 0;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
return false;
}
@Override
public int docID() {
return exhausted ? NO_MORE_DOCS : -1;
return doc;
}
@Override
public int nextDoc() {
assert exhausted == false;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
@ -237,26 +241,27 @@ public final class DocValues {
final BytesRef empty = new BytesRef();
return new SortedSetDocValues() {
private boolean exhausted = false;
private int doc = -1;
@Override
public int advance(int target) {
assert exhausted == false;
assert target >= 0;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override
public boolean advanceExact(int target) throws IOException {
doc = target;
return false;
}
@Override
public int docID() {
return exhausted ? NO_MORE_DOCS : -1;
return doc;
}
@Override
public int nextDoc() {
assert exhausted == false;
exhausted = true;
return NO_MORE_DOCS;
return doc = NO_MORE_DOCS;
}
@Override

View File

@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
abstract class DocValuesIterator extends DocIdSetIterator {
/** Advance the iterator to exactly {@code target} and return whether
* {@code target} has a value.
* {@code target} must be greater than or equal to the current
* {@link #docID() doc ID} and must be a valid doc ID, ie. &ge; 0 and
* &lt; {@code maxDoc}.
* After this method returns, {@link #docID()} retuns {@code target}. */
public abstract boolean advanceExact(int target) throws IOException;
}

View File

@ -49,6 +49,11 @@ public abstract class FilterBinaryDocValues extends BinaryDocValues {
return in.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return in.advanceExact(target);
}
@Override
public long cost() {
return in.cost();

View File

@ -47,6 +47,11 @@ public abstract class FilterNumericDocValues extends NumericDocValues {
return in.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return in.advanceExact(target);
}
@Override
public long cost() {
return in.cost();

View File

@ -1898,7 +1898,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
final int size = mergeExceptions.size();
for(int i=0;i<size;i++) {
final MergePolicy.OneMerge merge = mergeExceptions.get(i);
if (merge.maxNumSegments != -1) {
if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS) {
throw new IOException("background merge hit exception: " + merge.segString(), merge.getException());
}
}
@ -1926,12 +1926,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* runningMerges are maxNumSegments merges. */
private synchronized boolean maxNumSegmentsMergesPending() {
for (final MergePolicy.OneMerge merge : pendingMerges) {
if (merge.maxNumSegments != -1)
if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS)
return true;
}
for (final MergePolicy.OneMerge merge : runningMerges) {
if (merge.maxNumSegments != -1)
if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS)
return true;
}
@ -2059,7 +2059,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// point, try again to log the config here:
messageState();
assert maxNumSegments == -1 || maxNumSegments > 0;
assert maxNumSegments == UNBOUNDED_MAX_MERGE_SEGMENTS || maxNumSegments > 0;
assert trigger != null;
if (stopMerges) {
return false;
@ -2771,7 +2771,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// Best-effort up front check:
testReserveDocs(numDocs);
final IOContext context = new IOContext(new MergeInfo(Math.toIntExact(numDocs), -1, false, -1));
final IOContext context = new IOContext(new MergeInfo(Math.toIntExact(numDocs), -1, false, UNBOUNDED_MAX_MERGE_SEGMENTS));
// TODO: somehow we should fix this merge so it's
// abortable so that IW.close(false) is able to stop it
@ -3797,7 +3797,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
infoStream.message("IW", "after commitMerge: " + segString());
}
if (merge.maxNumSegments != -1 && !dropSegment) {
if (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS && !dropSegment) {
// cascade the forceMerge:
if (!segmentsToMerge.containsKey(merge.info)) {
segmentsToMerge.put(merge.info, Boolean.FALSE);
@ -3876,7 +3876,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "hit exception during merge");
}
} else if (merge.rateLimiter.getAbort() == false && (merge.maxNumSegments != -1 || (!closed && !closing))) {
} else if (merge.rateLimiter.getAbort() == false && (merge.maxNumSegments != UNBOUNDED_MAX_MERGE_SEGMENTS || (!closed && !closing))) {
// This merge (and, generally, any change to the
// segments) may now enable new merges, so we call
// merge policy & update pending merges.
@ -4015,7 +4015,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
testPoint("startMergeInit");
assert merge.registerDone;
assert merge.maxNumSegments == -1 || merge.maxNumSegments > 0;
assert merge.maxNumSegments == UNBOUNDED_MAX_MERGE_SEGMENTS || merge.maxNumSegments > 0;
if (tragedy != null) {
throw new IllegalStateException("this writer hit an unrecoverable error; cannot merge", tragedy);

View File

@ -17,6 +17,8 @@
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@ -70,6 +72,12 @@ public final class LegacyBinaryDocValuesWrapper extends BinaryDocValues {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return docsWithField.get(target);
}
@Override
public long cost() {
return 0;

View File

@ -17,6 +17,8 @@
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.util.Bits;
/**
@ -69,6 +71,13 @@ public final class LegacyNumericDocValuesWrapper extends NumericDocValues {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
value = values.get(docID);
return value != 0 || docsWithField.get(docID);
}
@Override
public long cost() {
// TODO

View File

@ -17,6 +17,8 @@
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.util.BytesRef;
/**
@ -70,6 +72,13 @@ public final class LegacySortedDocValuesWrapper extends SortedDocValues {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
ord = values.getOrd(docID);
return ord != -1;
}
@Override
public long cost() {
return 0;

View File

@ -17,6 +17,8 @@
package org.apache.lucene.index;
import java.io.IOException;
/**
* Wraps a {@link LegacySortedNumericDocValues} into a {@link SortedNumericDocValues}.
*
@ -71,6 +73,14 @@ public final class LegacySortedNumericDocValuesWrapper extends SortedNumericDocV
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
values.setDocument(docID);
upto = 0;
return values.count() != 0;
}
@Override
public long cost() {
return 0;

View File

@ -17,6 +17,8 @@
package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.util.BytesRef;
/**
@ -71,6 +73,14 @@ public final class LegacySortedSetDocValuesWrapper extends SortedSetDocValues {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
values.setDocument(docID);
ord = values.nextOrd();
return ord != NO_MORE_ORDS;
}
@Override
public long cost() {
return 0;

View File

@ -138,6 +138,27 @@ public class MultiDocValues {
}
}
@Override
public boolean advanceExact(int targetDocID) throws IOException {
if (targetDocID <= docID) {
throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
}
int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
if (readerIndex >= nextLeaf) {
if (readerIndex == leaves.size()) {
throw new IllegalArgumentException("Out of range: " + targetDocID);
}
currentLeaf = leaves.get(readerIndex);
currentValues = currentLeaf.reader().getNormValues(field);
nextLeaf = readerIndex+1;
}
docID = targetDocID;
if (currentValues == null) {
return false;
}
return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
}
@Override
public long longValue() throws IOException {
return currentValues.longValue();
@ -243,6 +264,26 @@ public class MultiDocValues {
}
}
@Override
public boolean advanceExact(int targetDocID) throws IOException {
if (targetDocID <= docID) {
throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
}
int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
if (readerIndex >= nextLeaf) {
if (readerIndex == leaves.size()) {
throw new IllegalArgumentException("Out of range: " + targetDocID);
}
currentLeaf = leaves.get(readerIndex);
currentValues = currentLeaf.reader().getNumericDocValues(field);
nextLeaf = readerIndex+1;
}
docID = targetDocID;
if (currentValues == null) {
return false;
}
return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
}
@Override
public long longValue() throws IOException {
return currentValues.longValue();
@ -347,6 +388,27 @@ public class MultiDocValues {
}
}
@Override
public boolean advanceExact(int targetDocID) throws IOException {
if (targetDocID <= docID) {
throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
}
int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
if (readerIndex >= nextLeaf) {
if (readerIndex == leaves.size()) {
throw new IllegalArgumentException("Out of range: " + targetDocID);
}
currentLeaf = leaves.get(readerIndex);
currentValues = currentLeaf.reader().getBinaryDocValues(field);
nextLeaf = readerIndex+1;
}
docID = targetDocID;
if (currentValues == null) {
return false;
}
return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
}
@Override
public BytesRef binaryValue() throws IOException {
return currentValues.binaryValue();
@ -461,6 +523,27 @@ public class MultiDocValues {
}
}
@Override
public boolean advanceExact(int targetDocID) throws IOException {
if (targetDocID <= docID) {
throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
}
int readerIndex = ReaderUtil.subIndex(targetDocID, leaves);
if (readerIndex >= nextLeaf) {
if (readerIndex == leaves.size()) {
throw new IllegalArgumentException("Out of range: " + targetDocID);
}
currentLeaf = leaves.get(readerIndex);
currentValues = values[readerIndex];
nextLeaf = readerIndex+1;
}
docID = targetDocID;
if (currentValues == null) {
return false;
}
return currentValues.advanceExact(targetDocID - currentLeaf.docBase);
}
@Override
public long cost() {
return finalTotalCost;
@ -922,6 +1005,27 @@ public class MultiDocValues {
}
}
@Override
public boolean advanceExact(int targetDocID) throws IOException {
if (targetDocID <= docID) {
throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
}
int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);
if (readerIndex >= nextLeaf) {
if (readerIndex == values.length) {
throw new IllegalArgumentException("Out of range: " + targetDocID);
}
currentDocStart = docStarts[readerIndex];
currentValues = values[readerIndex];
nextLeaf = readerIndex+1;
}
docID = targetDocID;
if (currentValues == null) {
return false;
}
return currentValues.advanceExact(targetDocID - currentDocStart);
}
@Override
public int ordValue() {
return (int) mapping.getGlobalOrds(nextLeaf-1).get(currentValues.ordValue());
@ -1028,6 +1132,27 @@ public class MultiDocValues {
}
}
@Override
public boolean advanceExact(int targetDocID) throws IOException {
if (targetDocID < docID) {
throw new IllegalArgumentException("can only advance beyond current document: on docID=" + docID + " but targetDocID=" + targetDocID);
}
int readerIndex = ReaderUtil.subIndex(targetDocID, docStarts);
if (readerIndex >= nextLeaf) {
if (readerIndex == values.length) {
throw new IllegalArgumentException("Out of range: " + targetDocID);
}
currentDocStart = docStarts[readerIndex];
currentValues = values[readerIndex];
nextLeaf = readerIndex+1;
}
docID = targetDocID;
if (currentValues == null) {
return false;
}
return currentValues.advanceExact(targetDocID - currentDocStart);
}
@Override
public long nextOrd() throws IOException {
long segmentOrd = currentValues.nextOrd();

View File

@ -132,6 +132,11 @@ class NormValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return docsWithField.cost();

View File

@ -19,12 +19,10 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
/**
* A per-document numeric value.
*/
public abstract class NumericDocValues extends DocIdSetIterator {
public abstract class NumericDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@ -32,7 +30,10 @@ public abstract class NumericDocValues extends DocIdSetIterator {
/**
* Returns the numeric value for the current document ID.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
* @return numeric value
*/
public abstract long longValue() throws IOException;
}

View File

@ -118,6 +118,11 @@ class NumericDocValuesWriter extends DocValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return docsWithField.cost();

View File

@ -359,6 +359,11 @@ class ReadersAndUpdates {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
// TODO
@ -461,6 +466,11 @@ class ReadersAndUpdates {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return currentValues.cost();

View File

@ -27,7 +27,6 @@ import java.io.IOException;
*/
final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
private final NumericDocValues in;
private long value;
public SingletonSortedNumericDocValues(NumericDocValues in) {
if (in.docID() != -1) {
@ -51,20 +50,17 @@ final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
@Override
public int nextDoc() throws IOException {
int docID = in.nextDoc();
if (docID != NO_MORE_DOCS) {
value = in.longValue();
}
return docID;
return in.nextDoc();
}
@Override
public int advance(int target) throws IOException {
int docID = in.advance(target);
if (docID != NO_MORE_DOCS) {
value = in.longValue();
}
return docID;
return in.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
return in.advanceExact(target);
}
@Override
@ -73,8 +69,8 @@ final class SingletonSortedNumericDocValues extends SortedNumericDocValues {
}
@Override
public long nextValue() {
return value;
public long nextValue() throws IOException {
return in.longValue();
}
@Override

View File

@ -29,7 +29,6 @@ import org.apache.lucene.util.BytesRef;
*/
final class SingletonSortedSetDocValues extends SortedSetDocValues {
private final SortedDocValues in;
private long currentOrd;
private long ord;
/** Creates a multi-valued view over the provided SortedDocValues */
@ -55,8 +54,8 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
@Override
public long nextOrd() {
long v = currentOrd;
currentOrd = NO_MORE_ORDS;
long v = ord;
ord = NO_MORE_ORDS;
return v;
}
@ -64,7 +63,7 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
public int nextDoc() throws IOException {
int docID = in.nextDoc();
if (docID != NO_MORE_DOCS) {
currentOrd = ord = in.ordValue();
ord = in.ordValue();
}
return docID;
}
@ -73,11 +72,20 @@ final class SingletonSortedSetDocValues extends SortedSetDocValues {
public int advance(int target) throws IOException {
int docID = in.advance(target);
if (docID != NO_MORE_DOCS) {
currentOrd = ord = in.ordValue();
ord = in.ordValue();
}
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
ord = in.ordValue();
return true;
}
return false;
}
@Override
public BytesRef lookupOrd(long ord) throws IOException {
// cast is ok: single-valued cannot exceed Integer.MAX_VALUE

View File

@ -40,6 +40,8 @@ public abstract class SortedDocValues extends BinaryDocValues {
/**
* Returns the ordinal for the current docID.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
* @return ordinal for the document: this is dense, starts at 0, then
* increments by 1 for the next value in sorted order.
*/
@ -107,4 +109,5 @@ public abstract class SortedDocValues extends BinaryDocValues {
public TermsEnum termsEnum() throws IOException {
return new SortedDocValuesTermsEnum(this);
}
}

View File

@ -165,6 +165,11 @@ class SortedDocValuesWriter extends DocValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
return docsWithField.cost();

View File

@ -18,14 +18,12 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
/**
* A list of per-document numeric values, sorted
* according to {@link Long#compare(long, long)}.
*/
public abstract class SortedNumericDocValues extends DocIdSetIterator {
public abstract class SortedNumericDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@ -40,6 +38,8 @@ public abstract class SortedNumericDocValues extends DocIdSetIterator {
/**
* Retrieves the number of values for the current document. This must always
* be greater than zero.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
*/
public abstract int docValueCount();
}

View File

@ -154,6 +154,11 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int docValueCount() {
return valueCount;

View File

@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
/**
@ -30,7 +29,7 @@ import org.apache.lucene.util.BytesRef;
* dictionary value (ordinal) can be retrieved for each document. Ordinals
* are dense and in increasing sorted order.
*/
public abstract class SortedSetDocValues extends DocIdSetIterator {
public abstract class SortedSetDocValues extends DocValuesIterator {
/** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */
@ -43,6 +42,8 @@ public abstract class SortedSetDocValues extends DocIdSetIterator {
/**
* Returns the next ordinal for the current document.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
* @return next ordinal for the document, or {@link #NO_MORE_ORDS}.
* ordinals are dense, start at 0, then increment by 1 for
* the next value in sorted order.

View File

@ -225,6 +225,11 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
throw new UnsupportedOperationException();
}
@Override
public boolean advanceExact(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long getValueCount() {
return ordMap.length;

View File

@ -185,6 +185,12 @@ class SortingLeafReader extends FilterLeafReader {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return dvs.docsWithField.get(target);
}
@Override
public BytesRef binaryValue() {
return dvs.values[docID];
@ -254,6 +260,12 @@ class SortingLeafReader extends FilterLeafReader {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return dvs.docsWithField.get(target);
}
@Override
public long longValue() {
return dvs.values[docID];
@ -394,6 +406,12 @@ class SortingLeafReader extends FilterLeafReader {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return ords[target] != -1;
}
@Override
public int ordValue() {
return ords[docID];
@ -467,6 +485,13 @@ class SortingLeafReader extends FilterLeafReader {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
ordUpto = 0;
return ords[docID] != null;
}
@Override
public long nextOrd() {
if (ordUpto == ords[docID].length) {
@ -538,6 +563,13 @@ class SortingLeafReader extends FilterLeafReader {
}
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
upto = 0;
return values[docID] != null;
}
@Override
public long nextValue() {
if (upto == values[docID].length) {

View File

@ -178,11 +178,7 @@ public abstract class FieldComparator<T> {
}
private double getValueForDoc(int doc) throws IOException {
int curDocID = currentReaderValues.docID();
if (doc > curDocID) {
curDocID = currentReaderValues.advance(doc);
}
if (doc == curDocID) {
if (currentReaderValues.advanceExact(doc)) {
return Double.longBitsToDouble(currentReaderValues.longValue());
} else {
return missingValue;
@ -242,11 +238,7 @@ public abstract class FieldComparator<T> {
}
private float getValueForDoc(int doc) throws IOException {
int curDocID = currentReaderValues.docID();
if (doc > curDocID) {
curDocID = currentReaderValues.advance(doc);
}
if (doc == curDocID) {
if (currentReaderValues.advanceExact(doc)) {
return Float.intBitsToFloat((int) currentReaderValues.longValue());
} else {
return missingValue;
@ -308,11 +300,7 @@ public abstract class FieldComparator<T> {
}
private int getValueForDoc(int doc) throws IOException {
int curDocID = currentReaderValues.docID();
if (doc > curDocID) {
curDocID = currentReaderValues.advance(doc);
}
if (doc == curDocID) {
if (currentReaderValues.advanceExact(doc)) {
return (int) currentReaderValues.longValue();
} else {
return missingValue;
@ -372,11 +360,7 @@ public abstract class FieldComparator<T> {
}
private long getValueForDoc(int doc) throws IOException {
int curDocID = currentReaderValues.docID();
if (doc > curDocID) {
curDocID = currentReaderValues.advance(doc);
}
if (doc == curDocID) {
if (currentReaderValues.advanceExact(doc)) {
return currentReaderValues.longValue();
} else {
return missingValue;
@ -656,15 +640,11 @@ public abstract class FieldComparator<T> {
}
private int getOrdForDoc(int doc) throws IOException {
int curDocID = termsIndex.docID();
if (doc > curDocID) {
if (termsIndex.advance(doc) == doc) {
return termsIndex.ordValue();
}
} else if (doc == curDocID) {
if (termsIndex.advanceExact(doc)) {
return termsIndex.ordValue();
} else {
return -1;
}
return -1;
}
@Override
@ -864,11 +844,7 @@ public abstract class FieldComparator<T> {
}
private BytesRef getValueForDoc(int doc) throws IOException {
int curDocID = docTerms.docID();
if (doc > curDocID) {
curDocID = docTerms.advance(doc);
}
if (doc == curDocID) {
if (docTerms.advanceExact(doc)) {
return docTerms.binaryValue();
} else {
return null;

View File

@ -131,6 +131,15 @@ public class SortedNumericSelector {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
value = in.nextValue();
return true;
}
return false;
}
@Override
public long cost() {
return in.cost();
@ -181,6 +190,15 @@ public class SortedNumericSelector {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
setValue();
return true;
}
return false;
}
@Override
public long cost() {
return in.cost();

View File

@ -117,6 +117,15 @@ public class SortedSetSelector {
return docID();
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
setOrd();
return true;
}
return false;
}
@Override
public long cost() {
return in.cost();
@ -179,6 +188,15 @@ public class SortedSetSelector {
return docID();
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
setOrd();
return true;
}
return false;
}
@Override
public long cost() {
return in.cost();
@ -248,6 +266,15 @@ public class SortedSetSelector {
return docID();
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
setOrd();
return true;
}
return false;
}
@Override
public long cost() {
return in.cost();
@ -329,6 +356,15 @@ public class SortedSetSelector {
return docID();
}
@Override
public boolean advanceExact(int target) throws IOException {
if (in.advanceExact(target)) {
setOrd();
return true;
}
return false;
}
@Override
public long cost() {
return in.cost();

View File

@ -244,11 +244,7 @@ public class BM25Similarity extends Similarity {
if (norms == null) {
norm = k1;
} else {
int normsDocID = norms.docID();
if (normsDocID < doc) {
normsDocID = norms.advance(doc);
}
if (normsDocID == doc) {
if (norms.advanceExact(doc)) {
norm = cache[(byte)norms.longValue() & 0xFF];
} else {
norm = cache[0];
@ -310,7 +306,7 @@ public class BM25Similarity extends Similarity {
"tfNorm, computed from:", subs);
} else {
byte norm;
if (norms.advance(doc) == doc) {
if (norms.advanceExact(doc)) {
norm = (byte) norms.longValue();
} else {
norm = 0;

View File

@ -279,11 +279,7 @@ public abstract class SimilarityBase extends Similarity {
if (norms == null) {
return 1F;
}
int normsDocID = norms.docID();
if (normsDocID < doc) {
normsDocID = norms.advance(doc);
}
if (normsDocID == doc) {
if (norms.advanceExact(doc)) {
return decodeNormValue((byte) norms.longValue());
} else {
return decodeNormValue((byte) 0);

View File

@ -599,11 +599,7 @@ public abstract class TFIDFSimilarity extends Similarity {
return raw;
} else {
long normValue;
int normsDocID = norms.docID();
if (normsDocID < doc) {
normsDocID = norms.advance(doc);
}
if (normsDocID == doc) {
if (norms.advanceExact(doc)) {
normValue = norms.longValue();
} else {
normValue = 0;
@ -649,7 +645,7 @@ public abstract class TFIDFSimilarity extends Similarity {
private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) throws IOException {
Explanation tfExplanation = Explanation.match(tf(freq.getValue()), "tf(freq="+freq.getValue()+"), with freq of:", freq);
float norm;
if (norms != null && norms.advance(doc) == doc) {
if (norms != null && norms.advanceExact(doc)) {
norm = decodeNormValue(norms.longValue());
} else {
norm = 1f;

View File

@ -0,0 +1,107 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
public class TestDelegatingAnalyzerWrapper extends LuceneTestCase {
public void testDelegatesNormalization() {
Analyzer analyzer1 = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
return analyzer1;
}
};
assertEquals(new BytesRef("Ab C"), w1.normalize("foo", "Ab C"));
Analyzer analyzer2 = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true);
DelegatingAnalyzerWrapper w2 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
return analyzer2;
}
};
assertEquals(new BytesRef("ab c"), w2.normalize("foo", "Ab C"));
}
public void testDelegatesAttributeFactory() throws Exception {
Analyzer analyzer1 = new MockBytesAnalyzer();
DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
return analyzer1;
}
};
assertEquals(new BytesRef("Ab C".getBytes(StandardCharsets.UTF_16LE)), w1.normalize("foo", "Ab C"));
}
public void testDelegatesCharFilter() throws Exception {
Analyzer analyzer1 = new Analyzer() {
@Override
protected Reader initReaderForNormalization(String fieldName, Reader reader) {
return new DummyCharFilter(reader, 'b', 'z');
}
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(attributeFactory(fieldName));
return new TokenStreamComponents(tokenizer);
}
};
DelegatingAnalyzerWrapper w1 = new DelegatingAnalyzerWrapper(Analyzer.GLOBAL_REUSE_STRATEGY) {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
return analyzer1;
}
};
assertEquals(new BytesRef("az c"), w1.normalize("foo", "ab c"));
}
private static class DummyCharFilter extends CharFilter {
private final char match, repl;
public DummyCharFilter(Reader input, char match, char repl) {
super(input);
this.match = match;
this.repl = repl;
}
@Override
protected int correct(int currentOff) {
return currentOff;
}
@Override
public int read(char[] cbuf, int off, int len) throws IOException {
final int read = input.read(cbuf, off, len);
for (int i = 0; i < read; ++i) {
if (cbuf[off+i] == match) {
cbuf[off+i] = repl;
}
}
return read;
}
}
}

View File

@ -26,7 +26,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public abstract class AbstractTestCompressionMode extends LuceneTestCase {
@ -35,7 +35,7 @@ public abstract class AbstractTestCompressionMode extends LuceneTestCase {
static byte[] randomArray() {
final int max = random().nextBoolean()
? random().nextInt(4)
: random().nextInt(256);
: random().nextInt(255);
final int length = random().nextBoolean()
? random().nextInt(20)
: random().nextInt(192 * 1024);
@ -45,7 +45,7 @@ public abstract class AbstractTestCompressionMode extends LuceneTestCase {
static byte[] randomArray(int length, int max) {
final byte[] arr = new byte[length];
for (int i = 0; i < arr.length; ++i) {
arr[i] = (byte) RandomInts.randomIntBetween(random(), 0, max);
arr[i] = (byte) RandomNumbers.randomIntBetween(random(), 0, max);
}
return arr;
}
@ -130,7 +130,7 @@ public abstract class AbstractTestCompressionMode extends LuceneTestCase {
}
public void testIncompressible() throws IOException {
final byte[] decompressed = new byte[RandomInts.randomIntBetween(random(), 20, 256)];
final byte[] decompressed = new byte[RandomNumbers.randomIntBetween(random(), 20, 256)];
for (int i = 0; i < decompressed.length; ++i) {
decompressed[i] = (byte) i;
}

View File

@ -20,7 +20,7 @@ package org.apache.lucene.codecs.compressing;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public abstract class AbstractTestLZ4CompressionMode extends AbstractTestCompressionMode {
@ -88,7 +88,7 @@ public abstract class AbstractTestLZ4CompressionMode extends AbstractTestCompres
public void testLongMatchs() throws IOException {
// match length >= 20
final byte[] decompressed = new byte[RandomInts.randomIntBetween(random(), 300, 1024)];
final byte[] decompressed = new byte[RandomNumbers.randomIntBetween(random(), 300, 1024)];
for (int i = 0; i < decompressed.length; ++i) {
decompressed[i] = (byte) i;
}
@ -97,10 +97,10 @@ public abstract class AbstractTestLZ4CompressionMode extends AbstractTestCompres
public void testLongLiterals() throws IOException {
// long literals (length >= 16) which are not the last literals
final byte[] decompressed = randomArray(RandomInts.randomIntBetween(random(), 400, 1024), 256);
final byte[] decompressed = randomArray(RandomNumbers.randomIntBetween(random(), 400, 1024), 256);
final int matchRef = random().nextInt(30);
final int matchOff = RandomInts.randomIntBetween(random(), decompressed.length - 40, decompressed.length - 20);
final int matchLength = RandomInts.randomIntBetween(random(), 4, 10);
final int matchOff = RandomNumbers.randomIntBetween(random(), decompressed.length - 40, decompressed.length - 20);
final int matchLength = RandomNumbers.randomIntBetween(random(), 4, 10);
System.arraycopy(decompressed, matchRef, decompressed, matchOff, matchLength);
test(decompressed);
}

View File

@ -32,24 +32,24 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.packed.PackedInts;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public class TestForUtil extends LuceneTestCase {
public void testEncodeDecode() throws IOException {
final int iterations = RandomInts.randomIntBetween(random(), 1, 1000);
final int iterations = RandomNumbers.randomIntBetween(random(), 1, 1000);
final float acceptableOverheadRatio = random().nextFloat();
final int[] values = new int[(iterations - 1) * BLOCK_SIZE + ForUtil.MAX_DATA_SIZE];
for (int i = 0; i < iterations; ++i) {
final int bpv = random().nextInt(32);
if (bpv == 0) {
final int value = RandomInts.randomIntBetween(random(), 0, Integer.MAX_VALUE);
final int value = RandomNumbers.randomIntBetween(random(), 0, Integer.MAX_VALUE);
for (int j = 0; j < BLOCK_SIZE; ++j) {
values[i * BLOCK_SIZE + j] = value;
}
} else {
for (int j = 0; j < BLOCK_SIZE; ++j) {
values[i * BLOCK_SIZE + j] = RandomInts.randomIntBetween(random(),
values[i * BLOCK_SIZE + j] = RandomNumbers.randomIntBetween(random(),
0, (int) PackedInts.maxValue(bpv));
}
}

View File

@ -153,7 +153,7 @@ public class TestIndexedDISI extends LuceneTestCase {
public void testRandom() throws IOException {
try (Directory dir = newDirectory()) {
for (int i = 0; i < 100; ++i) {
for (int i = 0; i < 10; ++i) {
doTestRandom(dir);
}
}
@ -217,6 +217,32 @@ public class TestIndexedDISI extends LuceneTestCase {
}
}
for (int step : new int[] {10, 100, 1000, 10000, 100000}) {
try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
IndexedDISI disi = new IndexedDISI(in, 0L, length, cardinality);
BitSetIterator disi2 = new BitSetIterator(set, cardinality);
int index = -1;
for (int target = 0; target < set.length(); ) {
target += TestUtil.nextInt(random(), 0, step);
int doc = disi2.docID();
while (doc < target) {
doc = disi2.nextDoc();
index++;
}
boolean exists = disi.advanceExact(target);
assertEquals(doc == target, exists);
if (exists) {
assertEquals(index, disi.index());
} else if (random().nextBoolean()) {
assertEquals(doc, disi.nextDoc());
assertEquals(index, disi.index());
target = doc;
}
}
}
}
dir.deleteFile("foo");
}

View File

@ -104,7 +104,7 @@ public class TestLucene70DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(atLeast(300), 1, 32766);
doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
}
}
@ -112,7 +112,7 @@ public class TestLucene70DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthManyVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1, 500);
doTestSortedVsStoredFields(TestUtil.nextInt(random(), 1024, 2049), 1d, 1, 500);
}
}

View File

@ -30,7 +30,7 @@ import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
/**
* This test creates an index with one segment that is a little larger than 4GB.
@ -69,7 +69,7 @@ public class Test4GBStoredFields extends LuceneTestCase {
final FieldType ft = new FieldType();
ft.setStored(true);
ft.freeze();
final int valueLength = RandomInts.randomIntBetween(random(), 1 << 13, 1 << 20);
final int valueLength = RandomNumbers.randomIntBetween(random(), 1 << 13, 1 << 20);
final byte[] value = new byte[valueLength];
for (int i = 0; i < valueLength; ++i) {
// random so that even compressing codecs can't compress it

View File

@ -34,7 +34,7 @@ import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public class TestBooleanOr extends LuceneTestCase {
@ -239,7 +239,7 @@ public class TestBooleanOr extends LuceneTestCase {
if (i == matches.length) {
return DocIdSetIterator.NO_MORE_DOCS;
}
return RandomInts.randomIntBetween(random(), max, matches[i]);
return RandomNumbers.randomIntBetween(random(), max, matches[i]);
}
@Override
public long cost() {

View File

@ -424,7 +424,7 @@ public class TestBooleanRewrites extends LuceneTestCase {
for (ScoreDoc scoreDoc : td2.scoreDocs) {
final float expectedScore = expectedScores.get(scoreDoc.doc);
final float actualScore = scoreDoc.score;
assertEquals(expectedScore, actualScore, 10e-5);
assertEquals(expectedScore, actualScore, expectedScore / 100); // error under 1%
}
}
}

View File

@ -23,7 +23,7 @@ import java.util.List;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.packed.PackedInts;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
@Slow
public class TestTimSorterWorstCase extends LuceneTestCase {
@ -33,9 +33,9 @@ public class TestTimSorterWorstCase extends LuceneTestCase {
// but not so big we blow up available heap.
final int length;
if (TEST_NIGHTLY) {
length = RandomInts.randomIntBetween(random(), 140000000, 400000000);
length = RandomNumbers.randomIntBetween(random(), 140000000, 400000000);
} else {
length = RandomInts.randomIntBetween(random(), 140000000, 200000000);
length = RandomNumbers.randomIntBetween(random(), 140000000, 200000000);
}
final PackedInts.Mutable arr = generateWorstCaseArray(length);
new TimSorter(0) {

View File

@ -21,7 +21,7 @@ import java.util.*;
import org.apache.lucene.util.*;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
@ -29,7 +29,7 @@ public class TestOperations extends LuceneTestCase {
/** Test string union. */
public void testStringUnion() {
List<BytesRef> strings = new ArrayList<>();
for (int i = RandomInts.randomIntBetween(random(), 0, 1000); --i >= 0;) {
for (int i = RandomNumbers.randomIntBetween(random(), 0, 1000); --i >= 0;) {
strings.add(new BytesRef(TestUtil.randomUnicodeString(random())));
}

View File

@ -42,14 +42,14 @@ import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.packed.PackedInts.Reader;
import org.junit.Ignore;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public class TestPackedInts extends LuceneTestCase {
public void testByteCount() {
final int iters = atLeast(3);
for (int i = 0; i < iters; ++i) {
final int valueCount = RandomInts.randomIntBetween(random(), 1, Integer.MAX_VALUE);
final int valueCount = RandomNumbers.randomIntBetween(random(), 1, Integer.MAX_VALUE);
for (PackedInts.Format format : PackedInts.Format.values()) {
for (int bpv = 1; bpv <= 64; ++bpv) {
final long byteCount = format.byteCount(PackedInts.VERSION_CURRENT, valueCount, bpv);
@ -206,7 +206,7 @@ public class TestPackedInts extends LuceneTestCase {
public void testEndPointer() throws IOException {
final Directory dir = newDirectory();
final int valueCount = RandomInts.randomIntBetween(random(), 1, 1000);
final int valueCount = RandomNumbers.randomIntBetween(random(), 1, 1000);
final IndexOutput out = dir.createOutput("tests.bin", newIOContext(random()));
for (int i = 0; i < valueCount; ++i) {
out.writeLong(0);
@ -224,7 +224,7 @@ public class TestPackedInts extends LuceneTestCase {
// test iterator
in.seek(0L);
final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(in, format, version, valueCount, bpv, RandomInts.randomIntBetween(random(), 1, 1<<16));
final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(in, format, version, valueCount, bpv, RandomNumbers.randomIntBetween(random(), 1, 1<<16));
for (int i = 0; i < valueCount; ++i) {
it.next();
}
@ -981,9 +981,9 @@ public class TestPackedInts extends LuceneTestCase {
}
public void testPackedLongValues() {
final long[] arr = new long[RandomInts.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 100000)];
final long[] arr = new long[RandomNumbers.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 100000)];
float[] ratioOptions = new float[]{PackedInts.DEFAULT, PackedInts.COMPACT, PackedInts.FAST};
for (int bpv : new int[]{0, 1, 63, 64, RandomInts.randomIntBetween(random(), 2, 62)}) {
for (int bpv : new int[]{0, 1, 63, 64, RandomNumbers.randomIntBetween(random(), 2, 62)}) {
for (DataType dataType : Arrays.asList(DataType.DELTA_PACKED)) {
final int pageSize = 1 << TestUtil.nextInt(random(), 6, 20);
float acceptableOverheadRatio = ratioOptions[TestUtil.nextInt(random(), 0, ratioOptions.length - 1)];
@ -1063,7 +1063,7 @@ public class TestPackedInts extends LuceneTestCase {
final int[] bitsPerValues = new int[longs.length];
final boolean[] skip = new boolean[longs.length];
for (int i = 0; i < longs.length; ++i) {
final int bpv = RandomInts.randomIntBetween(random(), 1, 64);
final int bpv = RandomNumbers.randomIntBetween(random(), 1, 64);
bitsPerValues[i] = random().nextBoolean() ? bpv : TestUtil.nextInt(random(), bpv, 64);
if (bpv == 64) {
longs[i] = random().nextLong();

View File

@ -36,7 +36,8 @@ import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;
/**
* Default implementation of {@link SortedSetDocValuesFacetCounts}
* Default implementation of {@link SortedSetDocValuesFacetCounts}. You must ensure the original
* {@link IndexReader} passed to the constructor is not closed whenever you use this class!
*/
public class DefaultSortedSetDocValuesReaderState extends SortedSetDocValuesReaderState {

View File

@ -18,6 +18,7 @@ package org.apache.lucene.facet.sortedset;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@ -33,11 +34,15 @@ import org.apache.lucene.facet.TopOrdAndIntQueue;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState.OrdRange;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
@ -77,6 +82,17 @@ public class SortedSetDocValuesFacetCounts extends Facets {
count(hits.getMatchingDocs());
}
/** Returns all facet counts, same result as searching on {@link MatchAllDocsQuery} but faster. */
public SortedSetDocValuesFacetCounts(SortedSetDocValuesReaderState state)
throws IOException {
this.state = state;
this.field = state.getField();
dv = state.getDocValues();
counts = new int[state.getSize()];
//System.out.println("field=" + field);
countAll();
}
@Override
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
@ -176,7 +192,8 @@ public class SortedSetDocValuesFacetCounts extends Facets {
continue;
}
DocIdSetIterator docs = hits.bits.iterator();
DocIdSetIterator it = ConjunctionDISI.intersectIterators(Arrays.asList(
hits.bits.iterator(), segValues));
// TODO: yet another option is to count all segs
// first, only in seg-ord space, and then do a
@ -196,19 +213,12 @@ public class SortedSetDocValuesFacetCounts extends Facets {
if (hits.totalHits < numSegOrds/10) {
//System.out.println(" remap as-we-go");
// Remap every ord to global ord as we iterate:
int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
//System.out.println(" doc=" + doc);
if (doc > segValues.docID()) {
segValues.advance(doc);
}
if (doc == segValues.docID()) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
counts[(int) ordMap.get(term)]++;
term = (int) segValues.nextOrd();
}
for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" segOrd=" + segOrd + " ord=" + term + " globalOrd=" + ordinalMap.getGlobalOrd(segOrd, term));
counts[(int) ordMap.get(term)]++;
term = (int) segValues.nextOrd();
}
}
} else {
@ -216,19 +226,12 @@ public class SortedSetDocValuesFacetCounts extends Facets {
// First count in seg-ord space:
final int[] segCounts = new int[numSegOrds];
int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
//System.out.println(" doc=" + doc);
if (doc > segValues.docID()) {
segValues.advance(doc);
}
if (doc == segValues.docID()) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" ord=" + term);
segCounts[term]++;
term = (int) segValues.nextOrd();
}
for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
//System.out.println(" ord=" + term);
segCounts[term]++;
term = (int) segValues.nextOrd();
}
}
@ -244,12 +247,76 @@ public class SortedSetDocValuesFacetCounts extends Facets {
} else {
// No ord mapping (e.g., single segment index):
// just aggregate directly into counts:
int doc;
while ((doc = docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
if (doc > segValues.docID()) {
segValues.advance(doc);
for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
counts[term]++;
term = (int) segValues.nextOrd();
}
if (doc == segValues.docID()) {
}
}
}
}
/** Does all the "real work" of tallying up the counts. */
private final void countAll() throws IOException {
//System.out.println("ssdv count");
MultiDocValues.OrdinalMap ordinalMap;
// TODO: is this right? really, we need a way to
// verify that this ordinalMap "matches" the leaves in
// matchingDocs...
if (dv instanceof MultiDocValues.MultiSortedSetDocValues) {
ordinalMap = ((MultiSortedSetDocValues) dv).mapping;
} else {
ordinalMap = null;
}
IndexReader origReader = state.getOrigReader();
for(LeafReaderContext context : origReader.leaves()) {
LeafReader reader = context.reader();
SortedSetDocValues segValues = reader.getSortedSetDocValues(field);
if (segValues == null) {
continue;
}
Bits liveDocs = reader.getLiveDocs();
if (ordinalMap != null) {
final LongValues ordMap = ordinalMap.getGlobalOrds(context.ord);
int numSegOrds = (int) segValues.getValueCount();
// First count in seg-ord space:
final int[] segCounts = new int[numSegOrds];
int docID;
while ((docID = segValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
if (liveDocs == null || liveDocs.get(docID)) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
segCounts[term]++;
term = (int) segValues.nextOrd();
}
}
}
// Then, migrate to global ords:
for(int ord=0;ord<numSegOrds;ord++) {
int count = segCounts[ord];
if (count != 0) {
counts[(int) ordMap.get(ord)] += count;
}
}
} else {
// No ord mapping (e.g., single segment index):
// just aggregate directly into counts:
int docID;
while ((docID = segValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
if (liveDocs == null || liveDocs.get(docID)) {
int term = (int) segValues.nextOrd();
while (term != SortedSetDocValues.NO_MORE_ORDS) {
counts[term]++;

View File

@ -24,8 +24,12 @@ import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConjunctionDISI;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
/** Computes facets counts, assuming the default encoding
@ -50,6 +54,16 @@ public class FastTaxonomyFacetCounts extends IntTaxonomyFacets {
count(fc.getMatchingDocs());
}
/** Create {@code FastTaxonomyFacetCounts}, using the
 * specified {@code indexFieldName} for ordinals, and
 * counting all non-deleted documents in the index. This is
 * the same result as searching on {@link MatchAllDocsQuery},
 * but faster */
public FastTaxonomyFacetCounts(String indexFieldName, IndexReader reader, TaxonomyReader taxoReader, FacetsConfig config) throws IOException {
  super(indexFieldName, taxoReader, config);
  // Browse-only path: walk the doc values of every leaf directly instead of collecting hits.
  countAll(reader);
}
private final void count(List<MatchingDocs> matchingDocs) throws IOException {
for(MatchingDocs hits : matchingDocs) {
BinaryDocValues dv = hits.context.reader().getBinaryDocValues(indexFieldName);
@ -82,4 +96,39 @@ public class FastTaxonomyFacetCounts extends IntTaxonomyFacets {
rollup();
}
private final void countAll(IndexReader reader) throws IOException {
for(LeafReaderContext context : reader.leaves()) {
BinaryDocValues dv = context.reader().getBinaryDocValues(indexFieldName);
if (dv == null) { // this reader does not have DocValues for the requested category list
continue;
}
Bits liveDocs = context.reader().getLiveDocs();
for (int doc = dv.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dv.nextDoc()) {
if (liveDocs != null && liveDocs.get(doc) == false) {
continue;
}
final BytesRef bytesRef = dv.binaryValue();
byte[] bytes = bytesRef.bytes;
int end = bytesRef.offset + bytesRef.length;
int ord = 0;
int offset = bytesRef.offset;
int prev = 0;
while (offset < end) {
byte b = bytes[offset++];
if (b >= 0) {
prev = ord = ((ord << 7) | b) + prev;
++values[ord];
ord = 0;
} else {
ord = (ord << 7) | (b & 0x7F);
}
}
}
}
rollup();
}
}

View File

@ -76,7 +76,7 @@ public abstract class TaxonomyFacets extends Facets {
protected FacetsConfig.DimConfig verifyDim(String dim) {
FacetsConfig.DimConfig dimConfig = config.getDimConfig(dim);
if (!dimConfig.indexFieldName.equals(indexFieldName)) {
throw new IllegalArgumentException("dimension \"" + dim + "\" was not indexed into field \"" + indexFieldName);
throw new IllegalArgumentException("dimension \"" + dim + "\" was not indexed into field \"" + indexFieldName + "\"");
}
return dimConfig;
}

View File

@ -16,6 +16,7 @@
*/
package org.apache.lucene.facet.sortedset;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -73,11 +74,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
SortedSetDocValuesFacetCounts facets = getAllFacets(searcher, state);
assertEquals("dim=a path=[] value=4 childCount=3\n foo (2)\n bar (1)\n zoo (1)\n", facets.getTopChildren(10, "a").toString());
assertEquals("dim=b path=[] value=1 childCount=1\n baz (1)\n", facets.getTopChildren(10, "b").toString());
@ -171,9 +168,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
SortedSetDocValuesFacetCounts facets = getAllFacets(searcher, state);
// Ask for top 10 labels for any dims that have counts:
List<FacetResult> results = facets.getAllDims(10);
@ -215,9 +210,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(searcher.getIndexReader());
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
SortedSetDocValuesFacetCounts facets = getAllFacets(searcher, state);
// Ask for top 10 labels for any dims that have counts:
assertEquals("dim=a path=[] value=2 childCount=2\n foo1 (1)\n foo2 (1)\n", facets.getTopChildren(10, "a").toString());
@ -312,4 +305,14 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
w.close();
IOUtils.close(searcher.getIndexReader(), indexDir, taxoDir);
}
/** Randomly exercises both ways of computing all counts: the traditional
 *  MatchAllDocsQuery + collector path, and the browse-only constructor.
 *  Both must produce identical results. */
private static SortedSetDocValuesFacetCounts getAllFacets(IndexSearcher searcher, SortedSetDocValuesReaderState state) throws IOException {
  boolean useCollector = random().nextBoolean();
  if (useCollector) {
    // Collect all docs, then count from the collector's matching docs.
    FacetsCollector collector = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), collector);
    return new SortedSetDocValuesFacetCounts(state, collector);
  }
  // Optimized browse-only API: no search required.
  return new SortedSetDocValuesFacetCounts(state);
}
}

View File

@ -17,6 +17,7 @@
package org.apache.lucene.facet.taxonomy;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
@ -102,16 +103,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// Aggregate the facet counts:
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query, and use MultiCollector to
// wrap collecting the "normal" hits and also facets:
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, c);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
// Retrieve & verify results:
assertEquals("dim=Publish Date path=[] value=5 childCount=3\n 2010 (2)\n 2012 (2)\n 1999 (1)\n", facets.getTopChildren(10, "Publish Date").toString());
@ -120,7 +112,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// Now user drills down on Publish Date/2010:
DrillDownQuery q2 = new DrillDownQuery(config);
q2.add("Publish Date", "2010");
c = new FacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(q2, c);
facets = new FastTaxonomyFacetCounts(taxoReader, config, c);
assertEquals("dim=Author path=[] value=2 childCount=2\n Bob (1)\n Lisa (1)\n", facets.getTopChildren(10, "Author").toString());
@ -185,10 +177,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, new FacetsConfig(), c);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
// Ask for top 10 labels for any dims that have counts:
List<FacetResult> results = facets.getAllDims(10);
@ -301,15 +290,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// Aggregate the facet counts:
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query, and use MultiCollector to
// wrap collecting the "normal" hits and also facets:
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
expectThrows(IllegalArgumentException.class, () -> {
facets.getSpecificValue("a");
@ -344,10 +325,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
assertEquals(1, facets.getSpecificValue("dim", "test\u001Fone"));
assertEquals(1, facets.getSpecificValue("dim", "test\u001Etwo"));
@ -387,11 +366,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
assertEquals(1, facets.getTopChildren(10, "dim").value);
assertEquals(1, facets.getTopChildren(10, "dim2").value);
assertEquals(1, facets.getTopChildren(10, "dim3").value);
@ -432,15 +408,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// Aggregate the facet counts:
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query, and use MultiCollector to
// wrap collecting the "normal" hits and also facets:
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
FacetResult result = facets.getTopChildren(Integer.MAX_VALUE, "dim");
assertEquals(numLabels, result.labelValues.length);
@ -544,9 +512,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config);
for (FacetResult result : facets.getAllDims(10)) {
assertEquals(r.numDocs(), result.value.intValue());
}
@ -572,10 +539,8 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
final FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
List<FacetResult> res1 = facets.getAllDims(10);
List<FacetResult> res2 = facets.getAllDims(10);
assertEquals("calling getFacetResults twice should return the .equals()=true result", res1, res2);
@ -601,9 +566,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config);
assertEquals(10, facets.getTopChildren(2, "a").childCount);
@ -754,4 +717,21 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
w.close();
IOUtils.close(tw, searcher.getIndexReader(), tr, indexDir, taxoDir);
}
/** Randomly picks one of the two equivalent ways to compute counts for all
 *  non-deleted docs: MatchAllDocsQuery + collector, or the browse-only
 *  FastTaxonomyFacetCounts constructor. */
private static Facets getAllFacets(String indexFieldName, IndexSearcher searcher, TaxonomyReader taxoReader, FacetsConfig config) throws IOException {
  boolean useCollector = random().nextBoolean();
  if (useCollector) {
    // "Browsing" via MatchAllDocsQuery counts facets for every non-deleted
    // doc; normally you'd run a real query and use MultiCollector to gather
    // both the normal hits and the facets.
    FacetsCollector collector = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), collector);
    return new FastTaxonomyFacetCounts(taxoReader, config, collector);
  }
  // Optimized browse-only constructor: same result, no search.
  return new FastTaxonomyFacetCounts(indexFieldName, searcher.getIndexReader(), taxoReader, config);
}
}

View File

@ -217,13 +217,14 @@ public class WeightedSpanTermExtractor {
} else if (isQueryUnsupported(query.getClass())) {
// nothing
} else {
if (query instanceof MultiTermQuery &&
(!expandMultiTermQuery || !fieldNameComparator(((MultiTermQuery)query).getField()))) {
return;
}
Query origQuery = query;
final IndexReader reader = getLeafContext().reader();
Query rewritten;
if (query instanceof MultiTermQuery) {
if (!expandMultiTermQuery) {
return;
}
rewritten = MultiTermQuery.SCORING_BOOLEAN_REWRITE.rewrite(reader, (MultiTermQuery) query);
} else {
rewritten = origQuery.rewrite(reader);
@ -508,11 +509,7 @@ public class WeightedSpanTermExtractor {
*/
public Map<String,WeightedSpanTerm> getWeightedSpanTerms(Query query, float boost, TokenStream tokenStream,
String fieldName) throws IOException {
if (fieldName != null) {
this.fieldName = fieldName;
} else {
this.fieldName = null;
}
this.fieldName = fieldName;
Map<String,WeightedSpanTerm> terms = new PositionCheckingMap<>();
this.tokenStream = tokenStream;

View File

@ -33,6 +33,7 @@ import java.util.StringTokenizer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockPayloadAnalyzer;
import org.apache.lucene.analysis.MockTokenFilter;
@ -1339,6 +1340,22 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
/** Verifies (LUCENE-7520) that a MultiTermQuery on a field other than the
 *  one being highlighted is not expanded: the overridden extract must only
 *  ever see the original, un-rewritten query. */
public void testNotRewriteMultiTermQuery() throws IOException {
  // field "bar": (not the field we ultimately want to extract)
  MultiTermQuery mtq = new TermRangeQuery("bar", new BytesRef("aa"), new BytesRef("zz"), true, true) ;
  WeightedSpanTermExtractor extractor = new WeightedSpanTermExtractor() {
    @Override
    protected void extract(Query query, float boost, Map<String, WeightedSpanTerm> terms) throws IOException {
      // Had the query been rewritten, the expanded form would arrive here.
      assertEquals(mtq, query);
      super.extract(query, boost, terms);
    }
  };
  extractor.setExpandMultiTermQuery(true);
  extractor.setMaxDocCharsToAnalyze(51200);
  extractor.getWeightedSpanTerms(
      mtq, 3, new CannedTokenStream(new Token("aa",0,2), new Token("bb", 2,4)), "foo"); // field "foo"
}
public void testGetBestSingleFragmentWithWeights() throws Exception {
TestHighlightRunner helper = new TestHighlightRunner() {

View File

@ -7,7 +7,7 @@
/cglib/cglib-nodep = 2.2
/com.adobe.xmp/xmpcore = 5.1.2
com.carrotsearch.randomizedtesting.version = 2.3.4
com.carrotsearch.randomizedtesting.version = 2.4.0
/com.carrotsearch.randomizedtesting/junit4-ant = ${com.carrotsearch.randomizedtesting.version}
/com.carrotsearch.randomizedtesting/randomizedtesting-runner = ${com.carrotsearch.randomizedtesting.version}

View File

@ -102,7 +102,7 @@ public class BlockJoinSelector {
}
return new SortedDocValues() {
private int ord;
private int ord = -1;
private int docID = -1;
@Override
@ -168,6 +168,60 @@ public class BlockJoinSelector {
return nextDoc();
}
@Override
public boolean advanceExact(int targetParentDocID) throws IOException {
  // advanceExact may only stay on or move past the current parent.
  if (targetParentDocID < docID) {
    throw new IllegalArgumentException("target must be after the current document: current=" + docID + " target=" + targetParentDocID);
  }
  int previousDocId = docID;
  docID = targetParentDocID;
  if (targetParentDocID == previousDocId) {
    // Already positioned here: report whether a value was found last time.
    return ord != -1;
  }
  // NOTE(review): removed a redundant second `docID = targetParentDocID;`
  // that immediately repeated the assignment above.
  ord = -1;
  if (parents.get(targetParentDocID) == false) {
    // Target is not a parent document.
    return false;
  }
  // The block's children start right after the previous parent doc.
  int prevParentDocId = docID == 0 ? -1 : parents.prevSetBit(docID - 1);
  int childDoc = values.docID();
  if (childDoc <= prevParentDocId) {
    childDoc = values.advance(prevParentDocId + 1);
  }
  if (childDoc >= docID) {
    // No child with a value inside this block.
    return false;
  }
  // Seed ord from the first matching child...
  boolean hasValue = false;
  for (int doc = values.docID(); doc < docID; doc = values.nextDoc()) {
    if (children.get(doc)) {
      ord = values.ordValue();
      hasValue = true;
      values.nextDoc();
      break;
    }
  }
  if (hasValue == false) {
    return false;
  }
  // ...then fold the remaining children in according to the selection type.
  for (int doc = values.docID(); doc < docID; doc = values.nextDoc()) {
    if (children.get(doc)) {
      switch (selection) {
        case MIN:
          ord = Math.min(ord, values.ordValue());
          break;
        case MAX:
          ord = Math.max(ord, values.ordValue());
          break;
        default:
          throw new AssertionError();
      }
    }
  }
  return true;
}
@Override
public int ordValue() {
return ord;
@ -287,6 +341,54 @@ public class BlockJoinSelector {
}
}
@Override
public boolean advanceExact(int targetParentDocID) throws IOException {
  // This variant requires strictly-forward advancing (no repeat of current doc).
  if (targetParentDocID <= parentDocID) {
    throw new IllegalArgumentException("target must be after the current document: current=" + parentDocID + " target=" + targetParentDocID);
  }
  parentDocID = targetParentDocID;
  if (parents.get(targetParentDocID) == false) {
    // Target is not a parent document.
    return false;
  }
  // The block's children start right after the previous parent doc.
  int prevParentDocId = parentDocID == 0 ? -1 : parents.prevSetBit(parentDocID - 1);
  int childDoc = values.docID();
  if (childDoc <= prevParentDocId) {
    childDoc = values.advance(prevParentDocId + 1);
  }
  if (childDoc >= parentDocID) {
    // No child with a value inside this block.
    return false;
  }
  // Seed value from the first matching child...
  boolean hasValue = false;
  for (int doc = values.docID(); doc < parentDocID; doc = values.nextDoc()) {
    if (children.get(doc)) {
      value = values.longValue();
      hasValue = true;
      values.nextDoc();
      break;
    }
  }
  if (hasValue == false) {
    return false;
  }
  // ...then fold the remaining children in according to the selection type.
  for (int doc = values.docID(); doc < parentDocID; doc = values.nextDoc()) {
    if (children.get(doc)) {
      switch (selection) {
        case MIN:
          value = Math.min(value, values.longValue());
          break;
        case MAX:
          value = Math.max(value, values.longValue());
          break;
        default:
          throw new AssertionError();
      }
    }
  }
  return true;
}
@Override
public long longValue() {
return value;

View File

@ -73,6 +73,13 @@ interface GenericTermsCollector extends Collector {
return docID;
}
@Override
public boolean advanceExact(int dest) throws IOException {
  // Delegate, logging the call and its result for debugging/tracing.
  boolean exists = target.advanceExact(dest);
  out.println("\nadvanceExact(" + dest + ") -> exists# "+exists);
  return exists;
}
@Override
public long cost() {
return target.cost();

View File

@ -149,6 +149,12 @@ public class TestBlockJoinSelector extends LuceneTestCase {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
  // Test stub: jump straight to target; an ord of -1 means no value there.
  docID = target;
  return ords[docID] != -1;
}
@Override
public int ordValue() {
assert ords[docID] != -1;
@ -256,6 +262,12 @@ public class TestBlockJoinSelector extends LuceneTestCase {
}
}
@Override
public boolean advanceExact(int target) throws IOException {
  // Test stub: jump straight to target and report whether it has a value.
  docID = target;
  return docsWithValue.get(docID);
}
@Override
public long longValue() {
return values[docID];

View File

@ -95,7 +95,7 @@ import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.packed.PackedInts;
import org.junit.Test;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
public class TestJoinUtil extends LuceneTestCase {
@ -517,7 +517,7 @@ public class TestJoinUtil extends LuceneTestCase {
Map<String, Float> lowestScoresPerParent = new HashMap<>();
Map<String, Float> highestScoresPerParent = new HashMap<>();
int numParents = RandomInts.randomIntBetween(random(), 16, 64);
int numParents = RandomNumbers.randomIntBetween(random(), 16, 64);
for (int p = 0; p < numParents; p++) {
String parentId = Integer.toString(p);
Document parentDoc = new Document();
@ -525,7 +525,7 @@ public class TestJoinUtil extends LuceneTestCase {
parentDoc.add(new StringField("type", "to", Field.Store.NO));
parentDoc.add(new SortedDocValuesField("join_field", new BytesRef(parentId)));
iw.addDocument(parentDoc);
int numChildren = RandomInts.randomIntBetween(random(), 2, 16);
int numChildren = RandomNumbers.randomIntBetween(random(), 2, 16);
int lowest = Integer.MAX_VALUE;
int highest = Integer.MIN_VALUE;
for (int c = 0; c < numChildren; c++) {
@ -589,7 +589,7 @@ public class TestJoinUtil extends LuceneTestCase {
int minChildDocsPerParent = 2;
int maxChildDocsPerParent = 16;
int numParents = RandomInts.randomIntBetween(random(), 16, 64);
int numParents = RandomNumbers.randomIntBetween(random(), 16, 64);
int[] childDocsPerParent = new int[numParents];
for (int p = 0; p < numParents; p++) {
String parentId = Integer.toString(p);
@ -598,7 +598,7 @@ public class TestJoinUtil extends LuceneTestCase {
parentDoc.add(new StringField("type", "to", Field.Store.NO));
parentDoc.add(new SortedDocValuesField("join_field", new BytesRef(parentId)));
iw.addDocument(parentDoc);
int numChildren = RandomInts.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent);
int numChildren = RandomNumbers.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent);
childDocsPerParent[p] = numChildren;
for (int c = 0; c < numChildren; c++) {
String childId = Integer.toString(p + c);
@ -622,11 +622,11 @@ public class TestJoinUtil extends LuceneTestCase {
Query fromQuery = new TermQuery(new Term("type", "from"));
Query toQuery = new TermQuery(new Term("type", "to"));
int iters = RandomInts.randomIntBetween(random(), 3, 9);
int iters = RandomNumbers.randomIntBetween(random(), 3, 9);
for (int i = 1; i <= iters; i++) {
final ScoreMode scoreMode = ScoreMode.values()[random().nextInt(ScoreMode.values().length)];
int min = RandomInts.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent - 1);
int max = RandomInts.randomIntBetween(random(), min, maxChildDocsPerParent);
int min = RandomNumbers.randomIntBetween(random(), minChildDocsPerParent, maxChildDocsPerParent - 1);
int max = RandomNumbers.randomIntBetween(random(), min, maxChildDocsPerParent);
if (VERBOSE) {
System.out.println("iter=" + i);
System.out.println("scoreMode=" + scoreMode);
@ -1067,7 +1067,7 @@ public class TestJoinUtil extends LuceneTestCase {
);
IndexIterationContext context = new IndexIterationContext();
int numRandomValues = nDocs / RandomInts.randomIntBetween(random, 1, 4);
int numRandomValues = nDocs / RandomNumbers.randomIntBetween(random, 1, 4);
context.randomUniqueValues = new String[numRandomValues];
Set<String> trackSet = new HashSet<>();
context.randomFrom = new boolean[numRandomValues];

View File

@ -1 +0,0 @@
9f4c0e1de0837092115c89a38c12ae57db6983e7

View File

@ -0,0 +1 @@
0222eb23dd6f45541acf6a5ac69cd9e9bdce25d2

View File

@ -969,6 +969,12 @@ public class MemoryIndex {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
  // Only doc 0 exists here, so only target 0 can have a value.
  docID = target;
  return docID == 0;
}
@Override
public long cost() {
return 1;

View File

@ -143,6 +143,11 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
return sdv.advance(target + context.docBase);
}
@Override
public boolean advanceExact(int target) throws IOException {
  // Translate the leaf-relative target into the top-level doc-id space.
  return sdv.advanceExact(target + context.docBase);
}
@Override
public long cost() {
return 0;
@ -187,6 +192,10 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
return vals.advance(target);
}
@Override
public boolean advanceExact(int target) throws IOException {
  // Straight delegation to the wrapped values.
  return vals.advanceExact(target);
}
@Override
public long cost() {
return vals.cost();
}

View File

@ -38,7 +38,7 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.packed.PackedInts;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public class TestDocValuesFieldSources extends LuceneTestCase {
@ -81,7 +81,7 @@ public class TestDocValuesFieldSources extends LuceneTestCase {
f.setBytesValue(new BytesRef((String) vals[i]));
break;
case NUMERIC:
final int bitsPerValue = RandomInts.randomIntBetween(random(), 1, 31); // keep it an int
final int bitsPerValue = RandomNumbers.randomIntBetween(random(), 1, 31); // keep it an int
vals[i] = (long) random().nextInt((int) PackedInts.maxValue(bitsPerValue));
f.setLongValue((Long) vals[i]);
break;

View File

@ -80,7 +80,7 @@ import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public class TestGeo3DPoint extends LuceneTestCase {
@ -206,7 +206,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
int iters = atLeast(10);
int recurseDepth = RandomInts.randomIntBetween(random(), 5, 15);
int recurseDepth = RandomNumbers.randomIntBetween(random(), 5, 15);
iters = atLeast(50);
@ -358,7 +358,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
case 0:
// Split on X:
{
int splitValue = RandomInts.randomIntBetween(random(), cell.xMinEnc, cell.xMaxEnc);
int splitValue = RandomNumbers.randomIntBetween(random(), cell.xMinEnc, cell.xMaxEnc);
if (VERBOSE) {
log.println(" now split on x=" + splitValue);
}
@ -384,7 +384,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
case 1:
// Split on Y:
{
int splitValue = RandomInts.randomIntBetween(random(), cell.yMinEnc, cell.yMaxEnc);
int splitValue = RandomNumbers.randomIntBetween(random(), cell.yMinEnc, cell.yMaxEnc);
if (VERBOSE) {
log.println(" now split on y=" + splitValue);
}
@ -410,7 +410,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
case 2:
// Split on Z:
{
int splitValue = RandomInts.randomIntBetween(random(), cell.zMinEnc, cell.zMaxEnc);
int splitValue = RandomNumbers.randomIntBetween(random(), cell.zMinEnc, cell.zMaxEnc);
if (VERBOSE) {
log.println(" now split on z=" + splitValue);
}

View File

@ -30,7 +30,7 @@ public final class MockBytesAnalyzer extends Analyzer {
}
@Override
protected AttributeFactory attributeFactory() {
protected AttributeFactory attributeFactory(String fieldName) {
return MockUTF16TermAttributeImpl.UTF16_TERM_ATTRIBUTE_FACTORY;
}
}

View File

@ -24,7 +24,7 @@ import org.apache.lucene.codecs.TermVectorsFormat;
import org.apache.lucene.codecs.compressing.dummy.DummyCompressingCodec;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
/**
* A codec that uses {@link CompressingStoredFieldsFormat} for its stored
@ -55,9 +55,9 @@ public abstract class CompressingCodec extends FilterCodec {
* suffix
*/
public static CompressingCodec randomInstance(Random random) {
final int chunkSize = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 1, 1 << 15);
final int chunkDocs = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 64, 1024);
final int blockSize = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 1, 1024);
final int chunkSize = random.nextBoolean() ? RandomNumbers.randomIntBetween(random, 1, 10) : RandomNumbers.randomIntBetween(random, 1, 1 << 15);
final int chunkDocs = random.nextBoolean() ? RandomNumbers.randomIntBetween(random, 1, 10) : RandomNumbers.randomIntBetween(random, 64, 1024);
final int blockSize = random.nextBoolean() ? RandomNumbers.randomIntBetween(random, 1, 10) : RandomNumbers.randomIntBetween(random, 1, 1024);
return randomInstance(random, chunkSize, chunkDocs, false, blockSize);
}
@ -79,10 +79,10 @@ public abstract class CompressingCodec extends FilterCodec {
*/
public static CompressingCodec randomInstance(Random random, boolean withSegmentSuffix) {
return randomInstance(random,
RandomInts.randomIntBetween(random, 1, 1 << 15),
RandomInts.randomIntBetween(random, 64, 1024),
RandomNumbers.randomIntBetween(random, 1, 1 << 15),
RandomNumbers.randomIntBetween(random, 64, 1024),
withSegmentSuffix,
RandomInts.randomIntBetween(random, 1, 1024));
RandomNumbers.randomIntBetween(random, 1, 1024));
}
private final CompressingStoredFieldsFormat storedFieldsFormat;

View File

@ -398,6 +398,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final NumericDocValues in;
private final int maxDoc;
private int lastDocID = -1;
private boolean exists;
public AssertingNumericDocValues(NumericDocValues in, int maxDoc) {
this.in = in;
@ -420,6 +421,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
assert docID == in.docID();
lastDocID = docID;
exists = docID != NO_MORE_DOCS;
return docID;
}
@ -432,9 +434,22 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID >= target;
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
assertThread("Numeric doc values", creationThread);
assert target >= 0;
assert target >= in.docID();
assert target < maxDoc;
exists = in.advanceExact(target);
assert in.docID() == target;
lastDocID = target;
return exists;
}
@Override
public long cost() {
assertThread("Numeric doc values", creationThread);
@ -446,8 +461,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public long longValue() throws IOException {
assertThread("Numeric doc values", creationThread);
assert in.docID() != -1;
assert in.docID() != NO_MORE_DOCS;
assert exists;
return in.longValue();
}
@ -463,6 +477,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final BinaryDocValues in;
private final int maxDoc;
private int lastDocID = -1;
private boolean exists;
public AssertingBinaryDocValues(BinaryDocValues in, int maxDoc) {
this.in = in;
@ -485,6 +500,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
assert docID == in.docID();
lastDocID = docID;
exists = docID != NO_MORE_DOCS;
return docID;
}
@ -497,9 +513,22 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID >= target;
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
assertThread("Numeric doc values", creationThread);
assert target >= 0;
assert target >= in.docID();
assert target < maxDoc;
exists = in.advanceExact(target);
assert in.docID() == target;
lastDocID = target;
return exists;
}
@Override
public long cost() {
assertThread("Binary doc values", creationThread);
@ -511,8 +540,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public BytesRef binaryValue() throws IOException {
assertThread("Binary doc values", creationThread);
assert in.docID() != -1;
assert in.docID() != NO_MORE_DOCS;
assert exists;
return in.binaryValue();
}
@ -529,6 +557,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final int maxDoc;
private final int valueCount;
private int lastDocID = -1;
private boolean exists;
public AssertingSortedDocValues(SortedDocValues in, int maxDoc) {
this.in = in;
@ -551,6 +580,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
assert docID == in.docID();
lastDocID = docID;
exists = docID != NO_MORE_DOCS;
return docID;
}
@ -563,9 +593,22 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID >= target;
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
assertThread("Numeric doc values", creationThread);
assert target >= 0;
assert target >= in.docID();
assert target < maxDoc;
exists = in.advanceExact(target);
assert in.docID() == target;
lastDocID = target;
return exists;
}
@Override
public long cost() {
assertThread("Sorted doc values", creationThread);
@ -577,6 +620,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public int ordValue() {
assertThread("Sorted doc values", creationThread);
assert exists;
int ord = in.ordValue();
assert ord >= -1 && ord < valueCount;
return ord;
@ -625,6 +669,7 @@ public class AssertingLeafReader extends FilterLeafReader {
private final int maxDoc;
private int lastDocID = -1;
private int valueUpto;
private boolean exists;
public AssertingSortedNumericDocValues(SortedNumericDocValues in, int maxDoc) {
this.in = in;
@ -645,6 +690,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == in.docID();
lastDocID = docID;
valueUpto = 0;
exists = docID != NO_MORE_DOCS;
return docID;
}
@ -659,9 +705,23 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
valueUpto = 0;
exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
assertThread("Numeric doc values", creationThread);
assert target >= 0;
assert target >= in.docID();
assert target < maxDoc;
exists = in.advanceExact(target);
assert in.docID() == target;
lastDocID = target;
valueUpto = 0;
return exists;
}
@Override
public long cost() {
assertThread("Sorted numeric doc values", creationThread);
@ -673,6 +733,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public long nextValue() throws IOException {
assertThread("Sorted numeric doc values", creationThread);
assert exists;
assert valueUpto < in.docValueCount(): "valueUpto=" + valueUpto + " in.docValueCount()=" + in.docValueCount();
valueUpto++;
return in.nextValue();
@ -681,6 +742,7 @@ public class AssertingLeafReader extends FilterLeafReader {
@Override
public int docValueCount() {
assertThread("Sorted numeric doc values", creationThread);
assert exists;
assert in.docValueCount() > 0;
return in.docValueCount();
}
@ -693,7 +755,8 @@ public class AssertingLeafReader extends FilterLeafReader {
private final int maxDoc;
private final long valueCount;
private int lastDocID = -1;
long lastOrd = NO_MORE_ORDS;
private long lastOrd = NO_MORE_ORDS;
private boolean exists;
public AssertingSortedSetDocValues(SortedSetDocValues in, int maxDoc) {
this.in = in;
@ -717,6 +780,7 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == in.docID();
lastDocID = docID;
lastOrd = -2;
exists = docID != NO_MORE_DOCS;
return docID;
}
@ -731,9 +795,23 @@ public class AssertingLeafReader extends FilterLeafReader {
assert docID == NO_MORE_DOCS || docID < maxDoc;
lastDocID = docID;
lastOrd = -2;
exists = docID != NO_MORE_DOCS;
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
assertThread("Numeric doc values", creationThread);
assert target >= 0;
assert target >= in.docID();
assert target < maxDoc;
exists = in.advanceExact(target);
assert in.docID() == target;
lastDocID = target;
lastOrd = -2;
return exists;
}
@Override
public long cost() {
assertThread("Sorted set doc values", creationThread);
@ -746,6 +824,7 @@ public class AssertingLeafReader extends FilterLeafReader {
public long nextOrd() throws IOException {
assertThread("Sorted set doc values", creationThread);
assert lastOrd != NO_MORE_ORDS;
assert exists;
long ord = in.nextOrd();
assert ord < valueCount;
assert ord == NO_MORE_ORDS || ord > lastOrd;

View File

@ -30,6 +30,8 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
@ -556,7 +558,6 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
IndexReader ireader = DirectoryReader.open(directory); // read-only=true
assert ireader.leaves().size() == 1;
BinaryDocValues dv = ireader.leaves().get(0).reader().getBinaryDocValues("dv");
BytesRef scratch = new BytesRef();
for(int i=0;i<2;i++) {
Document doc2 = ireader.leaves().get(0).reader().document(i);
String expected;
@ -1185,20 +1186,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
dir.close();
}
static abstract class LongProducer {
abstract long next();
}
private void doTestNumericsVsStoredFields(final long minValue, final long maxValue) throws Exception {
doTestNumericsVsStoredFields(new LongProducer() {
@Override
long next() {
return TestUtil.nextLong(random(), minValue, maxValue);
}
});
}
private void doTestNumericsVsStoredFields(LongProducer longs) throws Exception {
private void doTestNumericsVsStoredFields(double density, LongSupplier longs) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@ -1216,8 +1204,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// for numbers of values <= 256, all storage layouts are tested
assert numDocs > 256;
for (int i = 0; i < numDocs; i++) {
if (random().nextDouble() > density) {
writer.addDocument(new Document());
continue;
}
idField.setStringValue(Integer.toString(i));
long value = longs.next();
long value = longs.getAsLong();
storedField.setStringValue(Long.toString(value));
dvField.setLongValue(value);
writer.addDocument(doc);
@ -1241,20 +1233,28 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = DirectoryReader.open(dir);
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
NumericDocValues docValues = r.getNumericDocValues("dv");
NumericDocValues docValues = DocValues.getNumeric(r, "dv");
docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
long storedValue = Long.parseLong(r.document(i).get("stored"));
assertEquals(i, docValues.nextDoc());
assertEquals(storedValue, docValues.longValue());
String storedValue = r.document(i).get("stored");
if (storedValue == null) {
assertTrue(docValues.docID() > i);
} else {
assertEquals(i, docValues.docID());
assertEquals(Long.parseLong(storedValue), docValues.longValue());
docValues.nextDoc();
}
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
dir.close();
}
private void doTestSortedNumericsVsStoredFields(LongProducer counts, LongProducer values) throws Exception {
private void doTestSortedNumericsVsStoredFields(LongSupplier counts, LongSupplier values) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@ -1268,10 +1268,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
Document doc = new Document();
doc.add(new StringField("id", Integer.toString(i), Field.Store.NO));
int valueCount = (int) counts.next();
int valueCount = (int) counts.getAsLong();
long valueArray[] = new long[valueCount];
for (int j = 0; j < valueCount; j++) {
long value = values.next();
long value = values.getAsLong();
valueArray[j] = value;
doc.add(new SortedNumericDocValuesField("dv", value));
}
@ -1300,6 +1300,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = DirectoryReader.open(dir);
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
SortedNumericDocValues docValues = DocValues.getSortedNumeric(r, "dv");
@ -1326,39 +1327,74 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
public void testBooleanNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(0, 1);
doTestNumericsVsStoredFields(1, () -> random().nextInt(2));
}
}
public void testSparseBooleanNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(random().nextDouble(), () -> random().nextInt(2));
}
}
public void testByteNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(Byte.MIN_VALUE, Byte.MAX_VALUE);
doTestNumericsVsStoredFields(1, () -> TestUtil.nextInt(random(), Byte.MIN_VALUE, Byte.MAX_VALUE));
}
}
public void testSparseByteNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(random().nextDouble(), () -> TestUtil.nextInt(random(), Byte.MIN_VALUE, Byte.MAX_VALUE));
}
}
public void testShortNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(Short.MIN_VALUE, Short.MAX_VALUE);
doTestNumericsVsStoredFields(1, () -> TestUtil.nextInt(random(), Short.MIN_VALUE, Short.MAX_VALUE));
}
}
public void testSparseShortNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(random().nextDouble(), () -> TestUtil.nextInt(random(), Short.MIN_VALUE, Short.MAX_VALUE));
}
}
public void testIntNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(Integer.MIN_VALUE, Integer.MAX_VALUE);
doTestNumericsVsStoredFields(1, random()::nextInt);
}
}
public void testSparseIntNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(random().nextDouble(), random()::nextInt);
}
}
public void testLongNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(Long.MIN_VALUE, Long.MAX_VALUE);
doTestNumericsVsStoredFields(1, random()::nextLong);
}
}
private void doTestBinaryVsStoredFields(int minLength, int maxLength) throws Exception {
public void testSparseLongNumericsVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestNumericsVsStoredFields(random().nextDouble(), random()::nextLong);
}
}
private void doTestBinaryVsStoredFields(double density, Supplier<byte[]> bytes) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@ -1373,15 +1409,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// index some docs
int numDocs = atLeast(300);
for (int i = 0; i < numDocs; i++) {
idField.setStringValue(Integer.toString(i));
final int length;
if (minLength == maxLength) {
length = minLength; // fixed length
} else {
length = TestUtil.nextInt(random(), minLength, maxLength);
if (random().nextDouble() > density) {
writer.addDocument(new Document());
continue;
}
byte buffer[] = new byte[length];
random().nextBytes(buffer);
idField.setStringValue(Integer.toString(i));
byte[] buffer = bytes.get();
storedField.setBytesValue(buffer);
dvField.setBytesValue(buffer);
writer.addDocument(doc);
@ -1399,28 +1432,44 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = writer.getReader();
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
BinaryDocValues docValues = r.getBinaryDocValues("dv");
BinaryDocValues docValues = DocValues.getBinary(r, "dv");
docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
assertEquals(i, docValues.nextDoc());
assertEquals(binaryValue, docValues.binaryValue());
if (binaryValue == null) {
assertTrue(docValues.docID() > i);
} else {
assertEquals(i, docValues.docID());
assertEquals(binaryValue, docValues.binaryValue());
docValues.nextDoc();
}
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
// compare again
writer.forceMerge(1);
ir = writer.getReader();
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
BinaryDocValues docValues = r.getBinaryDocValues("dv");
BinaryDocValues docValues = DocValues.getBinary(r, "dv");
docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
assertEquals(i, docValues.nextDoc());
assertEquals(binaryValue, docValues.binaryValue());
if (binaryValue == null) {
assertTrue(docValues.docID() > i);
} else {
assertEquals(i, docValues.docID());
assertEquals(binaryValue, docValues.binaryValue());
docValues.nextDoc();
}
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
writer.close();
@ -1428,21 +1477,46 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
}
public void testBinaryFixedLengthVsStoredFields() throws Exception {
doTestBinaryFixedLengthVsStoredFields(1);
}
public void testSparseBinaryFixedLengthVsStoredFields() throws Exception {
doTestBinaryFixedLengthVsStoredFields(random().nextDouble());
}
private void doTestBinaryFixedLengthVsStoredFields(double density) throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
int fixedLength = TestUtil.nextInt(random(), 0, 10);
doTestBinaryVsStoredFields(fixedLength, fixedLength);
doTestBinaryVsStoredFields(density, () -> {
byte buffer[] = new byte[fixedLength];
random().nextBytes(buffer);
return buffer;
});
}
}
public void testBinaryVariableLengthVsStoredFields() throws Exception {
doTestBinaryVariableLengthVsStoredFields(1);
}
public void testSparseBinaryVariableLengthVsStoredFields() throws Exception {
doTestBinaryVariableLengthVsStoredFields(random().nextDouble());
}
public void doTestBinaryVariableLengthVsStoredFields(double density) throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestBinaryVsStoredFields(0, 10);
doTestBinaryVsStoredFields(density, () -> {
final int length = random().nextInt(10);
byte buffer[] = new byte[length];
random().nextBytes(buffer);
return buffer;
});
}
}
protected void doTestSortedVsStoredFields(int numDocs, int minLength, int maxLength) throws Exception {
protected void doTestSortedVsStoredFields(int numDocs, double density, Supplier<byte[]> bytes) throws Exception {
Directory dir = newFSDirectory(createTempDir("dvduel"));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
@ -1456,15 +1530,12 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// index some docs
for (int i = 0; i < numDocs; i++) {
idField.setStringValue(Integer.toString(i));
final int length;
if (minLength == maxLength) {
length = minLength; // fixed length
} else {
length = TestUtil.nextInt(random(), minLength, maxLength);
if (random().nextDouble() > density) {
writer.addDocument(new Document());
continue;
}
byte buffer[] = new byte[length];
random().nextBytes(buffer);
idField.setStringValue(Integer.toString(i));
byte[] buffer = bytes.get();
storedField.setBytesValue(buffer);
dvField.setBytesValue(buffer);
writer.addDocument(doc);
@ -1482,28 +1553,44 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = writer.getReader();
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
BinaryDocValues docValues = DocValues.getBinary(r, "dv");
docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
assertEquals(i, docValues.nextDoc());
assertEquals(binaryValue, docValues.binaryValue());
if (binaryValue == null) {
assertTrue(docValues.docID() > i);
} else {
assertEquals(i, docValues.docID());
assertEquals(binaryValue, docValues.binaryValue());
docValues.nextDoc();
}
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
writer.forceMerge(1);
// compare again
ir = writer.getReader();
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
BinaryDocValues docValues = DocValues.getBinary(r, "dv");
docValues.nextDoc();
for (int i = 0; i < r.maxDoc(); i++) {
BytesRef binaryValue = r.document(i).getBinaryValue("stored");
assertEquals(i, docValues.nextDoc());
assertEquals(binaryValue, docValues.binaryValue());
if (binaryValue == null) {
assertTrue(docValues.docID() > i);
} else {
assertEquals(i, docValues.docID());
assertEquals(binaryValue, docValues.binaryValue());
docValues.nextDoc();
}
}
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
}
ir.close();
writer.close();
@ -1514,17 +1601,41 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
int fixedLength = TestUtil.nextInt(random(), 1, 10);
doTestSortedVsStoredFields(atLeast(300), fixedLength, fixedLength);
doTestSortedVsStoredFields(atLeast(300), 1, fixedLength, fixedLength);
}
}
public void testSparseSortedFixedLengthVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
int fixedLength = TestUtil.nextInt(random(), 1, 10);
doTestSortedVsStoredFields(atLeast(300), random().nextDouble(), fixedLength, fixedLength);
}
}
public void testSortedVariableLengthVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(atLeast(300), 1, 10);
doTestSortedVsStoredFields(atLeast(300), 1, 1, 10);
}
}
public void testSparseSortedVariableLengthVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(atLeast(300), random().nextDouble(), 1, 10);
}
}
protected void doTestSortedVsStoredFields(int numDocs, double density, int minLength, int maxLength) throws Exception {
doTestSortedVsStoredFields(numDocs, density, () -> {
int length = TestUtil.nextInt(random(), minLength, maxLength);
byte[] buffer = new byte[length];
random().nextBytes(buffer);
return buffer;
});
}
public void testSortedSetOneValue() throws IOException {
Directory directory = newDirectory();
RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory);
@ -2001,6 +2112,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare
DirectoryReader ir = writer.getReader();
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
@ -2029,6 +2141,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// compare again
ir = writer.getReader();
TestUtil.checkReader(ir);
for (LeafReaderContext context : ir.leaves()) {
LeafReader r = context.reader();
SortedSetDocValues docValues = r.getSortedSetDocValues("dv");
@ -2067,18 +2180,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
new LongProducer() {
@Override
long next() {
return 1;
}
},
new LongProducer() {
@Override
long next() {
return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
}
}
() -> 1,
random()::nextLong
);
}
}
@ -2087,18 +2190,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
new LongProducer() {
@Override
long next() {
return random().nextBoolean() ? 0 : 1;
}
},
new LongProducer() {
@Override
long next() {
return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
}
}
() -> random().nextBoolean() ? 0 : 1,
random()::nextLong
);
}
}
@ -2107,18 +2200,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
new LongProducer() {
@Override
long next() {
return TestUtil.nextLong(random(), 0, 50);
}
},
new LongProducer() {
@Override
long next() {
return TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
}
}
() -> TestUtil.nextLong(random(), 0, 50),
random()::nextLong
);
}
}
@ -2131,18 +2214,8 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedNumericsVsStoredFields(
new LongProducer() {
@Override
long next() {
return TestUtil.nextLong(random(), 0, 6);
}
},
new LongProducer() {
@Override
long next() {
return values[random().nextInt(values.length)];
}
}
() -> TestUtil.nextLong(random(), 0, 6),
() -> values[random().nextInt(values.length)]
);
}
}
@ -2198,22 +2271,31 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
}
public void testGCDCompression() throws Exception {
doTestGCDCompression(1);
}
public void testSparseGCDCompression() throws Exception {
doTestGCDCompression(random().nextDouble());
}
private void doTestGCDCompression(double density) throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
final long min = - (((long) random().nextInt(1 << 30)) << 32);
final long mul = random().nextInt() & 0xFFFFFFFFL;
final LongProducer longs = new LongProducer() {
@Override
long next() {
return min + mul * random().nextInt(1 << 20);
}
final LongSupplier longs = () -> {
return min + mul * random().nextInt(1 << 20);
};
doTestNumericsVsStoredFields(longs);
doTestNumericsVsStoredFields(density, longs);
}
}
public void testZeros() throws Exception {
doTestNumericsVsStoredFields(0, 0);
doTestNumericsVsStoredFields(1, () -> 0);
}
public void testSparseZeros() throws Exception {
doTestNumericsVsStoredFields(random().nextDouble(), () -> 0);
}
public void testZeroOrMin() throws Exception {
@ -2221,13 +2303,10 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
// the GCD of 0 and MIN_VALUE is negative
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
final LongProducer longs = new LongProducer() {
@Override
long next() {
return random().nextBoolean() ? 0 : Long.MIN_VALUE;
}
final LongSupplier longs = () -> {
return random().nextBoolean() ? 0 : Long.MIN_VALUE;
};
doTestNumericsVsStoredFields(longs);
doTestNumericsVsStoredFields(1, longs);
}
}

View File

@ -361,6 +361,12 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return target == 0;
}
@Override
public long cost() {
return 1;
@ -414,6 +420,12 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return target == 0;
}
@Override
public long cost() {
return 1;

View File

@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.function.LongSupplier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
@ -59,9 +60,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
});
@ -73,9 +74,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
});
@ -86,9 +87,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return TestUtil.nextLong(r, Short.MIN_VALUE, Short.MAX_VALUE);
}
});
@ -100,9 +101,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return TestUtil.nextLong(r, Short.MIN_VALUE, Short.MAX_VALUE);
}
});
@ -113,9 +114,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return TestUtil.nextLong(r, Long.MIN_VALUE, Long.MAX_VALUE);
}
});
@ -127,9 +128,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return TestUtil.nextLong(r, Long.MIN_VALUE, Long.MAX_VALUE);
}
});
@ -140,9 +141,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
int thingToDo = r.nextInt(3);
switch (thingToDo) {
case 0: return Long.MIN_VALUE;
@ -159,9 +160,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
int thingToDo = r.nextInt(3);
switch (thingToDo) {
case 0: return Long.MIN_VALUE;
@ -177,9 +178,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextBoolean() ? 20 : 3;
}
});
@ -191,9 +192,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextBoolean() ? 20 : 3;
}
});
@ -204,9 +205,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextBoolean() ? 1000000L : -5000;
}
});
@ -218,9 +219,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextBoolean() ? 1000000L : -5000;
}
});
@ -230,9 +231,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
public void testAllZeros() throws Exception {
int iterations = atLeast(1);
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return 0;
}
});
@ -243,9 +244,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
assumeTrue("Requires sparse norms support", codecSupportsSparsity());
int iterations = atLeast(1);
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return 0;
}
});
@ -256,9 +257,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
int iterations = atLeast(1);
final Random r = random();
for (int i = 0; i < iterations; i++) {
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : 0;
}
});
@ -270,9 +271,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
final Random r = random();
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : commonValue;
}
});
@ -285,9 +286,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
final Random r = random();
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE) : commonValue;
}
});
@ -300,9 +301,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? uncommonValue : commonValue;
}
});
@ -316,9 +317,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int i = 0; i < iterations; i++) {
final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? uncommonValue : commonValue;
}
});
@ -337,9 +338,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@ -358,9 +359,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@ -386,9 +387,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
doTestNormsVersusDocValues(1, new LongProducer() {
doTestNormsVersusDocValues(1, new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@ -417,9 +418,9 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
for (int j = 0; j < numOtherValues; ++j) {
otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
}
doTestNormsVersusDocValues(random().nextDouble(), new LongProducer() {
doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
@Override
long next() {
public long getAsLong() {
return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
}
});
@ -427,7 +428,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
}
}
private void doTestNormsVersusDocValues(double density, LongProducer longs) throws Exception {
private void doTestNormsVersusDocValues(double density, LongSupplier longs) throws Exception {
int numDocs = atLeast(500);
final FixedBitSet docsWithField = new FixedBitSet(numDocs);
final int numDocsWithField = Math.max(1, (int) (density * numDocs));
@ -445,7 +446,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
}
long norms[] = new long[numDocsWithField];
for (int i = 0; i < numDocsWithField; i++) {
norms[i] = longs.next();
norms[i] = longs.getAsLong();
}
Directory dir = newDirectory();
@ -519,10 +520,6 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
}
}
static abstract class LongProducer {
abstract long next();
}
static class CannedNormSimilarity extends Similarity {
final long norms[];
int index = 0;

View File

@ -53,7 +53,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
@ -320,7 +320,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
public void testReadSkip() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
FieldType ft = new FieldType();
@ -373,7 +373,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
public void testEmptyDocs() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
// make sure that the fact that documents might be empty is not a problem
@ -398,7 +398,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
public void testConcurrentReads() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
// make sure the readers are properly cloned
@ -486,15 +486,15 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
}
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
final int docCount = atLeast(200);
final byte[][][] data = new byte [docCount][][];
for (int i = 0; i < docCount; ++i) {
final int fieldCount = rarely()
? RandomInts.randomIntBetween(random(), 1, 500)
: RandomInts.randomIntBetween(random(), 1, 5);
? RandomNumbers.randomIntBetween(random(), 1, 500)
: RandomNumbers.randomIntBetween(random(), 1, 5);
data[i] = new byte[fieldCount][];
for (int j = 0; j < fieldCount; ++j) {
final int length = rarely()
@ -669,7 +669,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
// so if we get NRTCachingDir+SimpleText, we make massive stored fields and OOM (LUCENE-4484)
Directory dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("testBigDocuments")));
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
if (dir instanceof MockDirectoryWrapper) {
@ -689,12 +689,12 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
onlyStored.setIndexOptions(IndexOptions.NONE);
final Field smallField = new Field("fld", randomByteArray(random().nextInt(10), 256), onlyStored);
final int numFields = RandomInts.randomIntBetween(random(), 500000, 1000000);
final int numFields = RandomNumbers.randomIntBetween(random(), 500000, 1000000);
for (int i = 0; i < numFields; ++i) {
bigDoc1.add(smallField);
}
final Field bigField = new Field("fld", randomByteArray(RandomInts.randomIntBetween(random(), 1000000, 5000000), 2), onlyStored);
final Field bigField = new Field("fld", randomByteArray(RandomNumbers.randomIntBetween(random(), 1000000, 5000000), 2), onlyStored);
bigDoc2.add(bigField);
final int numDocs = atLeast(5);

View File

@ -22,7 +22,7 @@ import java.util.Random;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.util.Bits;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
/** Wraps a Scorer with additional checks */
final class AssertingBulkScorer extends BulkScorer {
@ -82,7 +82,7 @@ final class AssertingBulkScorer extends BulkScorer {
assert next == DocIdSetIterator.NO_MORE_DOCS;
return DocIdSetIterator.NO_MORE_DOCS;
} else {
return RandomInts.randomIntBetween(random, max, next);
return RandomNumbers.randomIntBetween(random, max, next);
}
}

View File

@ -18,7 +18,7 @@ package org.apache.lucene.search;
import java.io.IOException;
import java.util.Random;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
@ -185,7 +185,7 @@ public class RandomApproximationQuery extends Query {
if (disi.docID() == NO_MORE_DOCS) {
return doc = NO_MORE_DOCS;
}
return doc = RandomInts.randomIntBetween(random, target, disi.docID());
return doc = RandomNumbers.randomIntBetween(random, target, disi.docID());
}
@Override

View File

@ -125,7 +125,7 @@ public final class RunListenerPrintReproduceInfo extends RunListener {
/** print some useful debugging information about the environment */
private static void printDebuggingInformation() {
if (classEnvRule != null) {
if (classEnvRule != null && classEnvRule.isInitialized()) {
System.err.println("NOTE: test params are: codec=" + classEnvRule.codec +
", sim=" + classEnvRule.similarity +
", locale=" + classEnvRule.locale.toLanguageTag() +
@ -176,7 +176,7 @@ public final class RunListenerPrintReproduceInfo extends RunListener {
// Environment.
if (!TEST_LINE_DOCS_FILE.equals(DEFAULT_LINE_DOCS_FILE)) addVmOpt(b, "tests.linedocsfile", TEST_LINE_DOCS_FILE);
if (classEnvRule != null) {
if (classEnvRule != null && classEnvRule.isInitialized()) {
addVmOpt(b, "tests.locale", classEnvRule.locale.toLanguageTag());
if (classEnvRule.timeZone != null) {
addVmOpt(b, "tests.timezone", classEnvRule.timeZone.getID());

View File

@ -72,13 +72,17 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
Similarity similarity;
Codec codec;
/**
* Indicates whether the rule has executed its {@link #before()} method fully.
*/
private boolean initialized;
/**
* @see SuppressCodecs
*/
HashSet<String> avoidCodecs;
static class ThreadNameFixingPrintStreamInfoStream extends PrintStreamInfoStream {
public ThreadNameFixingPrintStreamInfoStream(PrintStream out) {
super(out);
}
@ -100,6 +104,10 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
}
}
public boolean isInitialized() {
return initialized;
}
@Override
protected void before() throws Exception {
// enable this by default, for IDE consistency with ant tests (as it's the default from ant)
@ -113,7 +121,6 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
if (VERBOSE) {
System.out.println("Loaded codecs: " + Codec.availableCodecs());
System.out.println("Loaded postingsFormats: " + PostingsFormat.availablePostingsFormats());
}
savedInfoStream = InfoStream.getDefault();
@ -235,6 +242,8 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
}
LuceneTestCase.setLiveIWCFlushMode(flushMode);
initialized = true;
}
/**

View File

@ -100,7 +100,7 @@ import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Assert;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
/**
@ -429,7 +429,7 @@ public final class TestUtil {
/** start and end are BOTH inclusive */
public static int nextInt(Random r, int start, int end) {
return RandomInts.randomIntBetween(r, start, end);
return RandomNumbers.randomIntBetween(r, start, end);
}
/** start and end are BOTH inclusive */
@ -580,7 +580,7 @@ public final class TestUtil {
final StringBuilder regexp = new StringBuilder(maxLength);
for (int i = nextInt(r, 0, maxLength); i > 0; i--) {
if (r.nextBoolean()) {
regexp.append((char) RandomInts.randomIntBetween(r, 'a', 'z'));
regexp.append((char) RandomNumbers.randomIntBetween(r, 'a', 'z'));
} else {
regexp.append(RandomPicks.randomFrom(r, ops));
}

View File

@ -36,7 +36,7 @@ import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.Directory;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTestCase {
@ -52,7 +52,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
public void testDeletePartiallyWrittenFilesIfAbort() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
iwConf.setCodec(CompressingCodec.randomInstance(random()));
// disable CFS because this test checks file names
iwConf.setMergePolicy(newLogMergePolicy(false));

View File

@ -25,7 +25,8 @@ Upgrading from Solr 6.x
SolrHttpClientBuilder rather than an HttpClientConfigurer.
* HttpClientUtil now allows configuring HttpClient instances via SolrHttpClientBuilder
rather than an HttpClientConfigurer.
rather than an HttpClientConfigurer. Use of the env variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER
no longer works; please use SOLR_AUTHENTICATION_CLIENT_BUILDER instead.
* SolrClient implementations now use their own internal configuration for socket timeouts,
connect timeouts, and allowing redirects rather than what is set as the default when
@ -56,6 +57,8 @@ Optimizations
check on every request and move connection lifecycle management towards the client.
(Ryan Zezeski, Mark Miller, Shawn Heisey, Steve Davids)
* SOLR-9255: Rename SOLR_AUTHENTICATION_CLIENT_CONFIGURER -> SOLR_AUTHENTICATION_CLIENT_BUILDER (janhoy)
* SOLR-9579: Make Solr's SchemaField implement Lucene's IndexableFieldType, removing the
creation of a Lucene FieldType every time a field is indexed. (John Call, yonik)
@ -95,7 +98,7 @@ Upgrade Notes
* The create/deleteCollection methods on MiniSolrCloudCluster have been
deprecated. Clients should instead use the CollectionAdminRequest API. In
addition, MiniSolrCloudCluster#uploadConfigSet(File, String) has been
addition, MiniSolrCloudCluster#uploadConfigDir(File, String) has been
deprecated in favour of #uploadConfigSet(Path, String)
* The bin/solr.in.sh (bin/solr.in.cmd on Windows) is now completely commented by default. Previously, this wasn't so,
@ -149,6 +152,27 @@ New Features
* SOLR-8370: Display configured Similarity in Schema-Browser, both global/default and per-field/field-type
(janhoy, Alexandre Rafalovitch)
* SOLR-9326: Ability to create/delete/list snapshots at collection level.
(Hrishikesh Gadre via yonik)
* SOLR-9662: New parameter -u <user:pass> in bin/post to pass basicauth credentials (janhoy)
* SOLR-9654: Add "overrequest" parameter to JSON Facet API to control amount of overrequest
on a distributed terms facet. (yonik)
* SOLR-9481: Authentication and Authorization plugins now work in standalone mode if security.json is placed in
SOLR_HOME on every node. Editing config through API is supported but affects only that one node.
(janhoy)
* SOLR-2212: Add a factory class corresponding to Lucene's NoMergePolicy. (Lance Norskog, Cao Manh Dat via shalin)
* SOLR-9670: Support SOLR_AUTHENTICATION_OPTS in solr.cmd (janhoy)
* SOLR-9559: Add ExecutorStream to execute stored Streaming Expressions (Joel Bernstein)
* SOLR-1085: Add support for MoreLikeThis queries and responses in SolrJ client.
(Maurice Jumelet, Bill Mitchell, Cao Manh Dat via shalin)
Bug Fixes
----------------------
@ -195,6 +219,28 @@ Bug Fixes
* SOLR-9325: solr.log is now written to $SOLR_LOGS_DIR without changing log4j.properties (janhoy)
* SOLR-9518: Kerberos Delegation Tokens don't work without a chrooted ZK (Ishan Chattopadhyaya,via noble)
* SOLR-9687: Fixed Interval Facet count issue in cases of open/close intervals on the same values
(Andy Chillrud, Tomás Fernández Löbbe)
* SOLR-9441: Solr collection backup on HDFS can only be manipulated by the Solr process owner.
(Hrishikesh Gadre via Mark Miller)
* SOLR-9536: OldBackupDirectory timestamp field needs to be initialized to avoid NPE.
(Hrishikesh Gadre, hossman via Mark Miller)
* SOLR-2039: Multivalued fields with dynamic names does not work properly with DIH.
(K A, ruslan.shv, Cao Manh Dat via shalin)
* SOLR-4164: group.limit=-1 was not supported for grouping in distributed mode.
(Cao Manh Dat, Lance Norskog, Webster Homer, hossman, yonik)
* SOLR-9692: blockUnknown property makes inter-node communication impossible (noble)
* SOLR-2094: XPathEntityProcessor should reinitialize the XPathRecordReader instance if
the 'forEach' or 'xpath' attributes are templates & it is not a root entity (Cao Manh Dat, noble)
Optimizations
----------------------
@ -220,6 +266,12 @@ Optimizations
* SOLR-9566: Don't put replicas into recovery when first creating a Collection
(Alan Woodward)
* SOLR-9546: Eliminate unnecessary boxing/unboxing going on in SolrParams (Pushkar Raste, noble)
* SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, yonik, noble)
* SOLR-7506: Roll over GC logs by default via bin/solr scripts (shalin, janhoy)
Other Changes
----------------------
@ -297,6 +349,8 @@ Other Changes
* SOLR-9610: New AssertTool in SolrCLI for easier cross platform assertions from command line (janhoy)
* SOLR-9680: Better error messages in SolrCLI when authentication required (janhoy)
* SOLR-9639: Test only fix. Prevent CDCR tests from removing collection during recovery that used to blow up jvm (Mikhail Khludnev)
* SOLR-9625: Add HelloWorldSolrCloudTestCase class (Christine Poerschke, Alan Woodward, Alexandre Rafalovitch)
@ -319,6 +373,15 @@ Other Changes
Last JVM garbage collection log solr_gc.log is moved into $SOLR_LOGS_DIR/archived/
(janhoy)
* SOLR-4531: Add tests to ensure that recovery does not fail on corrupted tlogs.
(Simon Scofield, Cao Manh Dat via shalin)
* SOLR-5245: Add a test to ensure that election contexts are keyed off both collection name and coreNodeName
so that killing a shard in one collection does not result in leader election in a different collection.
See SOLR-5243 for the related bug. (Cao Manh Dat via shalin)
* SOLR-9533: Reload core config when a core is reloaded (Gethin James, Joel Bernstein)
================== 6.2.1 ==================
Bug Fixes
@ -3271,7 +3334,7 @@ Bug Fixes
while accessing other collections. (Shai Erera)
* SOLR-7412: Fixed range.facet.other parameter for distributed requests.
(Will Miller, Tomás Fernándes Löbbe)
(Will Miller, Tomás Fernández Löbbe)
* SOLR-6087: SolrIndexSearcher makes no DelegatingCollector.finish() call when IndexSearcher
throws an expected exception. (Christine Poerschke via shalin)

View File

@ -344,8 +344,6 @@ if [ -f "$SOLR_VAR_DIR/log4j.properties" ]; then
echo -e "\n$SOLR_VAR_DIR/log4j.properties already exists. Skipping install ...\n"
else
cp "$SOLR_INSTALL_DIR/server/resources/log4j.properties" "$SOLR_VAR_DIR/log4j.properties"
sed_expr="s#solr.log=.*#solr.log=\${solr.solr.home}/../logs#"
sed -i -e "$sed_expr" "$SOLR_VAR_DIR/log4j.properties"
fi
chown -R "$SOLR_USER:" "$SOLR_VAR_DIR"
find "$SOLR_VAR_DIR" -type d -print0 | xargs -0 chmod 0750

View File

@ -68,6 +68,7 @@ function print_usage() {
echo " -host <host> (default: localhost)"
echo " -p or -port <port> (default: 8983)"
echo " -commit yes|no (default: yes)"
echo " -u or -user <user:pass> (sets BasicAuth credentials)"
# optimize intentionally omitted, but can be used as '-optimize yes' (default: no)
echo ""
echo " Web crawl options:"
@ -155,13 +156,23 @@ while [ $# -gt 0 ]; do
ARGS+=("<add/>")
fi
fi
else
key="${1:1}"
elif [[ ("$1" == "-u" || "$1" == "-user") ]]; then
shift
# echo "$1: PROP"
PROPS+=("-D$key=$1")
if [[ "$key" == "url" ]]; then
SOLR_URL=$1
PROPS+=("-Dbasicauth=$1")
else
if [[ "$1" == -D* ]] ; then
PROPS+=("$1")
if [[ "${1:2:4}" == "url=" ]]; then
SOLR_URL=${1:6}
fi
else
key="${1:1}"
shift
# echo "$1: PROP"
PROPS+=("-D$key=$1")
if [[ "$key" == "url" ]]; then
SOLR_URL=$1
fi
fi
fi
else

View File

@ -178,9 +178,13 @@ fi
# Authentication options
if [ "$SOLR_AUTHENTICATION_CLIENT_CONFIGURER" != "" ]; then
AUTHC_CLIENT_CONFIGURER_ARG="-Dsolr.authentication.httpclient.configurer=$SOLR_AUTHENTICATION_CLIENT_CONFIGURER"
echo "WARNING: Found unsupported configuration variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER"
echo " Please start using SOLR_AUTHENTICATION_CLIENT_BUILDER instead"
fi
AUTHC_OPTS="$AUTHC_CLIENT_CONFIGURER_ARG $SOLR_AUTHENTICATION_OPTS"
if [ "$SOLR_AUTHENTICATION_CLIENT_BUILDER" != "" ]; then
AUTHC_CLIENT_BUILDER_ARG="-Dsolr.authentication.httpclient.builder=$SOLR_AUTHENTICATION_CLIENT_BUILDER"
fi
AUTHC_OPTS="$AUTHC_CLIENT_BUILDER_ARG $SOLR_AUTHENTICATION_OPTS"
# Set the SOLR_TOOL_HOST variable for use when connecting to a running Solr instance
if [ "$SOLR_HOST" != "" ]; then
@ -1407,13 +1411,14 @@ if [ -z ${GC_LOG_OPTS+x} ]; then
else
GC_LOG_OPTS=($GC_LOG_OPTS)
fi
# if verbose gc logging enabled, setup the location of the log file
# if verbose gc logging enabled, setup the location of the log file and rotation
if [ "$GC_LOG_OPTS" != "" ]; then
gc_log_flag="-Xloggc"
if [ "$JAVA_VENDOR" == "IBM J9" ]; then
gc_log_flag="-Xverbosegclog"
fi
GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log")
GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M)
fi
# If ZK_HOST is defined, then assume SolrCloud mode

View File

@ -56,6 +56,16 @@ IF DEFINED SOLR_SSL_KEY_STORE (
set SOLR_SSL_OPTS=
)
REM Authentication options
IF DEFINED SOLR_AUTHENTICATION_CLIENT_CONFIGURER (
echo WARNING: Found unsupported configuration variable SOLR_AUTHENTICATION_CLIENT_CONFIGURER
echo Please start using SOLR_AUTHENTICATION_CLIENT_BUILDER instead
)
IF DEFINED SOLR_AUTHENTICATION_CLIENT_BUILDER (
set AUTHC_CLIENT_BUILDER_ARG="-Dsolr.authentication.httpclient.builder=%SOLR_AUTHENTICATION_CLIENT_BUILDER%"
)
set "AUTHC_OPTS=%AUTHC_CLIENT_BUILDER_ARG% %SOLR_AUTHENTICATION_OPTS%"
REM Set the SOLR_TOOL_HOST variable for use when connecting to a running Solr instance
IF NOT "%SOLR_HOST%"=="" (
set "SOLR_TOOL_HOST=%SOLR_HOST%"
@ -1013,23 +1023,23 @@ IF NOT EXIST "%SOLR_SERVER_DIR%\tmp" (
)
IF "%JAVA_VENDOR%" == "IBM J9" (
set "GCLOG_OPT=-Xverbosegclog"
set GCLOG_OPT="-Xverbosegclog:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
) else (
set "GCLOG_OPT=-Xloggc"
set GCLOG_OPT="-Xloggc:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
)
IF "%FG%"=="1" (
REM run solr in the foreground
title "Solr-%SOLR_PORT%"
echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
"%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
"%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
-Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
-Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
-Djetty.host=%SOLR_JETTY_HOST% -Djetty.port=%SOLR_PORT% -Djetty.home="%SOLR_SERVER_DIR%" ^
-Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%"
) ELSE (
START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" ^
"%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
"%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
-Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
-Dsolr.log.muteconsole ^
-Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
@ -1038,7 +1048,7 @@ IF "%FG%"=="1" (
echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
REM now wait to see Solr come online ...
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI status -maxWaitSecs 30 -solr !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:%SOLR_PORT%/solr
@ -1049,9 +1059,11 @@ goto done
:run_example
REM Run the requested example
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI run_example -script "%SDIR%\solr.cmd" -e %EXAMPLE% -d "%SOLR_SERVER_DIR%" -urlScheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE!
org.apache.solr.util.SolrCLI run_example -script "%SDIR%\solr.cmd" -e %EXAMPLE% -d "%SOLR_SERVER_DIR%" ^
-urlScheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE!
REM End of run_example
goto done
@ -1069,7 +1081,8 @@ for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port
@echo.
set has_info=1
echo Found Solr process %%k running on port !SOME_SOLR_PORT!
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI status -solr !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!SOME_SOLR_PORT!/solr
@echo.
@ -1108,13 +1121,15 @@ goto parse_healthcheck_args
:run_healthcheck
IF NOT DEFINED HEALTHCHECK_COLLECTION goto healthcheck_usage
IF NOT DEFINED HEALTHCHECK_ZK_HOST set "HEALTHCHECK_ZK_HOST=localhost:9983"
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI healthcheck -collection !HEALTHCHECK_COLLECTION! -zkHost !HEALTHCHECK_ZK_HOST!
goto done
:run_assert
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI %*
if errorlevel 1 (
@ -1123,14 +1138,16 @@ if errorlevel 1 (
goto done
:get_version
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI version
goto done
:run_utils
set "TOOL_CMD=%~1"
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI utils -s "%DEFAULT_SERVER_DIR%" -l "%SOLR_LOGS_DIR%" %TOOL_CMD%
if errorlevel 1 (
@ -1222,15 +1239,18 @@ if "!CREATE_PORT!"=="" (
)
if "%SCRIPT_CMD%"=="create_core" (
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI create_core -name !CREATE_NAME! -solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!CREATE_PORT!/solr ^
-confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets"
) else (
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI create -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^
-confname !CREATE_CONFNAME! -confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets" -solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!CREATE_PORT!/solr
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI create -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^
-confname !CREATE_CONFNAME! -confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets" ^
-solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!CREATE_PORT!/solr
)
goto done
@ -1293,7 +1313,8 @@ if "!DELETE_CONFIG!"=="" (
set DELETE_CONFIG=true
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI delete -name !DELETE_NAME! -deleteConfig !DELETE_CONFIG! ^
-solrUrl !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!DELETE_PORT!/solr
@ -1420,9 +1441,11 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="The -d option must be set for upconfig."
goto zk_short_usage
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -confname !CONFIGSET_NAME! -confdir !CONFIGSET_DIR! -zkHost !ZK_HOST! -configsetsDir "%SOLR_TIP%/server/solr/configsets"
org.apache.solr.util.SolrCLI !ZK_OP! -confname !CONFIGSET_NAME! -confdir !CONFIGSET_DIR! -zkHost !ZK_HOST! ^
-configsetsDir "%SOLR_TIP%/server/solr/configsets"
) ELSE IF "!ZK_OP!"=="downconfig" (
IF "!CONFIGSET_NAME!"=="" (
set ERROR_MSG="-n option must be set for downconfig"
@ -1432,7 +1455,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="The -d option must be set for downconfig."
goto zk_short_usage
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -confname !CONFIGSET_NAME! -confdir !CONFIGSET_DIR! -zkHost !ZK_HOST!
) ELSE IF "!ZK_OP!"=="cp" (
@ -1450,7 +1474,8 @@ IF "!ZK_OP!"=="upconfig" (
goto zk_short_usage
)
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -src !ZK_SRC! -dst !ZK_DST! -recurse !ZK_RECURSE!
) ELSE IF "!ZK_OP!"=="mv" (
@ -1462,7 +1487,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="<dest> must be specified for 'mv' command"
goto zk_short_usage
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -src !ZK_SRC! -dst !ZK_DST!
) ELSE IF "!ZK_OP!"=="rm" (
@ -1470,7 +1496,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="Zookeeper path to remove must be specified when using the 'rm' command"
goto zk_short_usage
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -path !ZK_SRC! -recurse !ZK_RECURSE!
) ELSE IF "!ZK_OP!"=="ls" (
@ -1478,7 +1505,8 @@ IF "!ZK_OP!"=="upconfig" (
set ERROR_MSG="Zookeeper path to remove must be specified when using the 'rm' command"
goto zk_short_usage
)
"%JAVA%" %SOLR_SSL_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% -Dsolr.install.dir="%SOLR_TIP%" ^
-Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^
-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
org.apache.solr.util.SolrCLI !ZK_OP! -zkHost !ZK_HOST! -path !ZK_SRC! -recurse !ZK_RECURSE!
) ELSE (

View File

@ -98,6 +98,10 @@ REM set SOLR_SSL_CLIENT_KEY_STORE_PASSWORD=
REM set SOLR_SSL_CLIENT_TRUST_STORE=
REM setSOLR_SSL_CLIENT_TRUST_STORE_PASSWORD=
REM Settings for authentication
REM set SOLR_AUTHENTICATION_CLIENT_BUILDER=
REM set SOLR_AUTHENTICATION_OPTS="-Dbasicauth=solr:SolrRocks"
REM Settings for ZK ACL
REM set SOLR_ZK_CREDS_AND_ACLS=-DzkACLProvider=org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider ^
REM -DzkCredentialsProvider=org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider ^

Some files were not shown because too many files have changed in this diff Show More