mirror of https://github.com/apache/lucene.git

commit 1cd859713b
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr
@@ -238,7 +238,6 @@
             <quiet>true</quiet>
             <additionalparam>-Xdoclint:all</additionalparam>
             <additionalparam>-Xdoclint:-missing</additionalparam>
-            <additionalparam>-proc:none</additionalparam>
           </configuration>
         </plugin>
         <plugin>
@@ -38,6 +38,9 @@ API Changes
 * LUCENE-8113: TermContext has been renamed to TermStates, and can now be
   constructed lazily if term statistics are not required (Alan Woodward)
 
+* LUCENE-8242: Deprecated method IndexSearcher#createNormalizedWeight() has
+  been removed (Alan Woodward)
+
 Changes in Runtime Behavior
 
 * LUCENE-7837: Indices that were created before the previous major version
@@ -95,6 +98,12 @@ Optimizations
 
 ======================= Lucene 7.4.0 =======================
 
+API Changes
+
+* LUCENE-8242: IndexSearcher.createNormalizedWeight() has been deprecated.
+  Instead use IndexSearcher.createWeight(), rewriting the query first.
+  (Alan Woodward)
+
 New Features
 
 * LUCENE-8200: Allow doc-values to be updated atomically together
@@ -121,6 +130,10 @@ New Features
   to selectively carry over soft_deleted document across merges for retention
   policies (Simon Willnauer, Mike McCandless, Robert Muir)
 
+* LUCENE-8237: Add a SoftDeletesDirectoryReaderWrapper that allows to respect
+  soft deletes if the reader is opened form a directory. (Simon Willnauer,
+  Mike McCandless, Uwe Schindler, Adrien Grand)
+
 Bug Fixes
 
 * LUCENE-8234: Fixed bug in how spatial relationship is computed for
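For context, a minimal sketch of how the wrapper introduced by LUCENE-8237 is
meant to be used, mirroring the tests added later in this commit (the Directory
variable dir and the field name are illustrative placeholders):

    // Wrap a regular reader so that documents carrying a value in the
    // "soft_delete" doc-values field are hidden, just like hard deletes.
    DirectoryReader reader =
        new SoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), "soft_delete");
    // reader.numDocs() now excludes soft-deleted documents; maxDoc() is unchanged.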
@@ -129,6 +142,10 @@ Bug Fixes
 * LUCENE-8236: Filter duplicated points when creating GeoPath shapes to
   avoid creation of bogus planes. (Ignacio Vera)
 
+* LUCENE-8243: IndexWriter.addIndexes(Directory[]) did not properly preserve
+  index file names for updated doc values fields (Simon Willnauer,
+  Michael McCandless, Nhat Nguyen)
+
 Other
 
 * LUCENE-8228: removed obsolete IndexDeletionPolicy clone() requirements from
@@ -37,3 +37,9 @@ Changing index options on the fly is now going to result into an
 IllegalArgumentException. If a field is indexed
 (FieldType.indexOptions() != IndexOptions.NONE) then all documents must have
 the same index options for that field.
+
+
+## IndexSearcher.createNormalizedWeight() removed (LUCENE-8242) ##
+
+Instead use IndexSearcher.createWeight(), rewriting the query first, and using
+a boost of 1f.
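A before/after sketch of that migration (searcher and query are placeholders;
this mirrors the call-site changes made throughout this commit):

    // Before (Lucene 7.x):
    //   Weight w = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
    // After: rewrite explicitly, then create the weight with a boost of 1f.
    Query rewritten = searcher.rewrite(query);
    Weight w = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f);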
@@ -21,10 +21,12 @@ import java.util.Random;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 
 /**
  * test ICUTokenizer with dictionary-based CJ segmentation
  */
+@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/LUCENE-8222")
 public class TestICUTokenizerCJK extends BaseTokenStreamTestCase {
   Analyzer a;
 
@@ -685,7 +685,8 @@ class FrozenBufferedUpdates {
       }
       final IndexSearcher searcher = new IndexSearcher(readerContext.reader());
       searcher.setQueryCache(null);
-      final Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+      query = searcher.rewrite(query);
+      final Weight weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1);
       final Scorer scorer = weight.scorer(readerContext);
       if (scorer != null) {
         final DocIdSetIterator it = scorer.iterator();
@@ -3207,8 +3207,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
                                            info.info.getDiagnostics(), info.info.getId(), info.info.getAttributes(), info.info.getIndexSort());
     SegmentCommitInfo newInfoPerCommit = new SegmentCommitInfo(newInfo, info.getDelCount(), info.getDelGen(),
                                                                info.getFieldInfosGen(), info.getDocValuesGen());
 
-    newInfo.setFiles(info.files());
+    newInfo.setFiles(info.info.files());
+    newInfoPerCommit.setFieldInfosFiles(info.getFieldInfosFiles());
+    newInfoPerCommit.setDocValuesUpdatesFiles(info.getDocValuesUpdatesFiles());
 
     boolean success = false;
 
@@ -3228,7 +3230,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       }
     }
 
-    assert copiedFiles.equals(newInfoPerCommit.files());
+    assert copiedFiles.equals(newInfoPerCommit.files()): "copiedFiles=" + copiedFiles + " vs " + newInfoPerCommit.files();
 
     return newInfoPerCommit;
   }
@@ -3569,6 +3571,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     return seqNo;
   }
 
+  @SuppressWarnings("try")
   private final void finishCommit() throws IOException {
 
     boolean commitCompleted = false;
@@ -73,7 +73,7 @@ final class PendingSoftDeletes extends PendingDeletes {
       this.pendingDeleteCount = 0;
     } else {
       assert info.info.maxDoc() > 0 : "maxDoc is 0";
-      applyUpdates(iterator);
+      pendingDeleteCount += applySoftDeletes(iterator, getMutableBits());
     }
     dvGeneration = info.getDocValuesGen();
   }
@@ -94,19 +94,26 @@ final class PendingSoftDeletes extends PendingDeletes {
     hardDeletes.reset();
   }
 
-  private void applyUpdates(DocIdSetIterator iterator) throws IOException {
-    final MutableBits mutableBits = getMutableBits();
+  /**
+   * Clears all bits in the given bitset that are set and are also in the given DocIdSetIterator.
+   *
+   * @param iterator the doc ID set iterator for apply
+   * @param bits the bit set to apply the deletes to
+   * @return the number of bits changed by this function
+   */
+  static int applySoftDeletes(DocIdSetIterator iterator, MutableBits bits) throws IOException {
+    assert iterator != null;
     int newDeletes = 0;
     int docID;
     while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-      if (mutableBits.get(docID)) { // doc is live - clear it
-        mutableBits.clear(docID);
+      if (bits.get(docID)) { // doc is live - clear it
+        bits.clear(docID);
         newDeletes++;
         // now that we know we deleted it and we fully control the hard deletes we can do correct accounting
         // below.
       }
     }
-    pendingDeleteCount += newDeletes;
+    return newDeletes;
   }
 
   @Override
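To make the refactoring concrete, a small illustration (ours, not part of the
patch) of what applySoftDeletes does to a live-docs bitset. FixedBitSet and
BitSetIterator are existing Lucene utilities, and the method is package-private,
so this only compiles inside org.apache.lucene.index:

    FixedBitSet liveDocs = new FixedBitSet(5);
    liveDocs.set(0, 5);              // all five docs start out live
    FixedBitSet softDeleted = new FixedBitSet(5);
    softDeleted.set(1);
    softDeleted.set(3);              // docs 1 and 3 carry a soft-deletes value
    // walk the iterator and clear every bit that is still set,
    // returning how many bits were changed
    int changed = PendingSoftDeletes.applySoftDeletes(
        new BitSetIterator(softDeleted, 2), liveDocs);
    // changed == 2; docs 0, 2 and 4 remain live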
@@ -118,7 +125,7 @@ final class PendingSoftDeletes extends PendingDeletes {
         subs[i] = updatesToApply.get(i).iterator();
       }
       DocValuesFieldUpdates.Iterator iterator = DocValuesFieldUpdates.mergedIterator(subs);
-      applyUpdates(new DocIdSetIterator() {
+      pendingDeleteCount += applySoftDeletes(new DocIdSetIterator() {
         int docID = -1;
         @Override
         public int docID() {
@@ -139,7 +146,7 @@ final class PendingSoftDeletes extends PendingDeletes {
         public long cost() {
           throw new UnsupportedOperationException();
         }
-      });
+      }, getMutableBits());
       dvGeneration = info.getDocValuesGen();
     }
   }
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.apache.lucene.document.Field;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FixedBitSet;
+
+/**
+ * This reader filters out documents that have a doc values value in the given field and treat these
+ * documents as soft deleted. Hard deleted documents will also be filtered out in the life docs of this reader.
+ * @see IndexWriterConfig#setSoftDeletesField(String)
+ * @see IndexWriter#softUpdateDocument(Term, Iterable, Field...)
+ * @see SoftDeletesRetentionMergePolicy
+ */
+public final class SoftDeletesDirectoryReaderWrapper extends FilterDirectoryReader {
+  private final String field;
+  private final CacheHelper readerCacheHelper;
+  /**
+   * Creates a new soft deletes wrapper.
+   * @param in the incoming directory reader
+   * @param field the soft deletes field
+   */
+  public SoftDeletesDirectoryReaderWrapper(DirectoryReader in, String field) throws IOException {
+    this(in, new SoftDeletesSubReaderWrapper(Collections.emptyMap(), field));
+  }
+
+  private SoftDeletesDirectoryReaderWrapper(DirectoryReader in, SoftDeletesSubReaderWrapper wrapper) throws IOException {
+    super(in, wrapper);
+    this.field = wrapper.field;
+    readerCacheHelper = in.getReaderCacheHelper() == null ? null : new DelegatingCacheHelper(in.getReaderCacheHelper());
+  }
+
+  @Override
+  protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
+    Map<CacheKey, LeafReader> readerCache = new HashMap<>();
+    for (LeafReader reader : getSequentialSubReaders()) {
+      // we try to reuse the life docs instances here if the reader cache key didn't change
+      if (reader instanceof SoftDeletesFilterLeafReader && reader.getReaderCacheHelper() != null) {
+        readerCache.put(((SoftDeletesFilterLeafReader) reader).reader.getReaderCacheHelper().getKey(), reader);
+      }
+
+    }
+    return new SoftDeletesDirectoryReaderWrapper(in, new SoftDeletesSubReaderWrapper(readerCache, field));
+  }
+
+  @Override
+  public CacheHelper getReaderCacheHelper() {
+    return readerCacheHelper;
+  }
+
+  private static class SoftDeletesSubReaderWrapper extends SubReaderWrapper {
+    private final Map<CacheKey, LeafReader> mapping;
+    private final String field;
+
+    public SoftDeletesSubReaderWrapper(Map<CacheKey, LeafReader> oldReadersCache, String field) {
+      Objects.requireNonNull(field, "Field must not be null");
+      assert oldReadersCache != null;
+      this.mapping = oldReadersCache;
+      this.field = field;
+    }
+
+    @Override
+    public LeafReader wrap(LeafReader reader) {
+      CacheHelper readerCacheHelper = reader.getReaderCacheHelper();
+      if (readerCacheHelper != null && mapping.containsKey(readerCacheHelper.getKey())) {
+        // if the reader cache helper didn't change and we have it in the cache don't bother creating a new one
+        return mapping.get(readerCacheHelper.getKey());
+      }
+      try {
+        return SoftDeletesDirectoryReaderWrapper.wrap(reader, field);
+      } catch (IOException e) {
+        throw new UncheckedIOException(e);
+      }
+    }
+  }
+
+  static LeafReader wrap(LeafReader reader, String field) throws IOException {
+    DocIdSetIterator iterator = DocValuesFieldExistsQuery.getDocValuesDocIdSetIterator(field, reader);
+    if (iterator == null) {
+      return reader;
+    }
+    Bits liveDocs = reader.getLiveDocs();
+    final FixedBitSet bits;
+    if (liveDocs != null) {
+      bits = SoftDeletesRetentionMergePolicy.cloneLiveDocs(liveDocs);
+    } else {
+      bits = new FixedBitSet(reader.maxDoc());
+      bits.set(0, reader.maxDoc());
+    }
+    int numDeletes = reader.numDeletedDocs() + PendingSoftDeletes.applySoftDeletes(iterator, bits);
+    int numDocs = reader.maxDoc() - numDeletes;
+    return new SoftDeletesFilterLeafReader(reader, bits, numDocs);
+  }
+
+  static final class SoftDeletesFilterLeafReader extends FilterLeafReader {
+    private final LeafReader reader;
+    private final FixedBitSet bits;
+    private final int numDocs;
+    private final CacheHelper readerCacheHelper;
+
+    private SoftDeletesFilterLeafReader(LeafReader reader, FixedBitSet bits, int numDocs) {
+      super(reader);
+      this.reader = reader;
+      this.bits = bits;
+      this.numDocs = numDocs;
+      this.readerCacheHelper = reader.getReaderCacheHelper() == null ? null :
+          new DelegatingCacheHelper(reader.getReaderCacheHelper());
+    }
+
+    @Override
+    public Bits getLiveDocs() {
+      return bits;
+    }
+
+    @Override
+    public int numDocs() {
+      return numDocs;
+    }
+
+    @Override
+    public CacheHelper getCoreCacheHelper() {
+      return reader.getCoreCacheHelper();
+    }
+
+    @Override
+    public CacheHelper getReaderCacheHelper() {
+      return readerCacheHelper;
+    }
+  }
+
+  private static class DelegatingCacheHelper implements CacheHelper {
+    private final CacheHelper delegate;
+    private final CacheKey cacheKey = new CacheKey();
+
+    public DelegatingCacheHelper(CacheHelper delegate) {
+      this.delegate = delegate;
+    }
+
+    @Override
+    public CacheKey getKey() {
+      return cacheKey;
+    }
+
+    @Override
+    public void addClosedListener(ClosedListener listener) {
+      // here we wrap the listener and call it with our cache key
+      // this is important since this key will be used to cache the reader and otherwise we won't free caches etc.
+      delegate.addClosedListener(unused -> listener.onClose(cacheKey));
+    }
+  }
+}
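Two design choices in this new file are worth noting (our reading of the code
above, not text from the commit): doWrapDirectoryReader() keeps a map from each
unwrapped leaf's cache key to its already-wrapped leaf, so reopening via
openIfChanged() re-applies soft deletes only to segments that actually changed;
and DelegatingCacheHelper gives the wrapper its own CacheKey while forwarding
close notifications from the delegate, so caches keyed on the wrapped reader
are still invalidated when the underlying reader closes.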
@@ -99,32 +99,39 @@ public final class SoftDeletesRetentionMergePolicy extends OneMergeWrappingMergePolicy {
     }, reader.maxDoc() - reader.numDocs());
     Scorer scorer = getScorer(softDeleteField, retentionQuery, wrappedReader);
     if (scorer != null) {
-      FixedBitSet mutableBits;
-      if (liveDocs instanceof FixedBitSet) {
-        mutableBits = ((FixedBitSet) liveDocs).clone();
-      } else { // mainly if we have asserting codec
-        mutableBits = new FixedBitSet(liveDocs.length());
-        for (int i = 0; i < liveDocs.length(); i++) {
-          if (liveDocs.get(i)) {
-            mutableBits.set(i);
-          }
-        }
-      }
+      FixedBitSet cloneLiveDocs = cloneLiveDocs(liveDocs);
       DocIdSetIterator iterator = scorer.iterator();
       int numExtraLiveDocs = 0;
       while (iterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-        if (mutableBits.getAndSet(iterator.docID()) == false) {
+        if (cloneLiveDocs.getAndSet(iterator.docID()) == false) {
          // if we bring one back to live we need to account for it
           numExtraLiveDocs++;
         }
       }
       assert reader.numDocs() + numExtraLiveDocs <= reader.maxDoc() : "numDocs: " + reader.numDocs() + " numExtraLiveDocs: " + numExtraLiveDocs + " maxDoc: " + reader.maxDoc();
-      return wrapLiveDocs(reader, mutableBits, reader.numDocs() + numExtraLiveDocs);
+      return wrapLiveDocs(reader, cloneLiveDocs, reader.numDocs() + numExtraLiveDocs);
     } else {
       return reader;
     }
   }
 
+  /**
+   * Clones the given live docs
+   */
+  static FixedBitSet cloneLiveDocs(Bits liveDocs) {
+    if (liveDocs instanceof FixedBitSet) {
+      return ((FixedBitSet) liveDocs).clone();
+    } else { // mainly if we have asserting codec
+      FixedBitSet mutableBits = new FixedBitSet(liveDocs.length());
+      for (int i = 0; i < liveDocs.length(); i++) {
+        if (liveDocs.get(i)) {
+          mutableBits.set(i);
+        }
+      }
+      return mutableBits;
+    }
+  }
+
   private static Scorer getScorer(String softDeleteField, Query retentionQuery, CodecReader reader) throws IOException {
     BooleanQuery.Builder builder = new BooleanQuery.Builder();
     builder.add(new DocValuesFieldExistsQuery(softDeleteField), BooleanClause.Occur.FILTER);
@@ -197,7 +197,7 @@ public final class StandardDirectoryReader extends DirectoryReader {
 
       if (oldReader.getSegmentInfo().getDelGen() == commitInfo.getDelGen()) {
         // only DV updates
-        newReaders[i] = new SegmentReader(commitInfo, oldReader, oldReader.getLiveDocs(), oldReader.numDocs());
+        newReaders[i] = new SegmentReader(commitInfo, oldReader, oldReader.getLiveDocs(), oldReader.numDocs(), false); // this is not an NRT reader!
       } else {
         // both DV and liveDocs have changed
         newReaders[i] = new SegmentReader(commitInfo, oldReader);
@@ -414,7 +414,8 @@ public class IndexSearcher {
    */
   public void search(Query query, Collector results)
     throws IOException {
-    search(leafContexts, createNormalizedWeight(query, results.scoreMode()), results);
+    query = rewrite(query);
+    search(leafContexts, createWeight(query, results.scoreMode(), 1), results);
   }
 
   /** Search implementation with arbitrary sorting, plus
@@ -553,8 +554,8 @@ public class IndexSearcher {
       // no segments
       scoreMode = ScoreMode.COMPLETE;
     }
-    final Weight weight = createNormalizedWeight(query, scoreMode);
+    query = rewrite(query);
+    final Weight weight = createWeight(query, scoreMode, 1);
     final List<Future<C>> topDocsFutures = new ArrayList<>(leafSlices.length);
     for (int i = 0; i < leafSlices.length; ++i) {
       final LeafReaderContext[] leaves = leafSlices[i].leaves;
@@ -651,7 +652,8 @@ public class IndexSearcher {
    * entire index.
    */
   public Explanation explain(Query query, int doc) throws IOException {
-    return explain(createNormalizedWeight(query, ScoreMode.COMPLETE), doc);
+    query = rewrite(query);
+    return explain(createWeight(query, ScoreMode.COMPLETE, 1), doc);
   }
 
   /** Expert: low-level implementation method
@@ -677,18 +679,6 @@ public class IndexSearcher {
     return weight.explain(ctx, deBasedDoc);
   }
 
-  /**
-   * Creates a normalized weight for a top-level {@link Query}.
-   * The query is rewritten by this method and {@link Query#createWeight} called,
-   * afterwards the {@link Weight} is normalized. The returned {@code Weight}
-   * can then directly be used to get a {@link Scorer}.
-   * @lucene.internal
-   */
-  public Weight createNormalizedWeight(Query query, ScoreMode scoreMode) throws IOException {
-    query = rewrite(query);
-    return createWeight(query, scoreMode, 1f);
-  }
-
   /**
    * Creates a {@link Weight} for the given query, potentially adding caching
   * if possible and configured.
|
@ -60,7 +60,8 @@ public abstract class QueryRescorer extends Rescorer {
|
||||||
|
|
||||||
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
|
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
|
||||||
|
|
||||||
Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
|
Query rewritten = searcher.rewrite(query);
|
||||||
|
Weight weight = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
|
||||||
|
|
||||||
// Now merge sort docIDs from hits, with reader's leaves:
|
// Now merge sort docIDs from hits, with reader's leaves:
|
||||||
int hitUpto = 0;
|
int hitUpto = 0;
|
||||||
|
|
|
@@ -453,8 +453,8 @@
  * <p>Assuming we are not sorting (since sorting doesn't affect the raw Lucene score),
  * we call one of the search methods of the IndexSearcher, passing in the
  * {@link org.apache.lucene.search.Weight Weight} object created by
- * {@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query,ScoreMode)
- * IndexSearcher.createNormalizedWeight(Query,boolean)} and the number of results we want.
+ * {@link org.apache.lucene.search.IndexSearcher#createWeight(org.apache.lucene.search.Query,ScoreMode,float)
+ * IndexSearcher.createWeight(Query,ScoreMode,float)} and the number of results we want.
  * This method returns a {@link org.apache.lucene.search.TopDocs TopDocs} object,
  * which is an internal collection of search results. The IndexSearcher creates
  * a {@link org.apache.lucene.search.TopScoreDocCollector TopScoreDocCollector} and
@@ -1332,4 +1332,85 @@ public class TestAddIndexes extends LuceneTestCase {
     assertEquals("cannot change index sort from <int: \"foo\"> to <string: \"foo\">", message);
     IOUtils.close(r1, dir1, w2, dir2);
   }
+
+  public void testAddIndexesDVUpdateSameSegmentName() throws Exception {
+    Directory dir1 = newDirectory();
+    IndexWriterConfig iwc1 = newIndexWriterConfig(new MockAnalyzer(random()));
+    IndexWriter w1 = new IndexWriter(dir1, iwc1);
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    doc.add(new NumericDocValuesField("soft_delete", 1));
+    w1.addDocument(doc);
+    w1.flush();
+
+    w1.updateDocValues(new Term("id", "1"), new NumericDocValuesField("soft_delete", 1));
+    w1.commit();
+    w1.close();
+
+    IndexWriterConfig iwc2 = newIndexWriterConfig(new MockAnalyzer(random()));
+    Directory dir2 = newDirectory();
+    IndexWriter w2 = new IndexWriter(dir2, iwc2);
+    w2.addIndexes(dir1);
+    w2.commit();
+    w2.close();
+
+    if (VERBOSE) {
+      System.out.println("\nTEST: now open w3");
+    }
+    IndexWriterConfig iwc3 = newIndexWriterConfig(new MockAnalyzer(random()));
+    if (VERBOSE) {
+      iwc3.setInfoStream(System.out);
+    }
+    IndexWriter w3 = new IndexWriter(dir2, iwc3);
+    w3.close();
+
+    iwc3 = newIndexWriterConfig(new MockAnalyzer(random()));
+    w3 = new IndexWriter(dir2, iwc3);
+    w3.close();
+    dir1.close();
+    dir2.close();
+  }
+
+  public void testAddIndexesDVUpdateNewSegmentName() throws Exception {
+    Directory dir1 = newDirectory();
+    IndexWriterConfig iwc1 = newIndexWriterConfig(new MockAnalyzer(random()));
+    IndexWriter w1 = new IndexWriter(dir1, iwc1);
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    doc.add(new NumericDocValuesField("soft_delete", 1));
+    w1.addDocument(doc);
+    w1.flush();
+
+    w1.updateDocValues(new Term("id", "1"), new NumericDocValuesField("soft_delete", 1));
+    w1.commit();
+    w1.close();
+
+    IndexWriterConfig iwc2 = newIndexWriterConfig(new MockAnalyzer(random()));
+    Directory dir2 = newDirectory();
+    IndexWriter w2 = new IndexWriter(dir2, iwc2);
+    w2.addDocument(new Document());
+    w2.commit();
+
+    w2.addIndexes(dir1);
+    w2.commit();
+    w2.close();
+
+    if (VERBOSE) {
+      System.out.println("\nTEST: now open w3");
+    }
+    IndexWriterConfig iwc3 = newIndexWriterConfig(new MockAnalyzer(random()));
+    if (VERBOSE) {
+      iwc3.setInfoStream(System.out);
+    }
+    IndexWriter w3 = new IndexWriter(dir2, iwc3);
+    w3.close();
+
+    iwc3 = newIndexWriterConfig(new MockAnalyzer(random()));
+    w3 = new IndexWriter(dir2, iwc3);
+    w3.close();
+    dir1.close();
+    dir2.close();
+  }
 }
@@ -43,6 +43,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
@@ -1013,6 +1014,59 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
       DirectoryReader.openIfChanged(r);
     });
   }
+
+  public void testReuseUnchangedLeafReaderOnDVUpdate() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    indexWriterConfig.setMergePolicy(NoMergePolicy.INSTANCE);
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    doc.add(new NumericDocValuesField("some_docvalue", 2));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "2", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    writer.commit();
+    DirectoryReader reader = DirectoryReader.open(dir);
+    assertEquals(2, reader.numDocs());
+    assertEquals(2, reader.maxDoc());
+    assertEquals(0, reader.numDeletedDocs());
+
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "2", Field.Store.YES));
+    writer.updateDocValues(new Term("id", "1"), new NumericDocValuesField("some_docvalue", 1));
+    writer.commit();
+    DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
+    assertNotSame(newReader, reader);
+    reader.close();
+    reader = newReader;
+    assertEquals(2, reader.numDocs());
+    assertEquals(2, reader.maxDoc());
+    assertEquals(0, reader.numDeletedDocs());
+
+    doc = new Document();
+    doc.add(new StringField("id", "3", Field.Store.YES));
+    doc.add(new StringField("version", "3", Field.Store.YES));
+    writer.updateDocument(new Term("id", "3"), doc);
+    writer.commit();
+
+    newReader = DirectoryReader.openIfChanged(reader);
+    assertNotSame(newReader, reader);
+    assertEquals(2, newReader.getSequentialSubReaders().size());
+    assertEquals(1, reader.getSequentialSubReaders().size());
+    assertSame(reader.getSequentialSubReaders().get(0), newReader.getSequentialSubReaders().get(0));
+    reader.close();
+    reader = newReader;
+    assertEquals(3, reader.numDocs());
+    assertEquals(3, reader.maxDoc());
+    assertEquals(0, reader.numDeletedDocs());
+    IOUtils.close(reader, writer, dir);
+  }
 }
@@ -0,0 +1,199 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestSoftDeletesDirectoryReaderWrapper extends LuceneTestCase {
+
+  public void testReuseUnchangedLeafReader() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    String softDeletesField = "soft_delete";
+    indexWriterConfig.setSoftDeletesField(softDeletesField);
+    indexWriterConfig.setMergePolicy(NoMergePolicy.INSTANCE);
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "2", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    writer.commit();
+    DirectoryReader reader = new SoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
+    assertEquals(2, reader.numDocs());
+    assertEquals(2, reader.maxDoc());
+    assertEquals(0, reader.numDeletedDocs());
+
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "2", Field.Store.YES));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+
+    doc = new Document();
+    doc.add(new StringField("id", "3", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    writer.commit();
+
+    DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
+    assertNotSame(newReader, reader);
+    reader.close();
+    reader = newReader;
+    assertEquals(3, reader.numDocs());
+    assertEquals(4, reader.maxDoc());
+    assertEquals(1, reader.numDeletedDocs());
+
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "3", Field.Store.YES));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+    writer.commit();
+
+    newReader = DirectoryReader.openIfChanged(reader);
+    assertNotSame(newReader, reader);
+    assertEquals(3, newReader.getSequentialSubReaders().size());
+    assertEquals(2, reader.getSequentialSubReaders().size());
+    assertSame(reader.getSequentialSubReaders().get(0), newReader.getSequentialSubReaders().get(0));
+    assertNotSame(reader.getSequentialSubReaders().get(1), newReader.getSequentialSubReaders().get(1));
+    assertTrue(isWrapped(reader.getSequentialSubReaders().get(0)));
+    // last one has no soft deletes
+    assertFalse(isWrapped(reader.getSequentialSubReaders().get(1)));
+
+    assertTrue(isWrapped(newReader.getSequentialSubReaders().get(0)));
+    assertTrue(isWrapped(newReader.getSequentialSubReaders().get(1)));
+    // last one has no soft deletes
+    assertFalse(isWrapped(newReader.getSequentialSubReaders().get(2)));
+    reader.close();
+    reader = newReader;
+    assertEquals(3, reader.numDocs());
+    assertEquals(5, reader.maxDoc());
+    assertEquals(2, reader.numDeletedDocs());
+    IOUtils.close(reader, writer, dir);
+  }
+
+  private boolean isWrapped(LeafReader reader) {
+    return reader instanceof SoftDeletesDirectoryReaderWrapper.SoftDeletesFilterLeafReader;
+  }
+
+  public void testMixSoftAndHardDeletes() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    String softDeletesField = "soft_delete";
+    indexWriterConfig.setSoftDeletesField(softDeletesField);
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+    Set<Integer> uniqueDocs = new HashSet<>();
+    for (int i = 0; i < 100; i++) {
+      int docId = random().nextInt(5);
+      uniqueDocs.add(docId);
+      Document doc = new Document();
+      doc.add(new StringField("id", String.valueOf(docId), Field.Store.YES));
+      if (docId % 2 == 0) {
+        writer.updateDocument(new Term("id", String.valueOf(docId)), doc);
+      } else {
+        writer.softUpdateDocument(new Term("id", String.valueOf(docId)), doc,
+            new NumericDocValuesField(softDeletesField, 0));
+      }
+    }
+
+    writer.commit();
+    writer.close();
+    DirectoryReader reader = new SoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
+    assertEquals(uniqueDocs.size(), reader.numDocs());
+    IndexSearcher searcher = new IndexSearcher(reader);
+    for (Integer docId : uniqueDocs) {
+      assertEquals(1, searcher.search(new TermQuery(new Term("id", docId.toString())), 1).totalHits);
+    }
+
+    IOUtils.close(reader, dir);
+  }
+
+  public void testReaderCacheKey() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
+    String softDeletesField = "soft_delete";
+    indexWriterConfig.setSoftDeletesField(softDeletesField);
+    indexWriterConfig.setMergePolicy(NoMergePolicy.INSTANCE);
+    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
+
+    Document doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "2", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    writer.commit();
+    DirectoryReader reader = new SoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), softDeletesField);
+    IndexReader.CacheHelper readerCacheHelper = reader.leaves().get(0).reader().getReaderCacheHelper();
+    AtomicInteger leafCalled = new AtomicInteger(0);
+    AtomicInteger dirCalled = new AtomicInteger(0);
+    readerCacheHelper.addClosedListener(key -> {
+      leafCalled.incrementAndGet();
+      assertSame(key, readerCacheHelper.getKey());
+    });
+    IndexReader.CacheHelper dirReaderCacheHelper = reader.getReaderCacheHelper();
+    dirReaderCacheHelper.addClosedListener(key -> {
+      dirCalled.incrementAndGet();
+      assertSame(key, dirReaderCacheHelper.getKey());
+    });
+    assertEquals(2, reader.numDocs());
+    assertEquals(2, reader.maxDoc());
+    assertEquals(0, reader.numDeletedDocs());
+
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.YES));
+    doc.add(new StringField("version", "2", Field.Store.YES));
+    writer.softUpdateDocument(new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
+
+    doc = new Document();
+    doc.add(new StringField("id", "3", Field.Store.YES));
+    doc.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(doc);
+    writer.commit();
+    assertEquals(0, leafCalled.get());
+    assertEquals(0, dirCalled.get());
+    DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
+    assertEquals(0, leafCalled.get());
+    assertEquals(0, dirCalled.get());
+    assertNotSame(newReader.getReaderCacheHelper().getKey(), reader.getReaderCacheHelper().getKey());
+    assertNotSame(newReader, reader);
+    reader.close();
+    reader = newReader;
+    assertEquals(1, dirCalled.get());
+    assertEquals(1, leafCalled.get());
+    IOUtils.close(reader, writer, dir);
+  }
+}
@@ -110,7 +110,11 @@ public class TestStressNRT extends LuceneTestCase {
     final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())), useSoftDeletes);
     writer.setDoRandomForceMergeAssert(false);
     writer.commit();
-    reader = useSoftDeletes ? writer.getReader() : DirectoryReader.open(dir);
+    if (useSoftDeletes) {
+      reader = new SoftDeletesDirectoryReaderWrapper(DirectoryReader.open(dir), writer.w.getConfig().getSoftDeletesField());
+    } else {
+      reader = DirectoryReader.open(dir);
+    }
 
     for (int i=0; i<nWriteThreads; i++) {
       Thread thread = new Thread("WRITER"+i) {
@@ -136,7 +140,7 @@ public class TestStressNRT extends LuceneTestCase {
               }
 
               DirectoryReader newReader;
-              if (rand.nextInt(100) < softCommitPercent || useSoftDeletes) {
+              if (rand.nextInt(100) < softCommitPercent) {
                 // assertU(h.commit("softCommit","true"));
                 if (random().nextBoolean()) {
                   if (VERBOSE) {
@@ -44,7 +44,10 @@ public class TestApproximationSearchEquivalence extends SearchEquivalenceTestBase {
 
   public void testNestedConjunction() throws Exception {
     Term t1 = randomTerm();
-    Term t2 = randomTerm();
+    Term t2;
+    do {
+      t2 = randomTerm();
+    } while (t1.equals(t2));
     Term t3 = randomTerm();
     TermQuery q1 = new TermQuery(t1);
     TermQuery q2 = new TermQuery(t2);
@@ -88,7 +91,10 @@ public class TestApproximationSearchEquivalence extends SearchEquivalenceTestBase {
 
   public void testNestedDisjunction() throws Exception {
     Term t1 = randomTerm();
-    Term t2 = randomTerm();
+    Term t2;
+    do {
+      t2 = randomTerm();
+    } while (t1.equals(t2));
     Term t3 = randomTerm();
     TermQuery q1 = new TermQuery(t1);
     TermQuery q2 = new TermQuery(t2);
@@ -115,7 +121,10 @@ public class TestApproximationSearchEquivalence extends SearchEquivalenceTestBase {
 
   public void testDisjunctionInConjunction() throws Exception {
     Term t1 = randomTerm();
-    Term t2 = randomTerm();
+    Term t2;
+    do {
+      t2 = randomTerm();
+    } while (t1.equals(t2));
     Term t3 = randomTerm();
     TermQuery q1 = new TermQuery(t1);
     TermQuery q2 = new TermQuery(t2);
@@ -142,7 +151,10 @@ public class TestApproximationSearchEquivalence extends SearchEquivalenceTestBase {
 
   public void testConjunctionInDisjunction() throws Exception {
     Term t1 = randomTerm();
-    Term t2 = randomTerm();
+    Term t2;
+    do {
+      t2 = randomTerm();
+    } while (t1.equals(t2));
     Term t3 = randomTerm();
     TermQuery q1 = new TermQuery(t1);
     TermQuery q2 = new TermQuery(t2);
@@ -203,7 +215,10 @@ public class TestApproximationSearchEquivalence extends SearchEquivalenceTestBase {
 
   public void testNestedExclusion() throws Exception {
     Term t1 = randomTerm();
-    Term t2 = randomTerm();
+    Term t2;
+    do {
+      t2 = randomTerm();
+    } while (t1.equals(t2));
     Term t3 = randomTerm();
     TermQuery q1 = new TermQuery(t1);
     TermQuery q2 = new TermQuery(t2);
@@ -253,7 +268,10 @@ public class TestApproximationSearchEquivalence extends SearchEquivalenceTestBase {
 
   public void testReqOpt() throws Exception {
     Term t1 = randomTerm();
-    Term t2 = randomTerm();
+    Term t2;
+    do {
+      t2 = randomTerm();
+    } while (t1.equals(t2));
     Term t3 = randomTerm();
     TermQuery q1 = new TermQuery(t1);
     TermQuery q2 = new TermQuery(t2);
@@ -186,7 +186,7 @@ public class TestBooleanOr extends LuceneTestCase {
     bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
     bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
 
-    Weight w = s.createNormalizedWeight(bq.build(), ScoreMode.COMPLETE);
+    Weight w = s.createWeight(s.rewrite(bq.build()), ScoreMode.COMPLETE, 1);
 
     assertEquals(1, s.getIndexReader().leaves().size());
     BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0));
@ -313,7 +313,7 @@ public class TestBooleanQuery extends LuceneTestCase {
|
||||||
q.add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD));
|
q.add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD));
|
||||||
}
|
}
|
||||||
|
|
||||||
Weight weight = s.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
|
Weight weight = s.createWeight(s.rewrite(q.build()), ScoreMode.COMPLETE, 1);
|
||||||
|
|
||||||
Scorer scorer = weight.scorer(s.leafContexts.get(0));
|
Scorer scorer = weight.scorer(s.leafContexts.get(0));
|
||||||
|
|
||||||
|
@ -331,7 +331,7 @@ public class TestBooleanQuery extends LuceneTestCase {
|
||||||
// verify exact match:
|
// verify exact match:
|
||||||
for(int iter2=0;iter2<10;iter2++) {
|
for(int iter2=0;iter2<10;iter2++) {
|
||||||
|
|
||||||
weight = s.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
|
weight = s.createWeight(s.rewrite(q.build()), ScoreMode.COMPLETE, 1);
|
||||||
scorer = weight.scorer(s.leafContexts.get(0));
|
scorer = weight.scorer(s.leafContexts.get(0));
|
||||||
|
|
||||||
if (VERBOSE) {
|
if (VERBOSE) {
|
||||||
|
@ -617,7 +617,7 @@ public class TestBooleanQuery extends LuceneTestCase {
|
||||||
q.add(pq, Occur.MUST);
|
q.add(pq, Occur.MUST);
|
||||||
q.add(new TermQuery(new Term("field", "c")), Occur.FILTER);
|
q.add(new TermQuery(new Term("field", "c")), Occur.FILTER);
|
||||||
|
|
||||||
final Weight weight = searcher.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
|
final Weight weight = searcher.createWeight(searcher.rewrite(q.build()), ScoreMode.COMPLETE, 1);
|
||||||
final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
|
final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
|
||||||
assertTrue(scorer instanceof ConjunctionScorer);
|
 assertTrue(scorer instanceof ConjunctionScorer);
 assertNotNull(scorer.twoPhaseIterator());

@@ -646,7 +646,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 q.add(pq, Occur.SHOULD);
 q.add(new TermQuery(new Term("field", "c")), Occur.SHOULD);

-final Weight weight = searcher.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
+final Weight weight = searcher.createWeight(searcher.rewrite(q.build()), ScoreMode.COMPLETE, 1);
 final Scorer scorer = weight.scorer(reader.leaves().get(0));
 assertTrue(scorer instanceof DisjunctionScorer);
 assertNotNull(scorer.twoPhaseIterator());

@@ -677,7 +677,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 q.add(pq, Occur.SHOULD);
 q.add(new TermQuery(new Term("field", "d")), Occur.SHOULD);

-final Weight weight = searcher.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
+final Weight weight = searcher.createWeight(searcher.rewrite(q.build()), ScoreMode.COMPLETE, 1);
 final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
 assertTrue(scorer instanceof ExactPhraseScorer);
 assertNotNull(scorer.twoPhaseIterator());

@@ -706,7 +706,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 q.add(pq, Occur.SHOULD);
 q.add(new TermQuery(new Term("field", "c")), Occur.MUST_NOT);

-final Weight weight = searcher.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
+final Weight weight = searcher.createWeight(searcher.rewrite(q.build()), ScoreMode.COMPLETE, 1);
 final Scorer scorer = weight.scorer(reader.leaves().get(0));
 assertTrue(scorer instanceof ReqExclScorer);
 assertNotNull(scorer.twoPhaseIterator());

@@ -735,7 +735,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 q.add(pq, Occur.MUST);
 q.add(new TermQuery(new Term("field", "c")), Occur.SHOULD);

-final Weight weight = searcher.createNormalizedWeight(q.build(), ScoreMode.COMPLETE);
+final Weight weight = searcher.createWeight(searcher.rewrite(q.build()), ScoreMode.COMPLETE, 1);
 final Scorer scorer = weight.scorer(reader.leaves().get(0));
 assertTrue(scorer instanceof ReqOptSumScorer);
 assertNotNull(scorer.twoPhaseIterator());

@@ -768,11 +768,11 @@ public class TestBooleanQuery extends LuceneTestCase {
 BooleanQuery bq = bqBuilder.build();

 Set<Term> scoringTerms = new HashSet<>();
-searcher.createNormalizedWeight(bq, ScoreMode.COMPLETE).extractTerms(scoringTerms);
+searcher.createWeight(searcher.rewrite(bq), ScoreMode.COMPLETE, 1).extractTerms(scoringTerms);
 assertEquals(new HashSet<>(Arrays.asList(a, b)), scoringTerms);

 Set<Term> matchingTerms = new HashSet<>();
-searcher.createNormalizedWeight(bq, ScoreMode.COMPLETE_NO_SCORES).extractTerms(matchingTerms);
+searcher.createWeight(searcher.rewrite(bq), ScoreMode.COMPLETE_NO_SCORES, 1).extractTerms(matchingTerms);
 assertEquals(new HashSet<>(Arrays.asList(a, b, c)), matchingTerms);
 }
 }

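Note: every hunk above, and most hunks below, apply the same mechanical substitution from LUCENE-8242: the removed one-step createNormalizedWeight() call becomes an explicit rewrite followed by createWeight() with an explicit boost of 1. A minimal self-contained sketch of the pattern, assuming the Lucene 8.x search API; the class and variable names are illustrative, not taken from any one hunk:

  import java.io.IOException;
  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.ScoreMode;
  import org.apache.lucene.search.Weight;

  class CreateWeightMigration {
    // Before (removed by LUCENE-8242):
    //   Weight w = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
    // After: rewrite explicitly, then build the Weight with a boost of 1.
    static Weight weight(IndexSearcher searcher, Query query) throws IOException {
      Query rewritten = searcher.rewrite(query);
      return searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
    }
  }
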
@@ -196,7 +196,7 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
 bq1.add(new TermQuery(new Term(F1, "lucene")), Occur.SHOULD);
 bq1.add(new PhraseQuery(F2, "search", "engine"), Occur.SHOULD);

-Weight w1 = scorerSearcher.createNormalizedWeight(bq1.build(), ScoreMode.COMPLETE);
+Weight w1 = scorerSearcher.createWeight(scorerSearcher.rewrite(bq1.build()), ScoreMode.COMPLETE, 1);
 Scorer s1 = w1.scorer(reader.leaves().get(0));
 assertEquals(0, s1.iterator().nextDoc());
 assertEquals(2, s1.getChildren().size());

@@ -205,7 +205,7 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
 bq2.add(new TermQuery(new Term(F1, "lucene")), Occur.SHOULD);
 bq2.add(new PhraseQuery(F2, "search", "library"), Occur.SHOULD);

-Weight w2 = scorerSearcher.createNormalizedWeight(bq2.build(), ScoreMode.COMPLETE);
+Weight w2 = scorerSearcher.createWeight(scorerSearcher.rewrite(bq2.build()), ScoreMode.COMPLETE, 1);
 Scorer s2 = w2.scorer(reader.leaves().get(0));
 assertEquals(0, s2.iterator().nextDoc());
 assertEquals(1, s2.getChildren().size());

@@ -218,7 +218,7 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
 bq.add(new PhraseQuery(F2, "search", "library"), Occur.SHOULD);
 bq.setMinimumNumberShouldMatch(2);

-Weight w = scorerSearcher.createNormalizedWeight(bq.build(), ScoreMode.COMPLETE);
+Weight w = scorerSearcher.createWeight(scorerSearcher.rewrite(bq.build()), ScoreMode.COMPLETE, 1);
 Scorer s = w.scorer(reader.leaves().get(0));
 assertEquals(0, s.iterator().nextDoc());
 assertEquals(2, s.getChildren().size());

@@ -93,7 +93,7 @@ public class TestBooleanRewrites extends LuceneTestCase {
 BooleanQuery.Builder query2 = new BooleanQuery.Builder();
 query2.add(new TermQuery(new Term("field", "a")), Occur.FILTER);
 query2.add(new TermQuery(new Term("field", "b")), Occur.SHOULD);
-final Weight weight = searcher.createNormalizedWeight(query2.build(), ScoreMode.COMPLETE);
+final Weight weight = searcher.createWeight(searcher.rewrite(query2.build()), ScoreMode.COMPLETE, 1);
 final Scorer scorer = weight.scorer(reader.leaves().get(0));
 assertEquals(0, scorer.iterator().nextDoc());
 assertTrue(scorer.getClass().getName(), scorer instanceof FilterScorer);

@@ -172,7 +172,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 .build();

 // no scores -> term scorer
-Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1);
 BulkScorer scorer = ((BooleanWeight) weight).booleanScorer(ctx);
 assertTrue(scorer instanceof DefaultBulkScorer); // term scorer

@@ -181,7 +181,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 .add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD) // existing term
 .add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD) // missing term
 .build();
-weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
 scorer = ((BooleanWeight) weight).booleanScorer(ctx);
 assertTrue(scorer instanceof DefaultBulkScorer); // term scorer

@@ -210,7 +210,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 .add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD)
 .add(new TermQuery(new Term("foo", "bar")), Occur.MUST_NOT)
 .build();
-Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
 BulkScorer scorer = ((BooleanWeight) weight).booleanScorer(ctx);
 assertTrue(scorer instanceof ReqExclBulkScorer);

@@ -219,7 +219,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 .add(new MatchAllDocsQuery(), Occur.SHOULD)
 .add(new TermQuery(new Term("foo", "bar")), Occur.MUST_NOT)
 .build();
-weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
 scorer = ((BooleanWeight) weight).booleanScorer(ctx);
 assertTrue(scorer instanceof ReqExclBulkScorer);

@@ -227,7 +227,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 .add(new TermQuery(new Term("foo", "baz")), Occur.MUST)
 .add(new TermQuery(new Term("foo", "bar")), Occur.MUST_NOT)
 .build();
-weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
 scorer = ((BooleanWeight) weight).booleanScorer(ctx);
 assertTrue(scorer instanceof ReqExclBulkScorer);

@@ -235,7 +235,7 @@ public class TestBooleanScorer extends LuceneTestCase {
 .add(new TermQuery(new Term("foo", "baz")), Occur.FILTER)
 .add(new TermQuery(new Term("foo", "bar")), Occur.MUST_NOT)
 .build();
-weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
 scorer = ((BooleanWeight) weight).booleanScorer(ctx);
 assertTrue(scorer instanceof ReqExclBulkScorer);

@@ -201,9 +201,9 @@ public class TestConstantScoreQuery extends LuceneTestCase {

 PhraseQuery pq = new PhraseQuery("field", "a", "b");

-ConstantScoreQuery q = new ConstantScoreQuery(pq);
+Query q = searcher.rewrite(new ConstantScoreQuery(pq));

-final Weight weight = searcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+final Weight weight = searcher.createWeight(q, ScoreMode.COMPLETE, 1);
 final Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
 assertNotNull(scorer.twoPhaseIterator());

@@ -215,14 +215,14 @@ public class TestConstantScoreQuery extends LuceneTestCase {
 public void testExtractTerms() throws Exception {
 final IndexSearcher searcher = newSearcher(new MultiReader());
 final TermQuery termQuery = new TermQuery(new Term("foo", "bar"));
-final ConstantScoreQuery csq = new ConstantScoreQuery(termQuery);
+final Query csq = searcher.rewrite(new ConstantScoreQuery(termQuery));

 final Set<Term> scoringTerms = new HashSet<>();
-searcher.createNormalizedWeight(csq, ScoreMode.COMPLETE).extractTerms(scoringTerms);
+searcher.createWeight(csq, ScoreMode.COMPLETE, 1).extractTerms(scoringTerms);
 assertEquals(Collections.emptySet(), scoringTerms);

 final Set<Term> matchingTerms = new HashSet<>();
-searcher.createNormalizedWeight(csq, ScoreMode.COMPLETE_NO_SCORES).extractTerms(matchingTerms);
+searcher.createWeight(csq, ScoreMode.COMPLETE_NO_SCORES, 1).extractTerms(matchingTerms);
 assertEquals(Collections.singleton(new Term("foo", "bar")), matchingTerms);
 }
 }

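Note: the two TestConstantScoreQuery hunks above show why the rewrite step cannot be skipped: createWeight() expects a query that has already been rewritten to its final form, and a wrapper such as ConstantScoreQuery may rewrite to a different query class. A hedged sketch of the resulting calling convention, assuming the Lucene 8.x API; the class name, field, and terms are invented for illustration:

  import java.io.IOException;
  import org.apache.lucene.search.ConstantScoreQuery;
  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.PhraseQuery;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.ScoreMode;
  import org.apache.lucene.search.Weight;

  class RewriteBeforeWeight {
    static Weight constantScoreWeight(IndexSearcher searcher) throws IOException {
      Query csq = new ConstantScoreQuery(new PhraseQuery("field", "a", "b"));
      // rewrite() loops until the query reaches a fixed point; only the
      // rewritten form is a legal argument to createWeight().
      Query rewritten = searcher.rewrite(csq);
      return searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
    }
  }
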
@@ -175,7 +175,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {

 QueryUtils.check(random(), dq, s);
 assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
-final Weight dw = s.createNormalizedWeight(dq, ScoreMode.COMPLETE);
+final Weight dw = s.createWeight(s.rewrite(dq), ScoreMode.COMPLETE, 1);
 LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
 final Scorer ds = dw.scorer(context);
 final boolean skipOk = ds.iterator().advance(3) != DocIdSetIterator.NO_MORE_DOCS;

@@ -191,7 +191,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {

 assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
 QueryUtils.check(random(), dq, s);
-final Weight dw = s.createNormalizedWeight(dq, ScoreMode.COMPLETE);
+final Weight dw = s.createWeight(s.rewrite(dq), ScoreMode.COMPLETE, 1);
 LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
 final Scorer ds = dw.scorer(context);
 assertTrue("firsttime skipTo found no match",

@@ -230,7 +230,7 @@ public class TestDocValuesQueries extends LuceneTestCase {
 SortedNumericDocValuesField.newSlowRangeQuery("foo", 2, 4),
 SortedDocValuesField.newSlowRangeQuery("foo", new BytesRef("abc"), new BytesRef("bcd"), random().nextBoolean(), random().nextBoolean()),
 SortedSetDocValuesField.newSlowRangeQuery("foo", new BytesRef("abc"), new BytesRef("bcd"), random().nextBoolean(), random().nextBoolean()))) {
-Weight w = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+Weight w = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
 assertNull(w.scorer(searcher.getIndexReader().leaves().get(0)));
 }
 reader.close();

@@ -67,7 +67,7 @@ public class TestIndexOrDocValuesQuery extends LuceneTestCase {
 .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 2), NumericDocValuesField.newSlowRangeQuery("f2", 2L, 2L)), Occur.MUST)
 .build();

-final Weight w1 = searcher.createNormalizedWeight(q1, ScoreMode.COMPLETE);
+final Weight w1 = searcher.createWeight(searcher.rewrite(q1), ScoreMode.COMPLETE, 1);
 final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0));
 assertNotNull(s1.twoPhaseIterator()); // means we use doc values

@@ -77,7 +77,7 @@ public class TestIndexOrDocValuesQuery extends LuceneTestCase {
 .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), NumericDocValuesField.newSlowRangeQuery("f2", 42L, 42L)), Occur.MUST)
 .build();

-final Weight w2 = searcher.createNormalizedWeight(q2, ScoreMode.COMPLETE);
+final Weight w2 = searcher.createWeight(searcher.rewrite(q2), ScoreMode.COMPLETE, 1);
 final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0));
 assertNull(s2.twoPhaseIterator()); // means we use points

@@ -1141,7 +1141,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
 LRUQueryCache cache = new LRUQueryCache(1, Long.MAX_VALUE, context -> true);

 // test that the bulk scorer is propagated when a scorer should not be cached
-Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES);
+Weight weight = searcher.createWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES, 1);
 weight = new WeightWrapper(weight, scorerCalled, bulkScorerCalled);
 weight = cache.doCache(weight, NEVER_CACHE);
 weight.bulkScorer(leaf);

@@ -1151,7 +1151,7 @@ public class TestLRUQueryCache extends LuceneTestCase {

 // test that the doc id set is computed using the bulk scorer
 bulkScorerCalled.set(false);
-weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES);
+weight = searcher.createWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES, 1);
 weight = new WeightWrapper(weight, scorerCalled, bulkScorerCalled);
 weight = cache.doCache(weight, QueryCachingPolicy.ALWAYS_CACHE);
 weight.scorer(leaf);

@@ -1424,7 +1424,7 @@ public class TestLRUQueryCache extends LuceneTestCase {

 AtomicBoolean scorerCreated = new AtomicBoolean(false);
 Query query = new DummyQuery2(scorerCreated);
-Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1);
 ScorerSupplier supplier = weight.scorerSupplier(searcher.getIndexReader().leaves().get(0));
 assertFalse(scorerCreated.get());
 supplier.get(random().nextLong() & 0x7FFFFFFFFFFFFFFFL);

@@ -118,7 +118,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
 }
 bq.setMinimumNumberShouldMatch(minShouldMatch);

-BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq.build(), ScoreMode.COMPLETE);
+BooleanWeight weight = (BooleanWeight) searcher.createWeight(searcher.rewrite(bq.build()), ScoreMode.COMPLETE, 1);

 switch (mode) {
 case DOC_VALUES:

@@ -1903,7 +1903,7 @@ public class TestPointQueries extends LuceneTestCase {
 upperBound[i] = value[i] + random().nextInt(1);
 }
 Query query = IntPoint.newRangeQuery("point", lowerBound, upperBound);
-Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+Weight weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1);
 Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
 assertEquals(DocIdSetIterator.all(1).getClass(), scorer.iterator().getClass());

@@ -1914,7 +1914,7 @@ public class TestPointQueries extends LuceneTestCase {
 reader = w.getReader();
 searcher = new IndexSearcher(reader);
 searcher.setQueryCache(null);
-weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1);
 scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
 assertFalse(DocIdSetIterator.all(1).getClass().equals(scorer.iterator().getClass()));

@@ -62,7 +62,7 @@ public class TestReqOptSumScorer extends LuceneTestCase {
 .add(new ConstantScoreQuery(new TermQuery(new Term("f", "foo"))), Occur.MUST)
 .add(new ConstantScoreQuery(new TermQuery(new Term("f", "bar"))), Occur.SHOULD)
 .build();
-Weight weight = searcher.createNormalizedWeight(query, ScoreMode.TOP_SCORES);
+Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1);
 LeafReaderContext context = searcher.getIndexReader().leaves().get(0);

 Scorer scorer = weight.scorer(context);

@@ -487,7 +487,7 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {

 FilterDirectoryReader reader = new MyFilterDirectoryReader(nrtReader);
 assertEquals(nrtReader, reader.getDelegate());
-assertEquals(nrtReader, FilterDirectoryReader.unwrap(reader));
+assertEquals(FilterDirectoryReader.unwrap(nrtReader), FilterDirectoryReader.unwrap(reader));

 SearcherManager mgr = new SearcherManager(reader, null);
 for(int i=0;i<10;i++) {

@@ -73,10 +73,10 @@ public class TestTermQuery extends LuceneTestCase {
 IndexSearcher noSeekSearcher = new IndexSearcher(noSeekReader);
 Query query = new TermQuery(new Term("foo", "bar"));
 AssertionError e = expectThrows(AssertionError.class,
-() -> noSeekSearcher.createNormalizedWeight(query, ScoreMode.COMPLETE));
+() -> noSeekSearcher.createWeight(noSeekSearcher.rewrite(query), ScoreMode.COMPLETE, 1));
 assertEquals("no seek", e.getMessage());

-noSeekSearcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES); // no exception
+noSeekSearcher.createWeight(noSeekSearcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1); // no exception
 IndexSearcher searcher = new IndexSearcher(reader);
 // use a collector rather than searcher.count() which would just read the
 // doc freq instead of creating a scorer

@@ -82,7 +82,7 @@ public class TestTermScorer extends LuceneTestCase {
 Term allTerm = new Term(FIELD, "all");
 TermQuery termQuery = new TermQuery(allTerm);

-Weight weight = indexSearcher.createNormalizedWeight(termQuery, ScoreMode.COMPLETE);
+Weight weight = indexSearcher.createWeight(termQuery, ScoreMode.COMPLETE, 1);
 assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
 LeafReaderContext context = (LeafReaderContext)indexSearcher.getTopReaderContext();
 BulkScorer ts = weight.bulkScorer(context);

@@ -133,7 +133,7 @@ public class TestTermScorer extends LuceneTestCase {
 Term allTerm = new Term(FIELD, "all");
 TermQuery termQuery = new TermQuery(allTerm);

-Weight weight = indexSearcher.createNormalizedWeight(termQuery, ScoreMode.COMPLETE);
+Weight weight = indexSearcher.createWeight(termQuery, ScoreMode.COMPLETE, 1);
 assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
 LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
 Scorer ts = weight.scorer(context);

@@ -150,7 +150,7 @@ public class TestTermScorer extends LuceneTestCase {
 Term allTerm = new Term(FIELD, "all");
 TermQuery termQuery = new TermQuery(allTerm);

-Weight weight = indexSearcher.createNormalizedWeight(termQuery, ScoreMode.COMPLETE);
+Weight weight = indexSearcher.createWeight(termQuery, ScoreMode.COMPLETE, 1);
 assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
 LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
 Scorer ts = weight.scorer(context);

@@ -199,12 +199,12 @@ public class TestTermScorer extends LuceneTestCase {
 // We don't use newSearcher because it sometimes runs checkIndex which loads norms
 IndexSearcher indexSearcher = new IndexSearcher(forbiddenNorms);

-Weight weight = indexSearcher.createNormalizedWeight(termQuery, ScoreMode.COMPLETE);
+Weight weight = indexSearcher.createWeight(termQuery, ScoreMode.COMPLETE, 1);
 expectThrows(AssertionError.class, () -> {
 weight.scorer(forbiddenNorms.getContext()).iterator().nextDoc();
 });

-Weight weight2 = indexSearcher.createNormalizedWeight(termQuery, ScoreMode.COMPLETE_NO_SCORES);
+Weight weight2 = indexSearcher.createWeight(termQuery, ScoreMode.COMPLETE_NO_SCORES, 1);
 // should not fail this time since norms are not necessary
 weight2.scorer(forbiddenNorms.getContext()).iterator().nextDoc();
 }

@@ -316,7 +316,7 @@ public class TestTopDocsMerge extends LuceneTestCase {
 }

 // ... then all shards:
-final Weight w = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+final Weight w = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);

 final TopDocs[] shardHits;
 if (sort == null) {

@@ -92,7 +92,7 @@ public class TestWANDScorer extends LuceneTestCase {
 .build();

 Scorer scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));

 assertEquals(0, scorer.iterator().nextDoc());

@@ -113,7 +113,7 @@ public class TestWANDScorer extends LuceneTestCase {
 assertEquals(DocIdSetIterator.NO_MORE_DOCS, scorer.iterator().nextDoc());

 scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));
 scorer.setMinCompetitiveScore(4);

@@ -126,7 +126,7 @@ public class TestWANDScorer extends LuceneTestCase {
 assertEquals(DocIdSetIterator.NO_MORE_DOCS, scorer.iterator().nextDoc());

 scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));

 assertEquals(0, scorer.iterator().nextDoc());

@@ -147,7 +147,7 @@ public class TestWANDScorer extends LuceneTestCase {
 .build();

 scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));

 assertEquals(3, scorer.iterator().nextDoc());

@@ -159,7 +159,7 @@ public class TestWANDScorer extends LuceneTestCase {
 assertEquals(DocIdSetIterator.NO_MORE_DOCS, scorer.iterator().nextDoc());

 scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));

 scorer.setMinCompetitiveScore(2);

@@ -177,7 +177,7 @@ public class TestWANDScorer extends LuceneTestCase {
 .build();

 scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));

 assertEquals(0, scorer.iterator().nextDoc());

@@ -192,7 +192,7 @@ public class TestWANDScorer extends LuceneTestCase {
 assertEquals(DocIdSetIterator.NO_MORE_DOCS, scorer.iterator().nextDoc());

 scorer = searcher
-.createNormalizedWeight(query, ScoreMode.TOP_SCORES)
+.createWeight(searcher.rewrite(query), ScoreMode.TOP_SCORES, 1)
 .scorer(searcher.getIndexReader().leaves().get(0));

 scorer.setMinCompetitiveScore(3);

@@ -194,7 +194,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
 */
 public void testSpanNearScorerSkipTo1() throws Exception {
 SpanNearQuery q = makeQuery();
-Weight w = searcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+Weight w = searcher.createWeight(searcher.rewrite(q), ScoreMode.COMPLETE, 1);
 IndexReaderContext topReaderContext = searcher.getTopReaderContext();
 LeafReaderContext leave = topReaderContext.leaves().get(0);
 Scorer s = w.scorer(leave);

@@ -84,7 +84,7 @@ class DrillSidewaysQuery extends Query {
 final Weight baseWeight = baseQuery.createWeight(searcher, scoreMode, boost);
 final Weight[] drillDowns = new Weight[drillDownQueries.length];
 for(int dim=0;dim<drillDownQueries.length;dim++) {
-drillDowns[dim] = searcher.createNormalizedWeight(drillDownQueries[dim], ScoreMode.COMPLETE_NO_SCORES);
+drillDowns[dim] = searcher.createWeight(searcher.rewrite(drillDownQueries[dim]), ScoreMode.COMPLETE_NO_SCORES, 1);
 }

 return new Weight(DrillSidewaysQuery.this) {

@@ -102,7 +102,7 @@ public class DoubleRangeFacetCounts extends RangeFacetCounts {
 final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(hits.context);
 final IndexSearcher searcher = new IndexSearcher(topLevelContext);
 searcher.setQueryCache(null);
-final Weight fastMatchWeight = searcher.createNormalizedWeight(fastMatchQuery, ScoreMode.COMPLETE_NO_SCORES);
+final Weight fastMatchWeight = searcher.createWeight(searcher.rewrite(fastMatchQuery), ScoreMode.COMPLETE_NO_SCORES, 1);
 Scorer s = fastMatchWeight.scorer(hits.context);
 if (s == null) {
 continue;

@@ -85,7 +85,7 @@ public class LongRangeFacetCounts extends RangeFacetCounts {
 final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(hits.context);
 final IndexSearcher searcher = new IndexSearcher(topLevelContext);
 searcher.setQueryCache(null);
-final Weight fastMatchWeight = searcher.createNormalizedWeight(fastMatchQuery, ScoreMode.COMPLETE_NO_SCORES);
+final Weight fastMatchWeight = searcher.createWeight(searcher.rewrite(fastMatchQuery), ScoreMode.COMPLETE_NO_SCORES, 1);
 Scorer s = fastMatchWeight.scorer(hits.context);
 if (s == null) {
 continue;

@@ -172,7 +172,8 @@ public class GroupingSearch {

 protected TopGroups<?> groupByDocBlock(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException {
 int topN = groupOffset + groupLimit;
-final Weight groupEndDocs = searcher.createNormalizedWeight(this.groupEndDocs, ScoreMode.COMPLETE_NO_SCORES);
+final Query endDocsQuery = searcher.rewrite(this.groupEndDocs);
+final Weight groupEndDocs = searcher.createWeight(endDocsQuery, ScoreMode.COMPLETE_NO_SCORES, 1);
 BlockGroupingCollector c = new BlockGroupingCollector(groupSort, topN, includeScores, groupEndDocs);
 searcher.search(query, c);
 int topNInsideGroup = groupDocsOffset + groupDocsLimit;

@@ -1042,7 +1042,8 @@ public class TestGrouping extends LuceneTestCase {
 }

 final boolean needsScores = getScores || getMaxScores || docSort == null;
-final BlockGroupingCollector c3 = new BlockGroupingCollector(groupSort, groupOffset+topNGroups, needsScores, sBlocks.createNormalizedWeight(lastDocInBlock, ScoreMode.COMPLETE_NO_SCORES));
+final BlockGroupingCollector c3 = new BlockGroupingCollector(groupSort, groupOffset+topNGroups, needsScores,
+sBlocks.createWeight(sBlocks.rewrite(lastDocInBlock), ScoreMode.COMPLETE_NO_SCORES, 1));
 final AllGroupsCollector<BytesRef> allGroupsCollector2;
 final Collector c4;
 if (doAllGroups) {

@@ -1163,7 +1164,7 @@ public class TestGrouping extends LuceneTestCase {
 System.out.println("TEST: " + subSearchers.length + " shards: " + Arrays.toString(subSearchers) + " canUseIDV=" + canUseIDV);
 }
 // Run 1st pass collector to get top groups per shard
-final Weight w = topSearcher.createNormalizedWeight(query, getScores || getMaxScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES);
+final Weight w = topSearcher.createWeight(topSearcher.rewrite(query), getScores || getMaxScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES, 1);
 final List<Collection<SearchGroup<BytesRef>>> shardGroups = new ArrayList<>();
 List<FirstPassGroupingCollector<?>> firstPassGroupingCollectors = new ArrayList<>();
 FirstPassGroupingCollector<?> firstPassCollector = null;

@@ -129,7 +129,7 @@ public final class QueryTermExtractor
 else {
 HashSet<Term> nonWeightedTerms = new HashSet<>();
 try {
-EMPTY_INDEXSEARCHER.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES).extractTerms(nonWeightedTerms);
+EMPTY_INDEXSEARCHER.createWeight(EMPTY_INDEXSEARCHER.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1).extractTerms(nonWeightedTerms);
 } catch (IOException bogus) {
 throw new RuntimeException("Should not happen on an empty index", bogus);
 }

@@ -306,7 +306,7 @@ public class WeightedSpanTermExtractor {
 q = spanQuery;
 }
 LeafReaderContext context = getLeafContext();
-SpanWeight w = (SpanWeight) searcher.createNormalizedWeight(q, ScoreMode.COMPLETE_NO_SCORES);
+SpanWeight w = (SpanWeight) searcher.createWeight(searcher.rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1);
 Bits acceptDocs = context.reader().getLiveDocs();
 final Spans spans = w.getSpans(context, SpanWeight.Postings.POSITIONS);
 if (spans == null) {

@@ -360,7 +360,7 @@ public class WeightedSpanTermExtractor {
 protected void extractWeightedTerms(Map<String,WeightedSpanTerm> terms, Query query, float boost) throws IOException {
 Set<Term> nonWeightedTerms = new HashSet<>();
 final IndexSearcher searcher = new IndexSearcher(getLeafContext());
-searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES).extractTerms(nonWeightedTerms);
+searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1).extractTerms(nonWeightedTerms);

 for (final Term queryTerm : nonWeightedTerms) {

@@ -227,7 +227,7 @@ public class PhraseHelper {
 }
 };
 for (Query query : spanQueries) {
-Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1);
 Scorer scorer = weight.scorer(leafReader.getContext());
 if (scorer == null) {
 continue;

@@ -144,7 +144,7 @@ public class UnifiedHighlighter {
 */
 protected static Set<Term> extractTerms(Query query) throws IOException {
 Set<Term> queryTerms = new HashSet<>();
-EMPTY_INDEXSEARCHER.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES).extractTerms(queryTerms);
+EMPTY_INDEXSEARCHER.createWeight(EMPTY_INDEXSEARCHER.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1).extractTerms(queryTerms);
 return queryTerms;
 }

@@ -70,7 +70,8 @@ public class QueryBitSetProducer implements BitSetProducer {
 final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
 final IndexSearcher searcher = new IndexSearcher(topLevelContext);
 searcher.setQueryCache(null);
-final Weight weight = searcher.createNormalizedWeight(query, org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES);
+final Query rewritten = searcher.rewrite(query);
+final Weight weight = searcher.createWeight(rewritten, org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1);
 final Scorer s = weight.scorer(context);

 if (s == null) {

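Note: the production hunks above (QueryBitSetProducer, the range facet counts, the highlighter helpers) all share one shape: build a throwaway IndexSearcher over the top-level reader context, disable its query cache, rewrite, then create a matching-only Weight. A hedged recap of that shape in isolation; the class and method names are invented for illustration:

  import java.io.IOException;
  import org.apache.lucene.index.IndexReaderContext;
  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.ScoreMode;
  import org.apache.lucene.search.Weight;

  class MatchOnlyWeightFactory {
    // Matching-only Weight over a top-level context; the query cache is
    // disabled so the resulting matches are computed from the raw query.
    static Weight matchingWeight(IndexReaderContext topLevelContext, Query query)
        throws IOException {
      IndexSearcher searcher = new IndexSearcher(topLevelContext);
      searcher.setQueryCache(null);
      Query rewritten = searcher.rewrite(query);
      return searcher.createWeight(rewritten, ScoreMode.COMPLETE_NO_SCORES, 1);
    }
  }
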
@@ -1113,7 +1113,7 @@ public class TestBlockJoin extends LuceneTestCase {
 CheckJoinIndex.check(s.getIndexReader(), parentFilter);

 ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
-Weight weight = s.createNormalizedWeight(q, org.apache.lucene.search.ScoreMode.COMPLETE);
+Weight weight = s.createWeight(s.rewrite(q), org.apache.lucene.search.ScoreMode.COMPLETE, 1);
 Scorer sc = weight.scorer(s.getIndexReader().leaves().get(0));
 assertEquals(1, sc.iterator().advance(1));
 r.close();

@@ -1147,7 +1147,7 @@ public class TestBlockJoin extends LuceneTestCase {
 CheckJoinIndex.check(s.getIndexReader(), parentFilter);

 ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
-Weight weight = s.createNormalizedWeight(q, org.apache.lucene.search.ScoreMode.COMPLETE);
+Weight weight = s.createWeight(s.rewrite(q), org.apache.lucene.search.ScoreMode.COMPLETE, 1);
 Scorer sc = weight.scorer(s.getIndexReader().leaves().get(0));
 assertEquals(2, sc.iterator().advance(0));
 r.close();

@@ -1199,7 +1199,7 @@ public class TestBlockJoin extends LuceneTestCase {
 CheckJoinIndex.check(r, parentsFilter);
 ToParentBlockJoinQuery childJoinQuery = new ToParentBlockJoinQuery(childQuery, parentsFilter, ScoreMode.Avg);

-Weight weight = searcher.createNormalizedWeight(childJoinQuery, RandomPicks.randomFrom(random(), org.apache.lucene.search.ScoreMode.values()));
+Weight weight = searcher.createWeight(searcher.rewrite(childJoinQuery), RandomPicks.randomFrom(random(), org.apache.lucene.search.ScoreMode.values()), 1);
 Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
 assertNull(scorer);

@@ -1207,7 +1207,7 @@ public class TestBlockJoin extends LuceneTestCase {
 childQuery = new TermQuery(new Term("bogus", "bogus"));
 childJoinQuery = new ToParentBlockJoinQuery(childQuery, parentsFilter, ScoreMode.Avg);

-weight = searcher.createNormalizedWeight(childJoinQuery, RandomPicks.randomFrom(random(), org.apache.lucene.search.ScoreMode.values()));
+weight = searcher.createWeight(searcher.rewrite(childJoinQuery), RandomPicks.randomFrom(random(), org.apache.lucene.search.ScoreMode.values()), 1);
 scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
 assertNull(scorer);

@@ -1401,7 +1401,7 @@ public class TestBlockJoin extends LuceneTestCase {

 ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentFilter);

-Weight weight = s.createNormalizedWeight(parentJoinQuery, RandomPicks.randomFrom(random(), org.apache.lucene.search.ScoreMode.values()));
+Weight weight = s.createWeight(s.rewrite(parentJoinQuery), RandomPicks.randomFrom(random(), org.apache.lucene.search.ScoreMode.values()), 1);
 Scorer advancingScorer = weight.scorer(s.getIndexReader().leaves().get(0));
 Scorer nextDocScorer = weight.scorer(s.getIndexReader().leaves().get(0));

@@ -103,7 +103,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
 ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter);

 final LeafReaderContext context = indexSearcher.getIndexReader().leaves().get(0);
-Weight weight = indexSearcher.createNormalizedWeight(blockJoinQuery, org.apache.lucene.search.ScoreMode.COMPLETE);
+Weight weight = indexSearcher.createWeight(indexSearcher.rewrite(blockJoinQuery), org.apache.lucene.search.ScoreMode.COMPLETE, 1);
 Scorer scorer = weight.scorer(context);
 final Bits parentDocs = parentsFilter.getBitSet(context);

@@ -106,7 +106,8 @@ public class PKIndexSplitter {
 try {
 final IndexSearcher searcher = new IndexSearcher(reader);
 searcher.setQueryCache(null);
-final Weight preserveWeight = searcher.createNormalizedWeight(preserveFilter, ScoreMode.COMPLETE_NO_SCORES);
+preserveFilter = searcher.rewrite(preserveFilter);
+final Weight preserveWeight = searcher.createWeight(preserveFilter, ScoreMode.COMPLETE_NO_SCORES, 1);
 final List<LeafReaderContext> leaves = reader.leaves();
 final CodecReader[] subReaders = new CodecReader[leaves.size()];
 int i = 0;

@@ -72,7 +72,8 @@ public class QueryValueSource extends ValueSource {

 @Override
 public void createWeight(Map context, IndexSearcher searcher) throws IOException {
-Weight w = searcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+Query rewritten = searcher.rewrite(q);
+Weight w = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
 context.put(this, w);
 }
 }

@@ -159,14 +159,14 @@ public class TestIndexReaderFunctions extends LuceneTestCase {

 void assertCacheable(DoubleValuesSource vs, boolean expected) throws Exception {
 Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), vs);
-Weight w = searcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+Weight w = searcher.createWeight(q, ScoreMode.COMPLETE, 1);
 LeafReaderContext ctx = reader.leaves().get(0);
 assertEquals(expected, w.isCacheable(ctx));
 }

 void assertCacheable(LongValuesSource vs, boolean expected) throws Exception {
 Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), vs.toDoubleValuesSource());
-Weight w = searcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+Weight w = searcher.createWeight(q, ScoreMode.COMPLETE, 1);
 LeafReaderContext ctx = reader.leaves().get(0);
 assertEquals(expected, w.isCacheable(ctx));
 }

@@ -172,7 +172,8 @@ public class PayloadSpanUtil {
 final IndexSearcher searcher = new IndexSearcher(context);
 searcher.setQueryCache(null);

-SpanWeight w = (SpanWeight) searcher.createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+query = (SpanQuery) searcher.rewrite(query);
+SpanWeight w = (SpanWeight) searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1);

 PayloadSpanCollector collector = new PayloadSpanCollector();
 for (LeafReaderContext leafReaderContext : context.leaves()) {

@@ -381,7 +381,7 @@ public class RandomIndexWriter implements Closeable {
 if (r.nextInt(20) == 2) {
 doRandomForceMerge();
 }
-if (!applyDeletions || r.nextBoolean() || w.getConfig().getSoftDeletesField() != null) {
+if (!applyDeletions || r.nextBoolean()) {
 // if we have soft deletes we can't open from a directory
 if (LuceneTestCase.VERBOSE) {
 System.out.println("RIW.getReader: use NRT reader");

@@ -396,7 +396,12 @@ public class RandomIndexWriter implements Closeable {
 }
 w.commit();
 if (r.nextBoolean()) {
-return DirectoryReader.open(w.getDirectory());
+DirectoryReader reader = DirectoryReader.open(w.getDirectory());
+if (w.getConfig().getSoftDeletesField() != null) {
+return new SoftDeletesDirectoryReaderWrapper(reader, w.getConfig().getSoftDeletesField());
+} else {
+return reader;
+}
 } else {
 return w.getReader(applyDeletions, writeAllDeletes);
 }

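Note: the RandomIndexWriter hunk above pairs with the new SoftDeletesDirectoryReaderWrapper: a reader opened straight from the directory does not hide soft-deleted documents on its own, so it must be wrapped whenever the writer is configured with a soft-deletes field. A hedged sketch of that logic in isolation; the class and method names are invented for illustration:

  import java.io.IOException;
  import org.apache.lucene.index.DirectoryReader;
  import org.apache.lucene.index.IndexWriter;
  import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper;

  class SoftDeletesAwareOpen {
    // Open a reader from the writer's directory; if the writer uses a
    // soft-deletes field, wrap the reader so soft-deleted docs stay hidden.
    static DirectoryReader open(IndexWriter w) throws IOException {
      DirectoryReader reader = DirectoryReader.open(w.getDirectory());
      String softDeletesField = w.getConfig().getSoftDeletesField();
      return softDeletesField == null
          ? reader
          : new SoftDeletesDirectoryReaderWrapper(reader, softDeletesField);
    }
  }
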
@ -16,22 +16,21 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.lucene.search;
|
package org.apache.lucene.search;
|
||||||
|
|
||||||
import static org.junit.Assert.assertEquals;
|
|
||||||
import static org.junit.Assert.assertTrue;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
|
import java.util.Random;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.TreeSet;
|
import java.util.TreeSet;
|
||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
||||||
import java.util.Random;
|
|
||||||
|
|
||||||
import junit.framework.Assert;
|
import junit.framework.Assert;
|
||||||
|
|
||||||
import org.apache.lucene.index.LeafReaderContext;
|
|
||||||
import org.apache.lucene.index.IndexReader;
|
import org.apache.lucene.index.IndexReader;
|
||||||
|
import org.apache.lucene.index.LeafReaderContext;
|
||||||
import org.apache.lucene.util.LuceneTestCase;
|
import org.apache.lucene.util.LuceneTestCase;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Utility class for asserting expected hits in tests.
|
* Utility class for asserting expected hits in tests.
|
||||||
*/
|
*/
|
||||||
|
@ -541,8 +540,9 @@ public class CheckHits {
   }
 
   private static void doCheckMaxScores(Random random, Query query, IndexSearcher searcher) throws IOException {
-    Weight w1 = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
-    Weight w2 = searcher.createNormalizedWeight(query, ScoreMode.TOP_SCORES);
+    query = searcher.rewrite(query);
+    Weight w1 = searcher.createWeight(query, ScoreMode.COMPLETE, 1);
+    Weight w2 = searcher.createWeight(query, ScoreMode.TOP_SCORES, 1);
 
     // Check boundaries and max scores when iterating all matches
     for (LeafReaderContext ctx : searcher.getIndexReader().leaves()) {
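This is the first of many call sites migrated off the removed IndexSearcher.createNormalizedWeight(). The replacement is always the same two steps; a self-contained sketch of the pattern, assuming an unboosted top-level query (class and method names are illustrative):

import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Weight;

class CreateWeightMigration {
  // Replaces: searcher.createNormalizedWeight(query, ScoreMode.COMPLETE)
  static Weight weightFor(IndexSearcher searcher, Query query) throws IOException {
    // createNormalizedWeight() rewrote the query internally; createWeight()
    // expects an already-rewritten query, so callers now rewrite explicitly.
    Query rewritten = searcher.rewrite(query);
    // The trailing 1 is the boost argument; 1 reproduces the old default.
    return searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
  }
}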
@ -309,7 +309,8 @@ public class QueryUtils {
           lastDoc[0] = doc;
           try {
             if (scorer == null) {
-              Weight w = s.createNormalizedWeight(q, ScoreMode.COMPLETE);
+              Query rewritten = s.rewrite(q);
+              Weight w = s.createWeight(rewritten, ScoreMode.COMPLETE, 1);
               LeafReaderContext context = readerContextArray.get(leafPtr);
               scorer = w.scorer(context);
               iterator = scorer.iterator();

@ -374,7 +375,8 @@ public class QueryUtils {
             final LeafReader previousReader = lastReader[0];
             IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
             indexSearcher.setSimilarity(s.getSimilarity());
-            Weight w = indexSearcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+            Query rewritten = indexSearcher.rewrite(q);
+            Weight w = indexSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
             LeafReaderContext ctx = (LeafReaderContext)indexSearcher.getTopReaderContext();
             Scorer scorer = w.scorer(ctx);
             if (scorer != null) {

@ -404,7 +406,8 @@ public class QueryUtils {
           final LeafReader previousReader = lastReader[0];
           IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
           indexSearcher.setSimilarity(s.getSimilarity());
-          Weight w = indexSearcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+          Query rewritten = indexSearcher.rewrite(q);
+          Weight w = indexSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
           LeafReaderContext ctx = previousReader.getContext();
           Scorer scorer = w.scorer(ctx);
           if (scorer != null) {

@ -430,6 +433,7 @@ public class QueryUtils {
     final int lastDoc[] = {-1};
     final LeafReader lastReader[] = {null};
     final List<LeafReaderContext> context = s.getTopReaderContext().leaves();
+    Query rewritten = s.rewrite(q);
     s.search(q,new SimpleCollector() {
       private Scorer scorer;
       private int leafPtr;

@ -443,7 +447,7 @@ public class QueryUtils {
         try {
           long startMS = System.currentTimeMillis();
           for (int i=lastDoc[0]+1; i<=doc; i++) {
-            Weight w = s.createNormalizedWeight(q, ScoreMode.COMPLETE);
+            Weight w = s.createWeight(rewritten, ScoreMode.COMPLETE, 1);
             Scorer scorer = w.scorer(context.get(leafPtr));
             Assert.assertTrue("query collected "+doc+" but advance("+i+") says no more docs!",scorer.iterator().advance(i) != DocIdSetIterator.NO_MORE_DOCS);
             Assert.assertEquals("query collected "+doc+" but advance("+i+") got to "+scorer.docID(),doc,scorer.docID());

@ -476,7 +480,7 @@ public class QueryUtils {
           final LeafReader previousReader = lastReader[0];
           IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
           indexSearcher.setSimilarity(s.getSimilarity());
-          Weight w = indexSearcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+          Weight w = indexSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
           Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext());
           if (scorer != null) {
             DocIdSetIterator iterator = scorer.iterator();

@ -504,7 +508,7 @@ public class QueryUtils {
           final LeafReader previousReader = lastReader[0];
           IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
           indexSearcher.setSimilarity(s.getSimilarity());
-          Weight w = indexSearcher.createNormalizedWeight(q, ScoreMode.COMPLETE);
+          Weight w = indexSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
           Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext());
           if (scorer != null) {
             DocIdSetIterator iterator = scorer.iterator();

@ -523,7 +527,8 @@ public class QueryUtils {
 
   /** Check that the scorer and bulk scorer advance consistently. */
   public static void checkBulkScorerSkipTo(Random r, Query query, IndexSearcher searcher) throws IOException {
-    Weight weight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+    query = searcher.rewrite(query);
+    Weight weight = searcher.createWeight(query, ScoreMode.COMPLETE, 1);
     for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
       final Scorer scorer = weight.scorer(context);
       final BulkScorer bulkScorer = weight.bulkScorer(context);
@ -230,7 +230,8 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
     @Override
     public Query rewrite(Query original) throws IOException {
       final IndexSearcher localSearcher = new IndexSearcher(getIndexReader());
-      final Weight weight = localSearcher.createNormalizedWeight(original, ScoreMode.COMPLETE);
+      original = localSearcher.rewrite(original);
+      final Weight weight = localSearcher.createWeight(original, ScoreMode.COMPLETE, 1);
       final Set<Term> terms = new HashSet<>();
       weight.extractTerms(terms);
@ -78,6 +78,9 @@ New Features
 * SOLR-7887: Upgrade Solr to use Log4J 2.11
   (Tim Potter, Keith Laban, Shawn Heisey, Ralph Goers, Erick Erickson, Varun Thacker)
 
+* SOLR-12139: The "eq" (equals) function query now works with string fields, string literals, and perhaps anything.
+  (Andrey Kudryavtsev, David Smiley)
+
 Bug Fixes
 ----------------------
 

@ -114,6 +117,9 @@ Bug Fixes
 * SOLR-12199: TestReplicationHandler.doTestRepeater(): TEST_PORT interpolation failure:
   Server refused connection at: http://127.0.0.1:TEST_PORT/solr (Mikhail Khludnev, Dawid Weiss, Steve Rowe)
 
+* SOLR-12096: Fixed inconsistent results format of subquery transformer for distributed search (multi-shard).
+  (Munendra S N, Mikhail Khludnev via Ishan Chattopadhyaya)
+
 Optimizations
 ----------------------
 

@ -386,7 +392,7 @@ Bug Fixes
 
 * SOLR-11988: Fix exists() method in EphemeralDirectoryFactory/MockDirectoryFactory to prevent false positives (hossman)
 
-* SOLR-11971: Don't allow referal to external resources in DataImportHandler's dataConfig request parameter.
+* SOLR-11971: Don't allow referal to external resources in DataImportHandler's dataConfig request parameter (CVE-2018-1308).
   (麦 香浓郁, Uwe Schindler)
 
 * SOLR-12021: Fixed a bug in ApiSpec and other JSON resource loading that was causing unclosed file handles (hossman)
@ -115,7 +115,7 @@ public class LTRRescorer extends Rescorer {
     final ScoreDoc[] reranked = new ScoreDoc[topN];
     final List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
     final LTRScoringQuery.ModelWeight modelWeight = (LTRScoringQuery.ModelWeight) searcher
-        .createNormalizedWeight(scoringQuery, ScoreMode.COMPLETE);
+        .createWeight(searcher.rewrite(scoringQuery), ScoreMode.COMPLETE, 1);
 
     scoreFeatures(searcher, firstPassTopDocs,topN, modelWeight, hits, leaves, reranked);
     // Must sort all documents that we reranked, and then select the top

@ -219,8 +219,8 @@ public class LTRRescorer extends Rescorer {
     final int n = ReaderUtil.subIndex(docID, leafContexts);
     final LeafReaderContext context = leafContexts.get(n);
     final int deBasedDoc = docID - context.docBase;
-    final Weight modelWeight = searcher.createNormalizedWeight(scoringQuery,
-        ScoreMode.COMPLETE);
+    final Weight modelWeight = searcher.createWeight(searcher.rewrite(scoringQuery),
+        ScoreMode.COMPLETE, 1);
     return modelWeight.explain(context, deBasedDoc);
   }

@ -70,7 +70,7 @@ public class OriginalScoreFeature extends Feature {
     public OriginalScoreWeight(IndexSearcher searcher,
         SolrQueryRequest request, Query originalQuery, Map<String,String[]> efi) throws IOException {
       super(OriginalScoreFeature.this, searcher, request, originalQuery, efi);
-      w = searcher.createNormalizedWeight(originalQuery, ScoreMode.COMPLETE);
+      w = searcher.createWeight(searcher.rewrite(originalQuery), ScoreMode.COMPLETE, 1);
     };
 

@ -179,7 +179,7 @@ public class SolrFeature extends Feature {
       // leaving nothing for the phrase query to parse.
       if (query != null) {
         queryAndFilters.add(query);
-        solrQueryWeight = searcher.createNormalizedWeight(query, ScoreMode.COMPLETE);
+        solrQueryWeight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
       } else {
         solrQueryWeight = null;
       }
@ -99,7 +99,7 @@ public class TestLTRScoringQuery extends LuceneTestCase {
     final LeafReaderContext context = leafContexts.get(n);
     final int deBasedDoc = hits.scoreDocs[0].doc - context.docBase;
 
-    final Weight weight = searcher.createNormalizedWeight(model, ScoreMode.COMPLETE);
+    final Weight weight = searcher.createWeight(searcher.rewrite(model), ScoreMode.COMPLETE, 1);
     final Scorer scorer = weight.scorer(context);
 
     // rerank using the field final-score

@ -80,7 +80,7 @@ public class TestSelectiveWeightCreation extends TestRerankBase {
     final LeafReaderContext context = leafContexts.get(n);
     final int deBasedDoc = hits.scoreDocs[0].doc - context.docBase;
 
-    final Weight weight = searcher.createNormalizedWeight(model, ScoreMode.COMPLETE);
+    final Weight weight = searcher.createWeight(searcher.rewrite(model), ScoreMode.COMPLETE, 1);
     final Scorer scorer = weight.scorer(context);
 
     // rerank using the field final-score
@ -166,7 +166,8 @@ class GeoJSONWriter extends JSONWriter {
 
     // SolrDocument will now have multiValued fields represented as a Collection,
     // even if only a single value is returned for this document.
-    if (val instanceof List) {
+    // For SolrDocumentList, use writeVal instead of writeArray
+    if (!(val instanceof SolrDocumentList) && val instanceof List) {
       // shortcut this common case instead of going through writeVal again
       writeArray(name,((Iterable)val).iterator());
     } else {

@ -25,10 +25,11 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.solr.common.IteratorWriter;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.MapWriter.EntryWriter;
 import org.apache.solr.common.PushWriter;
 import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;

@ -367,7 +368,8 @@ class JSONWriter extends TextResponseWriter {
 
     // SolrDocument will now have multiValued fields represented as a Collection,
     // even if only a single value is returned for this document.
-    if (val instanceof List) {
+    // For SolrDocumentList, use writeVal instead of writeArray
+    if (!(val instanceof SolrDocumentList) && val instanceof List) {
       // shortcut this common case instead of going through writeVal again
       writeArray(name,((Iterable)val).iterator());
     } else {
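Both writer hunks (GeoJSONWriter above and JSONWriter here) add the same guard. A small sketch of the predicate and why it matters, with illustrative names; the real writers dispatch to their own writeArray()/writeVal() members:

import java.util.List;

import org.apache.solr.common.SolrDocumentList;

class ChildDocListGuard {
  // SolrDocumentList extends ArrayList<SolrDocument>, so it also matches
  // "instanceof List". Without the extra test, nested child documents were
  // serialized as a bare JSON array, dropping the numFound/start envelope
  // that the doc-list code path produces.
  static boolean useArrayShortcut(Object val) {
    return !(val instanceof SolrDocumentList) && val instanceof List;
  }
}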
@ -66,7 +66,8 @@ public class QueryWrapperFilter extends Filter {
   public DocIdSet getDocIdSet(final LeafReaderContext context, final Bits acceptDocs) throws IOException {
     // get a private context that is used to rewrite, createWeight and score eventually
     final LeafReaderContext privateContext = context.reader().getContext();
-    final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query, ScoreMode.COMPLETE_NO_SCORES);
+    final IndexSearcher searcher = new IndexSearcher(privateContext);
+    final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1);
 
     DocIdSet set = new DocIdSet() {
       @Override

@ -1061,7 +1061,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
       List<Weight> weights = new ArrayList<>(notCached.size());
       for (Query q : notCached) {
         Query qq = QueryUtils.makeQueryable(q);
-        weights.add(createNormalizedWeight(qq, ScoreMode.COMPLETE));
+        weights.add(createWeight(rewrite(qq), ScoreMode.COMPLETE, 1));
       }
       pf.filter = new FilterImpl(answer, weights);
       pf.hasDeletedDocs = (answer == null);  // if all clauses were uncached, the resulting filter may match deleted docs
@ -67,6 +67,7 @@ import org.apache.solr.search.facet.UniqueAgg;
 import org.apache.solr.search.facet.VarianceAgg;
 import org.apache.solr.search.function.CollapseScoreFunction;
 import org.apache.solr.search.function.ConcatStringFunction;
+import org.apache.solr.search.function.EqualFunction;
 import org.apache.solr.search.function.OrdFieldSource;
 import org.apache.solr.search.function.ReverseOrdFieldSource;
 import org.apache.solr.search.function.SolrComparisonBoolFunction;

@ -922,7 +923,7 @@ public abstract class ValueSourceParser implements NamedListInitializedPlugin {
         ValueSource lhsValSource = fp.parseValueSource();
         ValueSource rhsValSource = fp.parseValueSource();
 
-        return new SolrComparisonBoolFunction(lhsValSource, rhsValSource, "eq", (cmp) -> cmp == 0);
+        return new EqualFunction(lhsValSource, rhsValSource, "eq");
       }
     });
@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search.function;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import org.apache.lucene.queries.function.FunctionValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.valuesource.ComparisonBoolFunction;
+
+/**
+ * Compares two values for equality.
+ * It should work on not only numbers but strings and custom things.
+ *
+ * @since 7.4
+ */
+public class EqualFunction extends ComparisonBoolFunction {
+
+  public EqualFunction(ValueSource lhs, ValueSource rhs, String name) {
+    super(lhs, rhs, name);
+  }
+
+  @Override
+  public boolean compare(int doc, FunctionValues lhs, FunctionValues rhs) throws IOException {
+    Object objL = lhs.objectVal(doc);
+    Object objR = rhs.objectVal(doc);
+    if (isNumeric(objL) && isNumeric(objR)) {
+      if (isInteger(objL) && isInteger(objR)) {
+        return Long.compare(((Number)objL).longValue(), ((Number)objR).longValue()) == 0;
+      } else {
+        return Double.compare(((Number)objL).doubleValue(), ((Number)objR).doubleValue()) == 0;
+      }
+    } else {
+      return Objects.equals(objL, objR);
+    }
+  }
+
+  private static boolean isInteger(Object obj) {
+    return obj instanceof Integer || obj instanceof Long;
+  }
+
+  private static boolean isNumeric(Object obj) {
+    return obj instanceof Number;
+  }
+}
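The comparison rules encoded by EqualFunction.compare() can be exercised standalone. The demo below mirrors its branching and is a sketch for illustration, not part of the patch:

import java.util.Objects;

public class EqualFunctionSemantics {
  // Same branching as EqualFunction.compare(): integer-family values compare
  // as longs, mixed numerics as doubles, everything else via Objects.equals.
  static boolean eq(Object l, Object r) {
    if (l instanceof Number && r instanceof Number) {
      if ((l instanceof Integer || l instanceof Long) && (r instanceof Integer || r instanceof Long)) {
        return ((Number) l).longValue() == ((Number) r).longValue();
      }
      return Double.compare(((Number) l).doubleValue(), ((Number) r).doubleValue()) == 0;
    }
    return Objects.equals(l, r);
  }

  public static void main(String[] args) {
    System.out.println(eq(5, 5L));             // true: both integer types
    System.out.println(eq(5, 5.0));            // true: promoted to double
    System.out.println(eq("5", 5));            // false: no string-to-number coercion
    System.out.println(eq("value", "value"));  // true: Objects.equals
  }
}

This matches the new TestFunctionQuery expectations below, where eq(def(field_s,'5'),5) is false because a string is never coerced to a number.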
@ -16,6 +16,16 @@
  */
 package org.apache.solr.search.stats;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
 import com.google.common.collect.Lists;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.Term;

@ -39,16 +49,6 @@ import org.apache.solr.search.SolrIndexSearcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
 /**
  * This class implements exact caching of statistics. It requires an additional
  * round-trip to parse query at shard servers, and return term statistics for

@ -157,7 +157,7 @@ public class ExactStatsCache extends StatsCache {
     Query q = rb.getQuery();
     try {
       HashSet<Term> terms = new HashSet<>();
-      searcher.createNormalizedWeight(q, ScoreMode.COMPLETE).extractTerms(terms);
+      searcher.createWeight(searcher.rewrite(q), ScoreMode.COMPLETE, 1).extractTerms(terms);
       IndexReaderContext context = searcher.getTopReaderContext();
       HashMap<String,TermStats> statsMap = new HashMap<>();
       HashMap<String,CollectionStats> colMap = new HashMap<>();
@ -22,7 +22,10 @@ import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
+
+import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;

@ -130,9 +133,9 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
   }
 
   @Test
-  public void testJSONSolrDocument() throws IOException {
+  public void testJSONSolrDocument() throws Exception {
     SolrQueryRequest req = req(CommonParams.WT,"json",
-                               CommonParams.FL,"id,score");
+                               CommonParams.FL,"id,score,_children_,path");
     SolrQueryResponse rsp = new SolrQueryResponse();
     JSONResponseWriter w = new JSONResponseWriter();
 
@ -141,11 +144,22 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
 
     StringWriter buf = new StringWriter();
 
+    SolrDocument childDoc = new SolrDocument();
+    childDoc.addField("id", "2");
+    childDoc.addField("score", "0.4");
+    childDoc.addField("path", Arrays.asList("a>b", "a>b>c"));
+
+    SolrDocumentList childList = new SolrDocumentList();
+    childList.setNumFound(1);
+    childList.setStart(0);
+    childList.add(childDoc);
+
     SolrDocument solrDoc = new SolrDocument();
     solrDoc.addField("id", "1");
     solrDoc.addField("subject", "hello2");
     solrDoc.addField("title", "hello3");
     solrDoc.addField("score", "0.7");
+    solrDoc.setField("_children_", childList);
 
     SolrDocumentList list = new SolrDocumentList();
     list.setNumFound(1);

@ -163,8 +177,12 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
                result.contains("\"title\""));
     assertTrue("response doesn't contain expected fields: " + result,
                result.contains("\"id\"") &&
-               result.contains("\"score\""));
+               result.contains("\"score\"") && result.contains("_children_"));
+
+    String expectedResult = "{'response':{'numFound':1,'start':0,'maxScore':0.7,'docs':[{'id':'1', 'score':'0.7'," +
+        " '_children_':{'numFound':1,'start':0,'docs':[{'id':'2', 'score':'0.4', 'path':['a>b', 'a>b>c']}] }}] }}";
+    String error = JSONTestUtil.match(result, "=="+expectedResult);
+    assertNull("response validation failed with error: " + error, error);
 
     req.close();
   }
@ -16,7 +16,11 @@
  */
 package org.apache.solr.response.transform;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.charset.Charset;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;

@ -26,6 +30,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.JSONTestUtil;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;

@ -37,10 +43,12 @@ import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.ContentStreamBase;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+@org.apache.solr.SolrTestCaseJ4.SuppressSSL()
 public class TestSubQueryTransformerDistrib extends SolrCloudTestCase {
 
   private static final String support = "These guys help customers";
@ -92,7 +100,7 @@ public class TestSubQueryTransformerDistrib extends SolrCloudTestCase {
 
   @SuppressWarnings("serial")
   @Test
-  public void test() throws SolrServerException, IOException {
+  public void test() throws Exception {
     int peopleMultiplier = atLeast(1);
     int deptMultiplier = atLeast(1);
 

@ -100,24 +108,26 @@ public class TestSubQueryTransformerDistrib extends SolrCloudTestCase {
 
     Random random1 = random();
 
+    final ModifiableSolrParams params = params(
+        new String[]{"q","name_s:dave", "indent","true",
+        "fl","*,depts:[subquery "+((random1.nextBoolean() ? "" : "separator=,"))+"]",
+        "rows","" + peopleMultiplier,
+        "depts.q","{!terms f=dept_id_s v=$row.dept_ss_dv "+((random1.nextBoolean() ? "" : "separator=,"))+"}",
+        "depts.fl","text_t"+(differentUniqueId?",id:notid":""),
+        "depts.indent","true",
+        "depts.collection","departments",
+        differentUniqueId ? "depts.distrib.singlePass":"notnecessary","true",
+        "depts.rows",""+(deptMultiplier*2),
+        "depts.logParamsList","q,fl,rows,row.dept_ss_dv",
+        random().nextBoolean()?"depts.wt":"whatever",anyWt(),
+        random().nextBoolean()?"wt":"whatever",anyWt()});
+
+    final SolrDocumentList hits;
     {
-      final QueryRequest qr = new QueryRequest(params(
-          new String[]{"q","name_s:dave", "indent","true",
-          "fl","*,depts:[subquery "+((random1.nextBoolean() ? "" : "separator=,"))+"]",
-          "rows","" + peopleMultiplier,
-          "depts.q","{!terms f=dept_id_s v=$row.dept_ss_dv "+((random1.nextBoolean() ? "" : "separator=,"))+"}",
-          "depts.fl","text_t"+(differentUniqueId?",id:notid":""),
-          "depts.indent","true",
-          "depts.collection","departments",
-          differentUniqueId ? "depts.distrib.singlePass":"notnecessary","true",
-          "depts.rows",""+(deptMultiplier*2),
-          "depts.logParamsList","q,fl,rows,row.dept_ss_dv",
-          random().nextBoolean()?"depts.wt":"whatever",anyWt(),
-          random().nextBoolean()?"wt":"whatever",anyWt()}));
+      final QueryRequest qr = new QueryRequest(params);
       final QueryResponse rsp = new QueryResponse();
-      rsp.setResponse(cluster.getSolrClient().request(qr, people));
-      final SolrDocumentList hits = rsp.getResults();
+      rsp.setResponse(cluster.getSolrClient().request(qr, people+","+depts));
+      hits = rsp.getResults();
 
       assertEquals(peopleMultiplier, hits.getNumFound());
 

@ -140,6 +150,21 @@ public class TestSubQueryTransformerDistrib extends SolrCloudTestCase {
       }
       assertEquals(hits.toString(), engineerCount, supportCount);
     }
 
+    params.set("wt", "json");
+    final URL node = new URL(cluster.getRandomJetty(random()).getBaseUrl().toString()
+        +"/"+people+"/select"+params.toQueryString());
+
+    try(final InputStream jsonResponse = node.openStream()){
+      final ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
+      IOUtils.copy(jsonResponse, outBuffer);
+
+      final Object expected = ((SolrDocumentList) hits.get(0).getFieldValue("depts")).get(0).get("text_t");
+      final String err = JSONTestUtil.match("/response/docs/[0]/depts/docs/[0]/text_t"
+          ,outBuffer.toString(Charset.forName("UTF-8").toString()),
+          "\""+expected+"\"");
+      assertNull(err,err);
+    }
+
   }
 
@ -228,7 +228,7 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
     final IndexSearcher searcher = new IndexSearcher(reader);
     searcher.setQueryCache(null); // to still have approximations
     final Query query = new QueryWrapperFilter(new RandomApproximationQuery(new TermQuery(new Term("foo", "bar")), random()));
-    final Weight weight = searcher.createNormalizedWeight(query, RandomPicks.randomFrom(random(), ScoreMode.values()));
+    final Weight weight = searcher.createWeight(searcher.rewrite(query), RandomPicks.randomFrom(random(), ScoreMode.values()), 1);
     final Scorer scorer = weight.scorer(reader.leaves().get(0));
     assertNotNull(scorer.twoPhaseIterator());
     reader.close();
@ -988,4 +988,82 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
                /*id*/2, /*score*/5,
                /*id*/1, /*score*/2);
   }
+
+  @Test
+  public void testEqualFunction() {
+    clearIndex();
+    assertU(adoc("id", "1", "field1_s", "value1", "field2_s", "value1",
+        "field1_s_dv", "value1", "field2_s_dv", "value2", "field_b", "true"));
+    assertU(adoc("id", "2", "field1_s", "value1", "field2_s", "value2",
+        "field1_s_dv", "value1", "field2_s_dv", "value1", "field_b", "false"));
+    assertU(commit());
+
+    singleTest("field1_s", "if(eq(field1_s,field2_s),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field1_s_dv", "if(eq(field1_s_dv,field2_s_dv),5,2)",
+        /*id*/2, /*score*/5,
+        /*id*/1, /*score*/2);
+    singleTest("field1_s", "if(eq(field1_s,field1_s_dv),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/5);
+    singleTest("field2_s", "if(eq(field2_s,field2_s_dv),5,2)",
+        /*id*/1, /*score*/2,
+        /*id*/2, /*score*/2);
+    singleTest("field2_s", "if(eq(field2_s,'value1'),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field1_s", "if(eq('value1','value1'),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/5);
+    singleTest("field_b", "if(eq(if(field_b,'value1','value2'),'value1'),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+  }
+
+  @Test
+  public void testEqualNumericComparisons() {
+    clearIndex();
+    assertU(adoc("id", "1", "field_d", "5.0", "field_i", "5"));
+    assertU(adoc("id", "2", "field_d", "3.0", "field_i", "3"));
+    assertU(commit());
+    singleTest("field_d", "if(eq(field_d,5),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field_d", "if(eq(field_d,5.0),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field_d", "if(eq(5,def(field_d,5)),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field_i", "if(eq(5.0,def(field_i,5)),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field_not_existed_i", "if(def(field_not_existed_i,5.0),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/5);
+    singleTest("field_not_existed_i", "if(def(field_not_existed_i,5),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/5);
+  }
+
+  @Test
+  public void testDifferentTypesComparisons() {
+    clearIndex();
+    assertU(adoc("id", "1", "field_s", "value"));
+    assertU(adoc("id", "2"));
+    assertU(commit());
+    singleTest("field_s", "if(eq(field_s,'value'),5,2)",
+        /*id*/1, /*score*/5,
+        /*id*/2, /*score*/2);
+    singleTest("field_s", "if(eq(def(field_s,5),5),5,2)",
+        /*id*/2, /*score*/5,
+        /*id*/1, /*score*/2);
+    singleTest("field_s", "if(eq(def(field_s,5),5.0),5,2)",
+        /*id*/2, /*score*/5,
+        /*id*/1, /*score*/2);
+    singleTest("field_s", "if(eq(def(field_s,'5'),5),5,2)",
+        /*id*/1, /*score*/2,
+        /*id*/2, /*score*/2);
+  }
 }
@ -496,7 +496,8 @@ Returns `true` if any member of the field exists.
 === Comparison Functions
 `gt`, `gte`, `lt`, `lte`, `eq`
 
-5 comparison functions: Greater Than, Greater Than or Equal, Less Than, Less Than or Equal, Equal
+5 comparison functions: Greater Than, Greater Than or Equal, Less Than, Less Than or Equal, Equal.
+`eq` works on not just numbers but essentially any value like a string field.
 
 *Syntax Example*
 
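The *Syntax Example* block that follows this heading in the ref guide source lies outside the hunk. The new TestFunctionQuery cases earlier in this commit show the form the documentation is describing, for instance if(eq(field_s,'value'),5,2), which scores 5 when the string field equals the literal and 2 otherwise.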