LUCENE-9002: Query caching leads to absurdly slow queries (#940)

Co-Authored-By: Adrien Grand <jpountz@gmail.com>
Authored by ginger on 2019-11-12 05:07:33 +08:00; committed by Adrien Grand
parent f072747827
commit 8da9e1915f
5 changed files with 167 additions and 42 deletions

lucene/CHANGES.txt

@@ -15,6 +15,9 @@ New Features
 
 Improvements
 
+* LUCENE-9002: Skip costly caching clause in LRUQueryCache if it makes the query
+  many times slower. (Guoqiang Jiang)
+
 * LUCENE-9006: WordDelimiterGraphFilter's catenateAll token is now ordered before any token parts, like WDF did.
   (David Smiley)
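The public API change is the extra constructor argument; the two-argument constructor now fills in a skipCacheFactor of 250, while the tests below pass Float.POSITIVE_INFINITY to keep the old always-cache behavior. A minimal sketch of configuring it explicitly (the class name and sizing values are made up for illustration):

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LRUQueryCache;

public class ConfigureSkipCacheFactor {
  public static void main(String[] args) {
    // Sizing values are illustrative only.
    int maxCachedQueries = 1000;
    long maxRamBytes = 64 * 1024 * 1024; // 64 MB

    // 250 mirrors the new default; Float.POSITIVE_INFINITY restores the
    // previous behavior of never skipping a cacheable clause.
    LRUQueryCache cache = new LRUQueryCache(maxCachedQueries, maxRamBytes,
        context -> true, // cache on every leaf, for simplicity
        250f);
    IndexSearcher.setDefaultQueryCache(cache);
  }
}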

lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java

@@ -102,6 +102,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
   private final Set<Query> mostRecentlyUsedQueries;
   private final Map<IndexReader.CacheKey, LeafCache> cache;
   private final ReentrantLock lock;
+  private final float skipCacheFactor;
 
   // these variables are volatile so that we do not need to sync reads
   // but increments need to be performed under the lock
@@ -115,12 +116,20 @@ public class LRUQueryCache implements QueryCache, Accountable {
    * Expert: Create a new instance that will cache at most <code>maxSize</code>
    * queries with at most <code>maxRamBytesUsed</code> bytes of memory, only on
    * leaves that satisfy {@code leavesToCache}.
+   *
+   * Also, clauses whose cost is {@code skipCacheFactor} times more than the cost of the top-level query
+   * will not be cached in order to not slow down queries too much.
    */
   public LRUQueryCache(int maxSize, long maxRamBytesUsed,
-      Predicate<LeafReaderContext> leavesToCache) {
+      Predicate<LeafReaderContext> leavesToCache, float skipCacheFactor) {
     this.maxSize = maxSize;
     this.maxRamBytesUsed = maxRamBytesUsed;
     this.leavesToCache = leavesToCache;
+    if (skipCacheFactor >= 1 == false) { // NaN >= 1 evaluates false
+      throw new IllegalArgumentException("skipCacheFactor must be no less than 1, got " + skipCacheFactor);
+    }
+    this.skipCacheFactor = skipCacheFactor;
+
     uniqueQueries = new LinkedHashMap<>(16, 0.75f, true);
     mostRecentlyUsedQueries = uniqueQueries.keySet();
     cache = new IdentityHashMap<>();
@@ -142,7 +151,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
    * be cached in order to not hurt latency too much because of caching.
    */
   public LRUQueryCache(int maxSize, long maxRamBytesUsed) {
-    this(maxSize, maxRamBytesUsed, new MinSegmentSizePredicate(10000, .03f));
+    this(maxSize, maxRamBytesUsed, new MinSegmentSizePredicate(10000, .03f), 250);
   }
 
   // pkg-private for testing
@@ -738,8 +747,38 @@ public class LRUQueryCache implements QueryCache, Accountable {
       if (docIdSet == null) {
         if (policy.shouldCache(in.getQuery())) {
-          docIdSet = cache(context);
+          final ScorerSupplier supplier = in.scorerSupplier(context);
+          if (supplier == null) {
+            putIfAbsent(in.getQuery(), DocIdSet.EMPTY, cacheHelper);
+            return null;
+          }
+
+          final long cost = supplier.cost();
+          return new ScorerSupplier() {
+            @Override
+            public Scorer get(long leadCost) throws IOException {
+              // skip cache operation which would slow query down too much
+              if (cost / skipCacheFactor > leadCost) {
+                return supplier.get(leadCost);
+              }
+
+              Scorer scorer = supplier.get(Long.MAX_VALUE);
+              DocIdSet docIdSet = cacheImpl(new DefaultBulkScorer(scorer), context.reader().maxDoc());
+              putIfAbsent(in.getQuery(), docIdSet, cacheHelper);
+              DocIdSetIterator disi = docIdSet.iterator();
+              if (disi == null) {
+                // docIdSet.iterator() is allowed to return null when empty but we want a non-null iterator here
+                disi = DocIdSetIterator.empty();
+              }
+
+              return new ConstantScoreScorer(CachingWrapperWeight.this, 0f, ScoreMode.COMPLETE_NO_SCORES, disi);
+            }
+
+            @Override
+            public long cost() {
+              return cost;
+            }
+          };
         } else {
           return in.scorerSupplier(context);
         }
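The decision has to live inside ScorerSupplier.get(leadCost) because that is the first point where the cost of the cheapest clause driving the intersection is known; building a cache entry means consuming the clause's entire iterator, roughly cost document visits. A self-contained sketch of the heuristic with made-up costs (class and method names are hypothetical, not part of the patch):

public class SkipDecisionSketch {
  // Mirrors the check above: building the cache entry walks the clause's
  // whole iterator (~cost docs), so skip it when that work would dwarf the
  // intersection driven by the cheapest (lead) clause.
  static boolean skipCaching(long cost, float skipCacheFactor, long leadCost) {
    return cost / skipCacheFactor > leadCost;
  }

  public static void main(String[] args) {
    // Made-up costs: a range clause matching ~5M docs, lead term ~1k docs.
    // With the default factor of 250: 5_000_000 / 250 = 20_000 > 1_000 -> skip.
    System.out.println(skipCaching(5_000_000L, 250f, 1_000L));   // true
    // Same clause with a broader lead (~100k docs): 20_000 <= 100_000 -> cache.
    System.out.println(skipCaching(5_000_000L, 250f, 100_000L)); // false
  }
}

With the default factor of 250, a clause is only cached once the lead matches at least 1/250th of the clause's cost, which bounds the caching overhead to a small multiple of the uncached execution.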

lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java

@@ -39,7 +39,9 @@ import java.util.concurrent.atomic.AtomicReference;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
@@ -94,7 +96,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
   };
 
   public void testConcurrency() throws Throwable {
-    final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(20), 1 + random().nextInt(10000), context -> random().nextBoolean());
+    final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(20), 1 + random().nextInt(10000), context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
     Directory dir = newDirectory();
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
     final SearcherFactory searcherFactory = new SearcherFactory() {
@@ -203,7 +205,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     w.addDocument(doc);
     final DirectoryReader reader = w.getReader();
     final IndexSearcher searcher = newSearcher(reader);
-    final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true);
+    final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true, Float.POSITIVE_INFINITY);
 
     final Query blue = new TermQuery(new Term("color", "blue"));
     final Query red = new TermQuery(new Term("color", "red"));
@@ -264,7 +266,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     // different instance yet equal
     final Query query2 = new TermQuery(new Term("color", "blue"));
-    final LRUQueryCache queryCache = new LRUQueryCache(Integer.MAX_VALUE, Long.MAX_VALUE, context -> true);
+    final LRUQueryCache queryCache = new LRUQueryCache(Integer.MAX_VALUE, Long.MAX_VALUE, context -> true, 1);
     searcher.setQueryCache(queryCache);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
@@ -286,7 +288,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
   public void testRamBytesUsedAgreesWithRamUsageTester() throws IOException {
     assumeFalse("LUCENE-7595: RamUsageTester does not work exact in Java 9 (estimations for maps and lists)", Constants.JRE_IS_MINIMUM_JAVA9);
-    final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(10000), context -> random().nextBoolean());
+    final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(10000), context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
     // an accumulator that only sums up memory usage of referenced filters and doc id sets
     final RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() {
       @Override
@@ -412,7 +414,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
   public void testRamBytesUsedConstantEntryOverhead() throws IOException {
     assumeFalse("LUCENE-7595: RamUsageTester does not work exact in Java 9 (estimations for maps and lists)", Constants.JRE_IS_MINIMUM_JAVA9);
-    final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true);
+    final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true, Float.POSITIVE_INFINITY);
     final RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() {
       @Override
@@ -461,7 +463,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
   }
 
   public void testOnUse() throws IOException {
-    final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(1000), context -> random().nextBoolean());
+    final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(1000), context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
     Directory dir = newDirectory();
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
@@ -521,7 +523,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
   }
 
   public void testStats() throws IOException {
-    final LRUQueryCache queryCache = new LRUQueryCache(1, 10000000, context -> true);
+    final LRUQueryCache queryCache = new LRUQueryCache(1, 10000000, context -> true, 1);
     Directory dir = newDirectory();
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
@@ -652,7 +654,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     final AtomicLong ramBytesUsage = new AtomicLong();
     final AtomicLong cacheSize = new AtomicLong();
-    final LRUQueryCache queryCache = new LRUQueryCache(2, 10000000, context -> true) {
+    final LRUQueryCache queryCache = new LRUQueryCache(2, 10000000, context -> true, 1) {
       @Override
       protected void onHit(Object readerCoreKey, Query query) {
         super.onHit(readerCoreKey, query);
@@ -781,7 +783,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     final BooleanQuery.Builder query = new BooleanQuery.Builder();
     query.add(new BoostQuery(expectedCacheKey, 42f), Occur.MUST);
-    final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> random().nextBoolean());
+    final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
     Directory dir = newDirectory();
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
@@ -826,7 +828,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     final IndexSearcher searcher = newSearcher(reader);
     w.close();
-    final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true);
+    final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true, Float.POSITIVE_INFINITY);
     searcher.setQueryCache(queryCache);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
@@ -927,7 +929,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
       iters = atLeast(2000);
     }
-    final LRUQueryCache queryCache = new LRUQueryCache(maxSize, maxRamBytesUsed, context -> random().nextBoolean());
+    final LRUQueryCache queryCache = new LRUQueryCache(maxSize, maxRamBytesUsed, context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
     IndexSearcher uncachedSearcher = null;
     IndexSearcher cachedSearcher = null;
@@ -1009,7 +1011,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     IndexReader reader = w.getReader();
 
     // size of 1 so that 2nd query evicts from the cache
-    final LRUQueryCache queryCache = new LRUQueryCache(1, 10000, context -> true);
+    final LRUQueryCache queryCache = new LRUQueryCache(1, 10000, context -> true, Float.POSITIVE_INFINITY);
     final IndexSearcher searcher = newSearcher(reader);
     searcher.setQueryCache(queryCache);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
@@ -1043,7 +1045,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     IndexReader reader = w.getReader();
 
     // size of 1 byte
-    final LRUQueryCache queryCache = new LRUQueryCache(1, 1, context -> random().nextBoolean());
+    final LRUQueryCache queryCache = new LRUQueryCache(1, 1, context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
     final IndexSearcher searcher = newSearcher(reader);
     searcher.setQueryCache(queryCache);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
@@ -1082,7 +1084,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     try (final IndexReader indexReader = DirectoryReader.open(directory)) {
       final FrequencyCountingPolicy policy = new FrequencyCountingPolicy();
       final IndexSearcher indexSearcher = new IndexSearcher(indexReader);
-      indexSearcher.setQueryCache(new LRUQueryCache(100, 10240, context -> random().nextBoolean()));
+      indexSearcher.setQueryCache(new LRUQueryCache(100, 10240, context -> random().nextBoolean(), Float.POSITIVE_INFINITY));
       indexSearcher.setQueryCachingPolicy(policy);
       final Query foo = new TermQuery(new Term("f", "foo"));
       final Query bar = new TermQuery(new Term("f", "bar"));
@@ -1166,7 +1168,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     LeafReaderContext leaf = searcher.getIndexReader().leaves().get(0);
     AtomicBoolean scorerCalled = new AtomicBoolean();
     AtomicBoolean bulkScorerCalled = new AtomicBoolean();
-    LRUQueryCache cache = new LRUQueryCache(1, Long.MAX_VALUE, context -> true);
+    LRUQueryCache cache = new LRUQueryCache(1, Long.MAX_VALUE, context -> true, Float.POSITIVE_INFINITY);
 
     // test that the bulk scorer is propagated when a scorer should not be cached
     Weight weight = searcher.createWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES, 1);
@@ -1177,16 +1179,6 @@ public class TestLRUQueryCache extends LuceneTestCase {
     assertEquals(false, scorerCalled.get());
     assertEquals(0, cache.getCacheCount());
 
-    // test that the doc id set is computed using the bulk scorer
-    bulkScorerCalled.set(false);
-    weight = searcher.createWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES, 1);
-    weight = new WeightWrapper(weight, scorerCalled, bulkScorerCalled);
-    weight = cache.doCache(weight, ALWAYS_CACHE);
-    weight.scorer(leaf);
-    assertEquals(true, bulkScorerCalled.get());
-    assertEquals(false, scorerCalled.get());
-    assertEquals(1, cache.getCacheCount());
-
     searcher.getIndexReader().close();
     dir.close();
   }
@@ -1197,7 +1189,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     w.addDocument(new Document());
     final DirectoryReader reader = w.getReader();
     final IndexSearcher searcher = newSearcher(reader);
-    final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true) {
+    final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true, Float.POSITIVE_INFINITY) {
       @Override
       protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
         super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed);
@@ -1227,17 +1219,17 @@ public class TestLRUQueryCache extends LuceneTestCase {
     IndexSearcher searcher = newSearcher(reader);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
 
-    LRUQueryCache cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(2, 0f));
+    LRUQueryCache cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(2, 0f), Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
     searcher.count(new DummyQuery());
     assertEquals(0, cache.getCacheCount());
 
-    cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(1, 0f));
+    cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(1, 0f), Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
     searcher.count(new DummyQuery());
     assertEquals(1, cache.getCacheCount());
 
-    cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f));
+    cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f), Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
     searcher.count(new DummyQuery());
     assertEquals(1, cache.getCacheCount());
@@ -1247,7 +1239,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     reader = w.getReader();
     searcher = newSearcher(reader);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
-    cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f));
+    cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f), Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
     searcher.count(new DummyQuery());
     assertEquals(0, cache.getCacheCount());
@@ -1300,7 +1292,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     // don't cache if the reader does not expose a cache helper
     assertNull(reader.leaves().get(0).reader().getCoreCacheHelper());
 
-    LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true);
+    LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true, Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
     assertEquals(0, searcher.count(new DummyQuery()));
     assertEquals(0, cache.getCacheCount());
@@ -1367,7 +1359,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     IndexSearcher searcher = newSearcher(reader);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
 
-    LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true);
+    LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true, Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
 
     assertEquals(0, searcher.count(new NoCacheQuery()));
@@ -1537,7 +1529,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     IndexSearcher searcher = new AssertingIndexSearcher(random(), reader);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
 
-    LRUQueryCache cache = new LRUQueryCache(1, 10000, context -> true);
+    LRUQueryCache cache = new LRUQueryCache(1, 10000, context -> true, Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
 
     DVCacheQuery query = new DVCacheQuery("field");
@@ -1592,7 +1584,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig().setSoftDeletesField("soft_delete");
     IndexWriter w = new IndexWriter(dir, iwc);
-    LRUQueryCache queryCache = new LRUQueryCache(10, 1000 * 1000, ctx -> true);
+    LRUQueryCache queryCache = new LRUQueryCache(10, 1000 * 1000, ctx -> true, Float.POSITIVE_INFINITY);
     IndexSearcher.setDefaultQueryCache(queryCache);
     IndexSearcher.setDefaultQueryCachingPolicy(ALWAYS_CACHE);
@@ -1666,7 +1658,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
     DirectoryReader reader = DirectoryReader.open(w);
     DirectoryReader noCacheReader = new DummyDirectoryReader(reader);
 
-    LRUQueryCache cache = new LRUQueryCache(1, 100000, context -> true);
+    LRUQueryCache cache = new LRUQueryCache(1, 100000, context -> true, Float.POSITIVE_INFINITY);
     IndexSearcher searcher = new AssertingIndexSearcher(random(), reader);
     searcher.setQueryCache(cache);
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
@@ -1697,4 +1689,95 @@ public class TestLRUQueryCache extends LuceneTestCase {
     t.start();
     t.join();
   }
+
+  public void testSkipCachingForRangeQuery() throws IOException {
+    Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+    Document doc1 = new Document();
+    doc1.add(new StringField("name", "tom", Store.YES));
+    doc1.add(new LongPoint("age", 15));
+    doc1.add(new SortedNumericDocValuesField("age", 15));
+    Document doc2 = new Document();
+    doc2.add(new StringField("name", "alice", Store.YES));
+    doc2.add(new LongPoint("age", 20));
+    doc2.add(new SortedNumericDocValuesField("age", 20));
+    w.addDocuments(Arrays.asList(doc1, doc2));
+    final IndexReader reader = w.getReader();
+    final IndexSearcher searcher = newSearcher(reader);
+    searcher.setQueryCachingPolicy(ALWAYS_CACHE);
+    w.close();
+
+    // lead cost is 1, cost of subQuery1 is 1, cost of subQuery2 is 2
+    BooleanQuery.Builder bq = new BooleanQuery.Builder();
+    TermQuery subQuery1 = new TermQuery(new Term("name", "tom"));
+    IndexOrDocValuesQuery subQuery2 = new IndexOrDocValuesQuery(
+        LongPoint.newRangeQuery("age", 10, 30),
+        SortedNumericDocValuesField.newSlowRangeQuery("age", 10, 30));
+    BooleanQuery query = bq.add(subQuery1, Occur.FILTER).add(subQuery2, Occur.FILTER).build();
+    Set<Query> cacheSet = new HashSet<>();
+
+    // only term query is cached
+    final LRUQueryCache partCache = new LRUQueryCache(1000000, 10000000, context -> true, 1);
+    searcher.setQueryCache(partCache);
+    searcher.search(query, 1);
+    cacheSet.add(subQuery1);
+    assertEquals(cacheSet, new HashSet<>(partCache.cachedQueries()));
+
+    // both queries are cached
+    final LRUQueryCache allCache = new LRUQueryCache(1000000, 10000000, context -> true, Float.POSITIVE_INFINITY);
+    searcher.setQueryCache(allCache);
+    searcher.search(query, 1);
+    cacheSet.add(subQuery2);
+    assertEquals(cacheSet, new HashSet<>(allCache.cachedQueries()));
+
+    reader.close();
+    dir.close();
+  }
+
+  public void testSkipCachingForTermQuery() throws IOException {
+    Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+    Document doc1 = new Document();
+    doc1.add(new StringField("name", "tom", Store.YES));
+    doc1.add(new StringField("hobby", "movie", Store.YES));
+    Document doc2 = new Document();
+    doc2.add(new StringField("name", "alice", Store.YES));
+    doc2.add(new StringField("hobby", "book", Store.YES));
+    Document doc3 = new Document();
+    doc3.add(new StringField("name", "alice", Store.YES));
+    doc3.add(new StringField("hobby", "movie", Store.YES));
+    w.addDocuments(Arrays.asList(doc1, doc2, doc3));
+    final IndexReader reader = w.getReader();
+    final IndexSearcher searcher = newSearcher(reader);
+    final UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
+    searcher.setQueryCachingPolicy(policy);
+    w.close();
+
+    // lead cost is 2, cost of subQuery1 is 3, cost of subQuery2 is 2
+    BooleanQuery.Builder inner = new BooleanQuery.Builder();
+    TermQuery innerSubQuery1 = new TermQuery(new Term("hobby", "book"));
+    TermQuery innerSubQuery2 = new TermQuery(new Term("hobby", "movie"));
+    BooleanQuery subQuery1 = inner.add(innerSubQuery1, Occur.SHOULD).add(innerSubQuery2, Occur.SHOULD).build();
+    BooleanQuery.Builder bq = new BooleanQuery.Builder();
+    TermQuery subQuery2 = new TermQuery(new Term("name", "alice"));
+    BooleanQuery query = bq.add(new ConstantScoreQuery(subQuery1), Occur.FILTER).add(subQuery2, Occur.FILTER).build();
+    Set<Query> cacheSet = new HashSet<>();
+
+    // both queries are not cached
+    final LRUQueryCache partCache = new LRUQueryCache(1000000, 10000000, context -> true, 1);
+    searcher.setQueryCache(partCache);
+    searcher.search(query, 1);
+    assertEquals(cacheSet, new HashSet<>(partCache.cachedQueries()));
+
+    // only subQuery1 is cached
+    final LRUQueryCache allCache = new LRUQueryCache(1000000, 10000000, context -> true, Float.POSITIVE_INFINITY);
+    searcher.setQueryCache(allCache);
+    searcher.search(query, 1);
+    cacheSet.add(subQuery1);
+    assertEquals(cacheSet, new HashSet<>(allCache.cachedQueries()));
+
+    reader.close();
+    dir.close();
+  }
 }
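Replaying the arithmetic from testSkipCachingForRangeQuery above: the range clause has cost 2 and the lead term clause cost 1, so with skipCacheFactor = 1 the check 2 / 1 > 1 fires and only the term query is cached, while with an infinite factor the quotient collapses to 0 and both clauses are cached. A tiny standalone replay of that check (hypothetical class name):

public class RangeQuerySkipMath {
  public static void main(String[] args) {
    long rangeCost = 2; // subQuery2 matches both documents
    long leadCost = 1;  // subQuery1 ("name":"tom") matches one document

    // skipCacheFactor = 1: 2 / 1 = 2.0 > 1, so the range clause is skipped
    System.out.println(rangeCost / 1f > leadCost); // true
    // skipCacheFactor = +Inf: 2 / Inf = 0.0 > 1 is false, so it is cached
    System.out.println(rangeCost / Float.POSITIVE_INFINITY > leadCost); // false
  }
}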

lucene/core/src/test/org/apache/lucene/search/TestUsageTrackingFilterCachingPolicy.java

@@ -63,7 +63,7 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
     IndexSearcher searcher = new IndexSearcher(reader);
     UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
-    LRUQueryCache cache = new LRUQueryCache(10, Long.MAX_VALUE, new LRUQueryCache.MinSegmentSizePredicate(1, 0f));
+    LRUQueryCache cache = new LRUQueryCache(10, Long.MAX_VALUE, new LRUQueryCache.MinSegmentSizePredicate(1, 0f), Float.POSITIVE_INFINITY);
     searcher.setQueryCache(cache);
     searcher.setQueryCachingPolicy(policy);

lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java

@@ -1844,7 +1844,7 @@ public abstract class LuceneTestCase extends Assert {
   public static void overrideDefaultQueryCache() {
     // we need to reset the query cache in an @BeforeClass so that tests that
     // instantiate an IndexSearcher in an @BeforeClass method use a fresh new cache
-    IndexSearcher.setDefaultQueryCache(new LRUQueryCache(10000, 1 << 25, context -> true));
+    IndexSearcher.setDefaultQueryCache(new LRUQueryCache(10000, 1 << 25, context -> true, Float.POSITIVE_INFINITY));
     IndexSearcher.setDefaultQueryCachingPolicy(MAYBE_CACHE_POLICY);
   }