LUCENE-8008: Remove unintended changes.

This commit is contained in:
Adrien Grand 2018-03-20 09:52:24 +01:00
parent 65559cb94d
commit 3048e5da22
4 changed files with 42 additions and 99 deletions

View File

@@ -102,7 +102,6 @@ public class LRUQueryCache implements QueryCache, Accountable {
private final int maxSize;
private final long maxRamBytesUsed;
private final Predicate<LeafReaderContext> leavesToCache;
private final float maxCostFactor;
// maps queries that are contained in the cache to a singleton so that this
// cache does not store several copies of the same query
private final Map<Query, Query> uniqueQueries;
@@ -124,14 +123,10 @@ public class LRUQueryCache implements QueryCache, Accountable {
/**
* Expert: Create a new instance that will cache at most <code>maxSize</code>
* queries with at most <code>maxRamBytesUsed</code> bytes of memory, only on
* leaves that satisfy {@code leavesToCache}. Also, only clauses whose cost is
* no more than {@code maxCostFactor} times the cost of the top-level query
* will be cached in order to not slow down queries too much due to caching.
* Pass {@link Float#POSITIVE_INFINITY} to cache regardless of costs.
* leaves that satisfy {@code leavesToCache}.
*/
public LRUQueryCache(int maxSize, long maxRamBytesUsed,
Predicate<LeafReaderContext> leavesToCache,
float maxCostFactor) {
Predicate<LeafReaderContext> leavesToCache) {
this.maxSize = maxSize;
this.maxRamBytesUsed = maxRamBytesUsed;
this.leavesToCache = leavesToCache;
@@ -140,10 +135,6 @@ public class LRUQueryCache implements QueryCache, Accountable {
cache = new IdentityHashMap<>();
lock = new ReentrantLock();
ramBytesUsed = 0;
if (maxCostFactor < 1) {
throw new IllegalArgumentException("maxCostFactor must be no less than 1, got " + maxCostFactor);
}
this.maxCostFactor = maxCostFactor;
}
/**
@@ -160,7 +151,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
* be cached in order to not hurt latency too much because of caching.
*/
public LRUQueryCache(int maxSize, long maxRamBytesUsed) {
this(maxSize, maxRamBytesUsed, new MinSegmentSizePredicate(10000, .03f), 100);
this(maxSize, maxRamBytesUsed, new MinSegmentSizePredicate(10000, .03f));
}
// pkg-private for testing
@@ -749,39 +740,11 @@ public class LRUQueryCache implements QueryCache, Accountable {
}
if (docIdSet == null) {
ScorerSupplier inSupplier = in.scorerSupplier(context);
if (inSupplier == null) {
putIfAbsent(in.getQuery(), context, DocIdSet.EMPTY, cacheHelper);
return null;
}
if (policy.shouldCache(in.getQuery())) {
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
double costFactor = (double) inSupplier.cost() / leadCost;
if (costFactor >= maxCostFactor) {
// too costly, caching might make the query much slower
return inSupplier.get(leadCost);
}
DocIdSet cached = cacheImpl(new DefaultBulkScorer(inSupplier.get(Long.MAX_VALUE)), context.reader().maxDoc());
putIfAbsent(in.getQuery(), context, cached, cacheHelper);
DocIdSetIterator iterator = cached.iterator();
if (iterator == null) {
// DIS.iterator() is allowed to return null when empty but we want a non-null iterator here
iterator = DocIdSetIterator.empty();
}
return new ConstantScoreScorer(CachingWrapperWeight.this, 0f, iterator);
}
@Override
public long cost() {
return inSupplier.cost();
}
};
docIdSet = cache(context);
putIfAbsent(in.getQuery(), context, docIdSet, cacheHelper);
} else {
return inSupplier;
return in.scorerSupplier(context);
}
}

View File

@@ -79,7 +79,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
};
public void testConcurrency() throws Throwable {
final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(20), 1 + random().nextInt(10000), context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(20), 1 + random().nextInt(10000), context -> random().nextBoolean());
Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final SearcherFactory searcherFactory = new SearcherFactory() {
@@ -182,7 +182,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
w.addDocument(doc);
final DirectoryReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true, Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true);
final Query blue = new TermQuery(new Term("color", "blue"));
final Query red = new TermQuery(new Term("color", "red"));
@@ -243,7 +243,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
// different instance yet equal
final Query query2 = new TermQuery(new Term("color", "blue"));
final LRUQueryCache queryCache = new LRUQueryCache(Integer.MAX_VALUE, Long.MAX_VALUE, context -> true, Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(Integer.MAX_VALUE, Long.MAX_VALUE, context -> true);
searcher.setQueryCache(queryCache);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
@@ -265,7 +265,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
public void testRamBytesUsedAgreesWithRamUsageTester() throws IOException {
assumeFalse("LUCENE-7595: RamUsageTester does not work exact in Java 9 (estimations for maps and lists)", Constants.JRE_IS_MINIMUM_JAVA9);
final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(10000), context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(10000), context -> random().nextBoolean());
// an accumulator that only sums up memory usage of referenced filters and doc id sets
final RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() {
@Override
@@ -386,7 +386,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
public void testRamBytesUsedConstantEntryOverhead() throws IOException {
assumeFalse("LUCENE-7595: RamUsageTester does not work exact in Java 9 (estimations for maps and lists)", Constants.JRE_IS_MINIMUM_JAVA9);
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true, Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true);
final RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() {
@Override
@@ -435,7 +435,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
}
public void testOnUse() throws IOException {
final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(1000), context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1 + random().nextInt(5), 1 + random().nextInt(1000), context -> random().nextBoolean());
Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
@@ -495,7 +495,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
}
public void testStats() throws IOException {
final LRUQueryCache queryCache = new LRUQueryCache(1, 10000000, context -> true, Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1, 10000000, context -> true);
Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
@@ -626,7 +626,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
final AtomicLong ramBytesUsage = new AtomicLong();
final AtomicLong cacheSize = new AtomicLong();
final LRUQueryCache queryCache = new LRUQueryCache(2, 10000000, context -> true, Float.POSITIVE_INFINITY) {
final LRUQueryCache queryCache = new LRUQueryCache(2, 10000000, context -> true) {
@Override
protected void onHit(Object readerCoreKey, Query query) {
super.onHit(readerCoreKey, query);
@@ -755,7 +755,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
final BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new BoostQuery(expectedCacheKey, 42f), Occur.MUST);
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> random().nextBoolean());
Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
Document doc = new Document();
@@ -795,11 +795,12 @@ public class TestLRUQueryCache extends LuceneTestCase {
doc.add(new StringField("foo", "bar", Store.YES));
doc.add(new StringField("foo", "quux", Store.YES));
w.addDocument(doc);
w.commit();
final IndexReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
w.close();
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true, Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true);
searcher.setQueryCache(queryCache);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
@@ -903,7 +904,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
iters = atLeast(2000);
}
final LRUQueryCache queryCache = new LRUQueryCache(maxSize, maxRamBytesUsed, context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(maxSize, maxRamBytesUsed, context -> random().nextBoolean());
IndexSearcher uncachedSearcher = null;
IndexSearcher cachedSearcher = null;
@@ -980,7 +981,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
IndexReader reader = w.getReader();
// size of 1 so that 2nd query evicts from the cache
final LRUQueryCache queryCache = new LRUQueryCache(1, 10000, context -> true, Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1, 10000, context -> true);
final IndexSearcher searcher = newSearcher(reader);
searcher.setQueryCache(queryCache);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
@@ -1014,7 +1015,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
IndexReader reader = w.getReader();
// size of 1 byte
final LRUQueryCache queryCache = new LRUQueryCache(1, 1, context -> random().nextBoolean(), Float.POSITIVE_INFINITY);
final LRUQueryCache queryCache = new LRUQueryCache(1, 1, context -> random().nextBoolean());
final IndexSearcher searcher = newSearcher(reader);
searcher.setQueryCache(queryCache);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
@@ -1053,7 +1054,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
try (final IndexReader indexReader = DirectoryReader.open(directory)) {
final FrequencyCountingPolicy policy = new FrequencyCountingPolicy();
final IndexSearcher indexSearcher = new IndexSearcher(indexReader);
indexSearcher.setQueryCache(new LRUQueryCache(100, 10240, context -> random().nextBoolean(), Float.POSITIVE_INFINITY));
indexSearcher.setQueryCache(new LRUQueryCache(100, 10240, context -> random().nextBoolean()));
indexSearcher.setQueryCachingPolicy(policy);
final Query foo = new TermQuery(new Term("f", "foo"));
final Query bar = new TermQuery(new Term("f", "bar"));
@@ -1137,7 +1138,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
LeafReaderContext leaf = searcher.getIndexReader().leaves().get(0);
AtomicBoolean scorerCalled = new AtomicBoolean();
AtomicBoolean bulkScorerCalled = new AtomicBoolean();
LRUQueryCache cache = new LRUQueryCache(1, Long.MAX_VALUE, context -> true, Float.POSITIVE_INFINITY);
LRUQueryCache cache = new LRUQueryCache(1, Long.MAX_VALUE, context -> true);
// test that the bulk scorer is propagated when a scorer should not be cached
Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES);
@@ -1148,6 +1149,16 @@ public class TestLRUQueryCache extends LuceneTestCase {
assertEquals(false, scorerCalled.get());
assertEquals(0, cache.getCacheCount());
// test that the doc id set is computed using the bulk scorer
bulkScorerCalled.set(false);
weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), ScoreMode.COMPLETE_NO_SCORES);
weight = new WeightWrapper(weight, scorerCalled, bulkScorerCalled);
weight = cache.doCache(weight, QueryCachingPolicy.ALWAYS_CACHE);
weight.scorer(leaf);
assertEquals(true, bulkScorerCalled.get());
assertEquals(false, scorerCalled.get());
assertEquals(1, cache.getCacheCount());
searcher.getIndexReader().close();
dir.close();
}
@@ -1158,7 +1169,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
w.addDocument(new Document());
final DirectoryReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true, Float.POSITIVE_INFINITY) {
final LRUQueryCache queryCache = new LRUQueryCache(2, 100000, context -> true) {
@Override
protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed);
@@ -1188,17 +1199,17 @@ public class TestLRUQueryCache extends LuceneTestCase {
IndexSearcher searcher = newSearcher(reader);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
LRUQueryCache cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(2, 0f), Float.POSITIVE_INFINITY);
LRUQueryCache cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(2, 0f));
searcher.setQueryCache(cache);
searcher.count(new DummyQuery());
assertEquals(0, cache.getCacheCount());
cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(1, 0f), Float.POSITIVE_INFINITY);
cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(1, 0f));
searcher.setQueryCache(cache);
searcher.count(new DummyQuery());
assertEquals(1, cache.getCacheCount());
cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f), Float.POSITIVE_INFINITY);
cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f));
searcher.setQueryCache(cache);
searcher.count(new DummyQuery());
assertEquals(1, cache.getCacheCount());
@@ -1208,7 +1219,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
reader = w.getReader();
searcher = newSearcher(reader);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f), Float.POSITIVE_INFINITY);
cache = new LRUQueryCache(2, 10000, new LRUQueryCache.MinSegmentSizePredicate(0, .6f));
searcher.setQueryCache(cache);
searcher.count(new DummyQuery());
assertEquals(0, cache.getCacheCount());
@@ -1261,7 +1272,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
// don't cache if the reader does not expose a cache helper
assertNull(reader.leaves().get(0).reader().getCoreCacheHelper());
LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true, Float.POSITIVE_INFINITY);
LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true);
searcher.setQueryCache(cache);
assertEquals(0, searcher.count(new DummyQuery()));
assertEquals(0, cache.getCacheCount());
@@ -1323,7 +1334,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
IndexSearcher searcher = newSearcher(reader);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true, Float.POSITIVE_INFINITY);
LRUQueryCache cache = new LRUQueryCache(2, 10000, context -> true);
searcher.setQueryCache(cache);
assertEquals(0, searcher.count(new NoCacheQuery()));
@@ -1485,7 +1496,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
IndexSearcher searcher = new AssertingIndexSearcher(random(), reader);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
LRUQueryCache cache = new LRUQueryCache(1, 10000, context -> true, Float.POSITIVE_INFINITY);
LRUQueryCache cache = new LRUQueryCache(1, 10000, context -> true);
searcher.setQueryCache(cache);
DVCacheQuery query = new DVCacheQuery("field");
@@ -1535,35 +1546,4 @@ public class TestLRUQueryCache extends LuceneTestCase {
dir.close();
}
// Exercises the maxCostFactor feature that this commit (LUCENE-8008) removes:
// a cache built with maxCostFactor = 1.5 must skip caching any FILTER clause
// whose cost exceeds 1.5x the cost of the overall (intersected) query.
// NOTE(review): this method is reproduced from a diff hunk (it is the test
// being deleted); the enclosing test class is not visible here.
public void testSkipCostlyQueries() throws IOException {
Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
// doc1 matches both "bar" and "quux"; doc2 matches only "bar".
// Hence term "bar" matches 2 docs and term "quux" matches 1 doc.
Document doc1 = new Document();
doc1.add(new StringField("foo", "bar", Store.YES));
doc1.add(new StringField("foo", "quux", Store.YES));
Document doc2 = new Document();
doc2.add(new StringField("foo", "bar", Store.YES));
w.addDocuments(Arrays.asList(doc1, doc2));
final IndexReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
w.close();
// maxCostFactor = 1.5: clauses more than 1.5x as costly as the lead are not cached.
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000, context -> true, 1.5f);
searcher.setQueryCache(queryCache);
// ALWAYS_CACHE so that only the cost check can prevent caching.
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(new TermQuery(new Term("foo", "bar")), Occur.FILTER);
bq.add(new TermQuery(new Term("foo", "quux")), Occur.FILTER);
// Sanity check: nothing cached before the search runs.
assertEquals(Collections.emptySet(), new HashSet<>(queryCache.cachedQueries()));
searcher.search(bq.build(), 1);
// bar is not cached since its cost is 2 which is 2x the cost of the BooleanQuery
// (the intersection is led by "quux" with cost 1, and 2/1 >= 1.5), so only
// the cheap "quux" clause ends up in the cache.
assertEquals(Collections.singleton(new TermQuery(new Term("foo", "quux"))), new HashSet<>(queryCache.cachedQueries()));
reader.close();
// NOTE(review): w was already closed above after getReader(); this second
// close relies on close() being a no-op on a closed writer — TODO confirm.
w.close();
dir.close();
}
}

View File

@@ -63,7 +63,7 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
IndexSearcher searcher = new IndexSearcher(reader);
UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
LRUQueryCache cache = new LRUQueryCache(10, Long.MAX_VALUE, new LRUQueryCache.MinSegmentSizePredicate(1, 0f), Float.POSITIVE_INFINITY);
LRUQueryCache cache = new LRUQueryCache(10, Long.MAX_VALUE, new LRUQueryCache.MinSegmentSizePredicate(1, 0f));
searcher.setQueryCache(cache);
searcher.setQueryCachingPolicy(policy);

View File

@@ -1793,7 +1793,7 @@ public abstract class LuceneTestCase extends Assert {
public static void overrideDefaultQueryCache() {
// we need to reset the query cache in an @BeforeClass so that tests that
// instantiate an IndexSearcher in an @BeforeClass method use a fresh new cache
IndexSearcher.setDefaultQueryCache(new LRUQueryCache(10000, 1 << 25, context -> true, random().nextBoolean() ? 1.1f : Float.POSITIVE_INFINITY));
IndexSearcher.setDefaultQueryCache(new LRUQueryCache(10000, 1 << 25, context -> true));
IndexSearcher.setDefaultQueryCachingPolicy(MAYBE_CACHE_POLICY);
}