mirror of https://github.com/apache/lucene.git
cut test over to SMS and also try to prevent GC from clearing CachingWrapperFilter's cache entry
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@989004 13f79535-47bb-0310-9956-ffa450edef68
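As the diff below shows, the test now configures its RandomIndexWriter with a SerialMergeScheduler (the "SMS" of the commit message), so merges run on the indexing thread and the previous ConcurrentMergeScheduler.sync() bookkeeping can be dropped; it also keeps a strong reference, oldReader, to the previously opened reader, since CachingWrapperFilter caches per reader in a WeakHashMap and a garbage collection could otherwise drop the cached entry before the cache-hit assertion runs (see the NOTE comments in the diff). A minimal illustrative sketch of that WeakHashMap behavior appears after the diff.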
parent 4ea872810f
commit 481043e0c5
@@ -24,8 +24,8 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.MergeScheduler;
-import org.apache.lucene.index.ConcurrentMergeScheduler;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.index.SerialMergeScheduler;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.SlowMultiReaderWrapper;
 import org.apache.lucene.store.Directory;
@@ -160,15 +160,8 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
 
   public void testEnforceDeletions() throws Exception {
     Directory dir = newDirectory(rand);
-    RandomIndexWriter writer = new RandomIndexWriter(rand, dir);
-
-    MergeScheduler ms = writer.w.getMergeScheduler();
-    ConcurrentMergeScheduler cms;
-    if (ms instanceof ConcurrentMergeScheduler) {
-      cms = (ConcurrentMergeScheduler) ms;
-    } else {
-      cms = null;
-    }
+    RandomIndexWriter writer = new RandomIndexWriter(rand, dir,
+        newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new SerialMergeScheduler()));
 
     // NOTE: cannot use writer.getReader because RIW (on
     // flipping a coin) may give us a newly opened reader,
@@ -182,9 +175,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     doc.add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
     writer.addDocument(doc);
 
-    if (cms != null) {
-      cms.sync();
-    }
     reader = refreshReader(reader);
     searcher = new IndexSearcher(reader);
 
@@ -205,9 +195,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     // now delete the doc, refresh the reader, and see that it's not there
     writer.deleteDocuments(new Term("id", "1"));
 
-    if (cms != null) {
-      cms.sync();
-    }
     reader = refreshReader(reader);
     searcher = new IndexSearcher(reader);
 
@@ -223,27 +210,26 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
 
     writer.addDocument(doc);
 
-    if (cms != null) {
-      cms.sync();
-    }
     reader = refreshReader(reader);
     searcher = new IndexSearcher(reader);
 
     docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
 
     assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits);
 
     constantScore = new ConstantScoreQuery(filter);
     docs = searcher.search(constantScore, 1);
     assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);
 
+    // NOTE: important to hold ref here so GC doesn't clear
+    // the cache entry! Else the assert below may sometimes
+    // fail:
+    IndexReader oldReader = reader;
+
     // make sure we get a cache hit when we reopen reader
     // that had no change to deletions
-    if (cms != null) {
-      cms.sync();
-    }
-    IndexReader newReader = refreshReader(reader);
-    assertTrue(reader != newReader);
-    reader = newReader;
+    reader = refreshReader(reader);
+    assertTrue(reader != oldReader);
     searcher = new IndexSearcher(reader);
     int missCount = filter.missCount;
     docs = searcher.search(constantScore, 1);
@@ -253,9 +239,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     // now delete the doc, refresh the reader, and see that it's not there
     writer.deleteDocuments(new Term("id", "1"));
 
-    if (cms != null) {
-      cms.sync();
-    }
     reader = refreshReader(reader);
     searcher = new IndexSearcher(reader);
 
@@ -271,9 +254,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.DYNAMIC);
 
     writer.addDocument(doc);
-    if (cms != null) {
-      cms.sync();
-    }
     reader = refreshReader(reader);
     searcher = new IndexSearcher(reader);
 
@@ -286,9 +266,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     // now delete the doc, refresh the reader, and see that it's not there
     writer.deleteDocuments(new Term("id", "1"));
 
-    if (cms != null) {
-      cms.sync();
-    }
     reader = refreshReader(reader);
     searcher = new IndexSearcher(reader);
 
@@ -301,6 +278,13 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
 
     // doesn't count as a miss
     assertEquals(missCount, filter.missCount);
+
+    // NOTE: silliness to make sure JRE does not optimize
+    // away our holding onto oldReader to prevent
+    // CachingWrapperFilter's WeakHashMap from dropping the
+    // entry:
+    assertTrue(oldReader != null);
+
     reader.close();
     writer.close();
     dir.close();
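For illustration only (not part of the commit; the class and variable names below are made up): a WeakHashMap keyed on an object behaves like CachingWrapperFilter's per-reader cache in that, once the last strong reference to the key is gone, the entry can silently disappear after a garbage collection. That is exactly what holding oldReader, and giving it a late use via the final assertTrue(oldReader != null), guards against.

import java.util.Map;
import java.util.WeakHashMap;

public class WeakCacheSketch {
  public static void main(String[] args) throws Exception {
    // Stand-in for CachingWrapperFilter's reader-keyed cache.
    Map<Object, String> cache = new WeakHashMap<Object, String>();

    Object reader = new Object();           // stands in for an IndexReader
    cache.put(reader, "cached filter bits");

    Object oldReader = reader;              // strong ref keeps the entry alive
    reader = null;
    System.gc();                            // only a hint; collection may not run
    Thread.sleep(100);
    System.out.println("with strong ref: " + cache.size());    // expect 1

    oldReader = null;                       // now nothing keeps the key reachable
    System.gc();
    Thread.sleep(100);
    System.out.println("without strong ref: " + cache.size()); // often 0
  }
}

In the test the same idea shows up twice: oldReader is kept as a local so the cache entry for the old reader survives the reopen, and the closing assertTrue(oldReader != null) gives the variable a use late in the method so the runtime cannot treat it as dead earlier and let the entry be collected.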