LUCENE-4243: MockDirectoryWrapper synchronizes too much

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1363971 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2012-07-20 22:09:26 +00:00
parent 0ac72088e8
commit 22b5c84800
36 changed files with 339 additions and 177 deletions

View File

@ -29,7 +29,6 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util._TestUtil; import org.apache.lucene.util._TestUtil;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -45,7 +44,7 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {
// create a dummy index under inputDir // create a dummy index under inputDir
inputDir = new File(testDir, "input"); inputDir = new File(testDir, "input");
MockDirectoryWrapper tmpDir = newFSDirectory(inputDir); Directory tmpDir = newFSDirectory(inputDir);
try { try {
IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)); IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
for (int i = 0; i < 10; i++) { for (int i = 0; i < 10; i++) {

View File

@ -59,7 +59,7 @@ public class TestExternalCodecs extends LuceneTestCase {
System.out.println("TEST: NUM_DOCS=" + NUM_DOCS); System.out.println("TEST: NUM_DOCS=" + NUM_DOCS);
} }
MockDirectoryWrapper dir = newDirectory(); BaseDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // we use a custom codec provider dir.setCheckIndexOnClose(false); // we use a custom codec provider
IndexWriter w = new IndexWriter( IndexWriter w = new IndexWriter(
dir, dir,

View File

@ -85,7 +85,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
} }
public void testSubclassConcurrentMergeScheduler() throws IOException { public void testSubclassConcurrentMergeScheduler() throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
dir.failOn(new FailOnlyOnMerge()); dir.failOn(new FailOnlyOnMerge());
Document doc = new Document(); Document doc = new Document();

View File

@ -37,6 +37,7 @@ import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil; import org.apache.lucene.util._TestUtil;
@ -54,7 +55,7 @@ public class Test10KPulsings extends LuceneTestCase {
Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(1)); Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(1));
File f = _TestUtil.getTempDir("10kpulsed"); File f = _TestUtil.getTempDir("10kpulsed");
MockDirectoryWrapper dir = newFSDirectory(f); BaseDirectoryWrapper dir = newFSDirectory(f);
dir.setCheckIndexOnClose(false); // we do this ourselves explicitly dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
@ -105,7 +106,7 @@ public class Test10KPulsings extends LuceneTestCase {
Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(freqCutoff)); Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(freqCutoff));
File f = _TestUtil.getTempDir("10knotpulsed"); File f = _TestUtil.getTempDir("10knotpulsed");
MockDirectoryWrapper dir = newFSDirectory(f); BaseDirectoryWrapper dir = newFSDirectory(f);
dir.setCheckIndexOnClose(false); // we do this ourselves explicitly dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));

View File

@ -33,6 +33,7 @@ import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
@ -84,7 +85,7 @@ public class TestPulsingReuse extends LuceneTestCase {
public void testNestedPulsing() throws Exception { public void testNestedPulsing() throws Exception {
// we always run this test with pulsing codec. // we always run this test with pulsing codec.
Codec cp = _TestUtil.alwaysPostingsFormat(new NestedPulsingPostingsFormat()); Codec cp = _TestUtil.alwaysPostingsFormat(new NestedPulsingPostingsFormat());
MockDirectoryWrapper dir = newDirectory(); BaseDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // will do this ourselves, custom codec dir.setCheckIndexOnClose(false); // will do this ourselves, custom codec
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));

View File

@ -25,6 +25,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil; import org.apache.lucene.util._TestUtil;
@ -39,8 +40,10 @@ public class Test2BPostings extends LuceneTestCase {
@Nightly @Nightly
public void test() throws Exception { public void test() throws Exception {
MockDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BPostings")); BaseDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BPostings"));
dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
dir.setCheckIndexOnClose(false); // don't double-checkindex dir.setCheckIndexOnClose(false); // don't double-checkindex
IndexWriter w = new IndexWriter(dir, IndexWriter w = new IndexWriter(dir,

View File

@ -146,9 +146,11 @@ public class Test2BTerms extends LuceneTestCase {
List<BytesRef> savedTerms = null; List<BytesRef> savedTerms = null;
MockDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BTerms")); BaseDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
//MockDirectoryWrapper dir = newFSDirectory(new File("/p/lucene/indices/2bindex")); //MockDirectoryWrapper dir = newFSDirectory(new File("/p/lucene/indices/2bindex"));
dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
dir.setCheckIndexOnClose(false); // don't double-checkindex dir.setCheckIndexOnClose(false); // don't double-checkindex
if (true) { if (true) {

View File

@ -51,6 +51,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
@ -1168,7 +1169,7 @@ public class TestAddIndexes extends LuceneTestCase {
* simple test that ensures we getting expected exceptions * simple test that ensures we getting expected exceptions
*/ */
public void testAddIndexMissingCodec() throws IOException { public void testAddIndexMissingCodec() throws IOException {
MockDirectoryWrapper toAdd = newDirectory(); BaseDirectoryWrapper toAdd = newDirectory();
// Disable checkIndex, else we get an exception because // Disable checkIndex, else we get an exception because
// of the unregistered codec: // of the unregistered codec:
toAdd.setCheckIndexOnClose(false); toAdd.setCheckIndexOnClose(false);

View File

@ -55,6 +55,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
@ -177,7 +178,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
} }
File oldIndxeDir = _TestUtil.getTempDir(unsupportedNames[i]); File oldIndxeDir = _TestUtil.getTempDir(unsupportedNames[i]);
_TestUtil.unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), oldIndxeDir); _TestUtil.unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), oldIndxeDir);
MockDirectoryWrapper dir = newFSDirectory(oldIndxeDir); BaseDirectoryWrapper dir = newFSDirectory(oldIndxeDir);
// don't checkindex, these are intentionally not supported // don't checkindex, these are intentionally not supported
dir.setCheckIndexOnClose(false); dir.setCheckIndexOnClose(false);

View File

@ -23,6 +23,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
@ -67,7 +68,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// Make sure running BG merges still work fine even when // Make sure running BG merges still work fine even when
// we are hitting exceptions during flushing. // we are hitting exceptions during flushing.
public void testFlushExceptions() throws IOException { public void testFlushExceptions() throws IOException {
MockDirectoryWrapper directory = newDirectory(); MockDirectoryWrapper directory = newMockDirectory();
FailOnlyOnFlush failure = new FailOnlyOnFlush(); FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.failOn(failure); directory.failOn(failure);
@ -120,7 +121,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// Test that deletes committed after a merge started and // Test that deletes committed after a merge started and
// before it finishes, are correctly merged back: // before it finishes, are correctly merged back:
public void testDeleteMerging() throws IOException { public void testDeleteMerging() throws IOException {
MockDirectoryWrapper directory = newDirectory(); Directory directory = newDirectory();
LogDocMergePolicy mp = new LogDocMergePolicy(); LogDocMergePolicy mp = new LogDocMergePolicy();
// Force degenerate merging so we can get a mix of // Force degenerate merging so we can get a mix of
@ -164,7 +165,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
} }
public void testNoExtraFiles() throws IOException { public void testNoExtraFiles() throws IOException {
MockDirectoryWrapper directory = newDirectory(); Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())) TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2)); .setMaxBufferedDocs(2));
@ -195,7 +196,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
} }
public void testNoWaitClose() throws IOException { public void testNoWaitClose() throws IOException {
MockDirectoryWrapper directory = newDirectory(); Directory directory = newDirectory();
Document doc = new Document(); Document doc = new Document();
Field idField = newStringField("id", "", Field.Store.YES); Field idField = newStringField("id", "", Field.Store.YES);
doc.add(idField); doc.add(idField);

View File

@ -30,7 +30,7 @@ import org.apache.lucene.document.Document;
public class TestCrash extends LuceneTestCase { public class TestCrash extends LuceneTestCase {
private IndexWriter initIndex(Random random, boolean initialCommit) throws IOException { private IndexWriter initIndex(Random random, boolean initialCommit) throws IOException {
return initIndex(random, newDirectory(random), initialCommit); return initIndex(random, newMockDirectory(random), initialCommit);
} }
private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException { private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException {

View File

@ -29,7 +29,6 @@ import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LineFileDocs;
@ -44,7 +43,7 @@ public class TestCustomNorms extends LuceneTestCase {
public void testFloatNorms() throws IOException { public void testFloatNorms() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())); new MockAnalyzer(random()));
Similarity provider = new MySimProvider(); Similarity provider = new MySimProvider();
@ -85,7 +84,7 @@ public class TestCustomNorms extends LuceneTestCase {
} }
public void testExceptionOnRandomType() throws IOException { public void testExceptionOnRandomType() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())); new MockAnalyzer(random()));
Similarity provider = new MySimProvider(); Similarity provider = new MySimProvider();

View File

@ -89,7 +89,7 @@ public class TestDocTermOrds extends LuceneTestCase {
} }
public void testRandom() throws Exception { public void testRandom() throws Exception {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
final int NUM_TERMS = atLeast(20); final int NUM_TERMS = atLeast(20);
final Set<BytesRef> terms = new HashSet<BytesRef>(); final Set<BytesRef> terms = new HashSet<BytesRef>();
@ -176,7 +176,7 @@ public class TestDocTermOrds extends LuceneTestCase {
} }
public void testRandomWithPrefix() throws Exception { public void testRandomWithPrefix() throws Exception {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
final Set<String> prefixes = new HashSet<String>(); final Set<String> prefixes = new HashSet<String>();
final int numPrefix = _TestUtil.nextInt(random(), 2, 7); final int numPrefix = _TestUtil.nextInt(random(), 2, 7);

View File

@ -26,7 +26,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
@ -148,7 +148,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
Directory target = newDirectory(); Directory target = newDirectory();
// We mess with the postings so this can fail: // We mess with the postings so this can fail:
((MockDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false); ((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader reader = new TestReader(DirectoryReader.open(directory)); IndexReader reader = new TestReader(DirectoryReader.open(directory));

View File

@ -231,7 +231,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
final int numDocumentsToIndex = 50 + random().nextInt(50); final int numDocumentsToIndex = 50 + random().nextInt(50);
for (int i = 0; i < numThreads.length; i++) { for (int i = 0; i < numThreads.length; i++) {
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex); AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
// mock a very slow harddisk sometimes here so that flushing is very slow // mock a very slow harddisk sometimes here so that flushing is very slow
dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES); dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,

View File

@ -37,7 +37,7 @@ public class TestForTooMuchCloning extends LuceneTestCase {
// NOTE: if we see a fail on this test with "NestedPulsing" its because its // NOTE: if we see a fail on this test with "NestedPulsing" its because its
// reuse isnt perfect (but reasonable). see TestPulsingReuse.testNestedPulsing // reuse isnt perfect (but reasonable). see TestPulsingReuse.testNestedPulsing
// for more details // for more details
final MockDirectoryWrapper dir = newDirectory(); final MockDirectoryWrapper dir = newMockDirectory();
final TieredMergePolicy tmp = new TieredMergePolicy(); final TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setMaxMergeAtOnce(2); tmp.setMaxMergeAtOnce(2);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, final RandomIndexWriter w = new RandomIndexWriter(random(), dir,

View File

@ -39,8 +39,10 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestIndexFileDeleter extends LuceneTestCase { public class TestIndexFileDeleter extends LuceneTestCase {
public void testDeleteLeftoverFiles() throws IOException { public void testDeleteLeftoverFiles() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
dir.setPreventDoubleWrite(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
}
LogMergePolicy mergePolicy = newLogMergePolicy(true, 10); LogMergePolicy mergePolicy = newLogMergePolicy(true, 10);
mergePolicy.setNoCFSRatio(1); // This test expects all of its segments to be in CFS mergePolicy.setNoCFSRatio(1); // This test expects all of its segments to be in CFS

View File

@ -213,7 +213,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIndexNoDocuments() throws IOException { public void testIndexNoDocuments() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.commit(); writer.commit();
writer.close(); writer.close();
@ -235,7 +235,7 @@ public class TestIndexWriter extends LuceneTestCase {
} }
public void testManyFields() throws IOException { public void testManyFields() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
for(int j=0;j<100;j++) { for(int j=0;j<100;j++) {
Document doc = new Document(); Document doc = new Document();
@ -265,7 +265,7 @@ public class TestIndexWriter extends LuceneTestCase {
} }
public void testSmallRAMBuffer() throws IOException { public void testSmallRAMBuffer() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter( IndexWriter writer = new IndexWriter(
dir, dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())). newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@ -405,7 +405,7 @@ public class TestIndexWriter extends LuceneTestCase {
} }
public void testDiverseDocs() throws IOException { public void testDiverseDocs() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.5)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.5));
int n = atLeast(1); int n = atLeast(1);
for(int i=0;i<n;i++) { for(int i=0;i<n;i++) {
@ -454,7 +454,7 @@ public class TestIndexWriter extends LuceneTestCase {
} }
public void testEnablingNorms() throws IOException { public void testEnablingNorms() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
// Enable norms for only 1 doc, pre flush // Enable norms for only 1 doc, pre flush
FieldType customType = new FieldType(TextField.TYPE_STORED); FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -510,7 +510,7 @@ public class TestIndexWriter extends LuceneTestCase {
} }
public void testHighFreqTerm() throws IOException { public void testHighFreqTerm() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01)); TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01));
// Massive doc that has 128 K a's // Massive doc that has 128 K a's

View File

@ -93,7 +93,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
* and add docs to it. * and add docs to it.
*/ */
public void testCommitOnCloseAbort() throws IOException { public void testCommitOnCloseAbort() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
for (int i = 0; i < 14; i++) { for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer); TestIndexWriter.addDoc(writer);
@ -139,7 +139,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// On abort, writer in fact may write to the same // On abort, writer in fact may write to the same
// segments_N file: // segments_N file:
dir.setPreventDoubleWrite(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
}
for(int i=0;i<12;i++) { for(int i=0;i<12;i++) {
for(int j=0;j<17;j++) { for(int j=0;j<17;j++) {
@ -179,7 +181,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
final String idFormat = _TestUtil.getPostingsFormat("id"); final String idFormat = _TestUtil.getPostingsFormat("id");
final String contentFormat = _TestUtil.getPostingsFormat("content"); final String contentFormat = _TestUtil.getPostingsFormat("content");
assumeFalse("This test cannot run with Memory codec", idFormat.equals("Memory") || contentFormat.equals("Memory")); assumeFalse("This test cannot run with Memory codec", idFormat.equals("Memory") || contentFormat.equals("Memory"));
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
Analyzer analyzer; Analyzer analyzer;
if (random().nextBoolean()) { if (random().nextBoolean()) {
// no payloads // no payloads
@ -258,11 +260,13 @@ public class TestIndexWriterCommit extends LuceneTestCase {
* and close(). * and close().
*/ */
public void testCommitOnCloseForceMerge() throws IOException { public void testCommitOnCloseForceMerge() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
// Must disable throwing exc on double-write: this // Must disable throwing exc on double-write: this
// test uses IW.rollback which easily results in // test uses IW.rollback which easily results in
// writing to same file more than once // writing to same file more than once
dir.setPreventDoubleWrite(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
}
IndexWriter writer = new IndexWriter( IndexWriter writer = new IndexWriter(
dir, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@ -543,8 +547,10 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// LUCENE-1274: test writer.prepareCommit() // LUCENE-1274: test writer.prepareCommit()
public void testPrepareCommitRollback() throws IOException { public void testPrepareCommitRollback() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
dir.setPreventDoubleWrite(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
}
IndexWriter writer = new IndexWriter( IndexWriter writer = new IndexWriter(
dir, dir,

View File

@ -426,7 +426,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
int END_COUNT = 144; int END_COUNT = 144;
// First build up a starting index: // First build up a starting index:
MockDirectoryWrapper startDir = newDirectory(); MockDirectoryWrapper startDir = newMockDirectory();
// TODO: find the resource leak that only occurs sometimes here. // TODO: find the resource leak that only occurs sometimes here.
startDir.setNoDeleteOpenFile(false); startDir.setNoDeleteOpenFile(false);
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
@ -689,7 +689,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
"Venice has lots of canals" }; "Venice has lots of canals" };
String[] text = { "Amsterdam", "Venice" }; String[] text = { "Amsterdam", "Venice" };
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy())); TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
@ -814,7 +814,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
"Venice has lots of canals" }; "Venice has lots of canals" };
String[] text = { "Amsterdam", "Venice" }; String[] text = { "Amsterdam", "Venice" };
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
modifier.commit(); modifier.commit();
dir.failOn(failure.reset()); dir.failOn(failure.reset());

View File

@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
@ -221,7 +222,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (VERBOSE) { if (VERBOSE) {
System.out.println("\nTEST: start testRandomExceptions"); System.out.println("\nTEST: start testRandomExceptions");
} }
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random()); MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases. analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
@ -265,7 +266,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
} }
public void testRandomExceptionsThreads() throws Throwable { public void testRandomExceptionsThreads() throws Throwable {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random()); MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases. analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer) MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
@ -556,7 +557,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1072: make sure an errant exception on flushing // LUCENE-1072: make sure an errant exception on flushing
// one segment only takes out those docs in that one flush // one segment only takes out those docs in that one flush
public void testDocumentsWriterAbort() throws IOException { public void testDocumentsWriterAbort() throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
FailOnlyOnFlush failure = new FailOnlyOnFlush(); FailOnlyOnFlush failure = new FailOnlyOnFlush();
failure.setDoFail(); failure.setDoFail();
dir.failOn(failure); dir.failOn(failure);
@ -597,7 +598,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (VERBOSE) { if (VERBOSE) {
System.out.println("TEST: cycle i=" + i); System.out.println("TEST: cycle i=" + i);
} }
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));
// don't allow a sudden merge to clean up the deleted // don't allow a sudden merge to clean up the deleted
@ -692,7 +693,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
final int NUM_ITER = 100; final int NUM_ITER = 100;
for(int i=0;i<2;i++) { for(int i=0;i<2;i++) {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
{ {
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
@ -822,7 +823,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1044: test exception during sync // LUCENE-1044: test exception during sync
public void testExceptionDuringSync() throws IOException { public void testExceptionDuringSync() throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
FailOnlyInSync failure = new FailOnlyInSync(); FailOnlyInSync failure = new FailOnlyInSync();
dir.failOn(failure); dir.failOn(failure);
@ -908,7 +909,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}; };
for (FailOnlyInCommit failure : failures) { for (FailOnlyInCommit failure : failures) {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
dir.setFailOnCreateOutput(false); dir.setFailOnCreateOutput(false);
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))); TEST_VERSION_CURRENT, new MockAnalyzer(random())));
@ -1076,7 +1077,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// latest segments file and make sure we get an // latest segments file and make sure we get an
// IOException trying to open the index: // IOException trying to open the index:
public void testSimulatedCorruptIndex1() throws IOException { public void testSimulatedCorruptIndex1() throws IOException {
MockDirectoryWrapper dir = newDirectory(); BaseDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // we are corrupting it! dir.setCheckIndexOnClose(false); // we are corrupting it!
IndexWriter writer = null; IndexWriter writer = null;
@ -1124,7 +1125,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// files and make sure we get an IOException trying to // files and make sure we get an IOException trying to
// open the index: // open the index:
public void testSimulatedCorruptIndex2() throws IOException { public void testSimulatedCorruptIndex2() throws IOException {
MockDirectoryWrapper dir = newDirectory(); BaseDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // we are corrupting it! dir.setCheckIndexOnClose(false); // we are corrupting it!
IndexWriter writer = null; IndexWriter writer = null;
@ -1174,8 +1175,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// gracefully fallback to the previous segments file), // gracefully fallback to the previous segments file),
// and that we can add to the index: // and that we can add to the index:
public void testSimulatedCrashedWriter() throws IOException { public void testSimulatedCrashedWriter() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
dir.setPreventDoubleWrite(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
}
IndexWriter writer = null; IndexWriter writer = null;
@ -1240,7 +1243,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
int num = atLeast(1); int num = atLeast(1);
for (int j = 0; j < num; j++) { for (int j = 0; j < num; j++) {
for (FailOnTermVectors failure : failures) { for (FailOnTermVectors failure : failures) {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))); TEST_VERSION_CURRENT, new MockAnalyzer(random())));
dir.failOn(failure); dir.failOn(failure);

View File

@ -31,7 +31,7 @@ import org.apache.lucene.util._TestUtil;
public class TestIndexWriterForceMerge extends LuceneTestCase { public class TestIndexWriterForceMerge extends LuceneTestCase {
public void testPartialMerge() throws IOException { public void testPartialMerge() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
final Document doc = new Document(); final Document doc = new Document();
doc.add(newStringField("content", "aaa", Field.Store.NO)); doc.add(newStringField("content", "aaa", Field.Store.NO));
@ -72,7 +72,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
} }
public void testMaxNumSegments2() throws IOException { public void testMaxNumSegments2() throws IOException {
MockDirectoryWrapper dir = newDirectory(); Directory dir = newDirectory();
final Document doc = new Document(); final Document doc = new Document();
doc.add(newStringField("content", "aaa", Field.Store.NO)); doc.add(newStringField("content", "aaa", Field.Store.NO));
@ -121,7 +121,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
*/ */
public void testForceMergeTempSpaceUsage() throws IOException { public void testForceMergeTempSpaceUsage() throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy())); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
if (VERBOSE) { if (VERBOSE) {
System.out.println("TEST: config1=" + writer.getConfig()); System.out.println("TEST: config1=" + writer.getConfig());

View File

@ -182,7 +182,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// Now, build a starting index that has START_COUNT docs. We // Now, build a starting index that has START_COUNT docs. We
// will then try to addIndexes into a copy of this: // will then try to addIndexes into a copy of this:
MockDirectoryWrapper startDir = newDirectory(); MockDirectoryWrapper startDir = newMockDirectory();
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for(int j=0;j<START_COUNT;j++) { for(int j=0;j<START_COUNT;j++) {
addDocWithIndex(writer, j); addDocWithIndex(writer, j);
@ -476,7 +476,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// LUCENE-2593 // LUCENE-2593
public void testCorruptionAfterDiskFullDuringMerge() throws IOException { public void testCorruptionAfterDiskFullDuringMerge() throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
//IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true)); //IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true));
IndexWriter w = new IndexWriter( IndexWriter w = new IndexWriter(
dir, dir,
@ -520,7 +520,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// an IndexWriter (hit during DW.ThreadState.init()) is // an IndexWriter (hit during DW.ThreadState.init()) is
// OK: // OK:
public void testImmediateDiskFull() throws IOException { public void testImmediateDiskFull() throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler())); .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
dir.setMaxSizeInBytes(Math.max(1, dir.getRecomputedActualSizeInBytes())); dir.setMaxSizeInBytes(Math.max(1, dir.getRecomputedActualSizeInBytes()));

View File

@ -27,9 +27,7 @@ import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Constants; import org.apache.lucene.util.Constants;
import org.apache.lucene.util._TestUtil; import org.apache.lucene.util._TestUtil;
@ -134,7 +132,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
*/ */
public boolean checkIndexes(File file) throws IOException { public boolean checkIndexes(File file) throws IOException {
if (file.isDirectory()) { if (file.isDirectory()) {
MockDirectoryWrapper dir = newFSDirectory(file); BaseDirectoryWrapper dir = newFSDirectory(file);
dir.setCheckIndexOnClose(false); // don't double-checkindex dir.setCheckIndexOnClose(false); // don't double-checkindex
if (DirectoryReader.indexExists(dir)) { if (DirectoryReader.indexExists(dir)) {
if (VERBOSE) { if (VERBOSE) {

View File

@ -708,7 +708,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Stress test reopen during addIndexes // Stress test reopen during addIndexes
public void testDuringAddIndexes() throws Exception { public void testDuringAddIndexes() throws Exception {
MockDirectoryWrapper dir1 = newDirectory(); Directory dir1 = newDirectory();
final IndexWriter writer = new IndexWriter( final IndexWriter writer = new IndexWriter(
dir1, dir1,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())). newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@ -781,8 +781,10 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertEquals(0, excs.size()); assertEquals(0, excs.size());
r.close(); r.close();
final Collection<String> openDeletedFiles = dir1.getOpenDeletedFiles(); if (dir1 instanceof MockDirectoryWrapper) {
assertEquals("openDeleted=" + openDeletedFiles, 0, openDeletedFiles.size()); final Collection<String> openDeletedFiles = ((MockDirectoryWrapper)dir1).getOpenDeletedFiles();
assertEquals("openDeleted=" + openDeletedFiles, 0, openDeletedFiles.size());
}
writer.close(); writer.close();

View File

@ -31,6 +31,7 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
@ -130,7 +131,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
if (VERBOSE) { if (VERBOSE) {
System.out.println("\nTEST: iter=" + iter); System.out.println("\nTEST: iter=" + iter);
} }
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter( IndexWriter writer = new IndexWriter(
dir, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@ -245,7 +246,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
if (VERBOSE) { if (VERBOSE) {
System.out.println("TEST: iter=" + iter); System.out.println("TEST: iter=" + iter);
} }
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter( IndexWriter writer = new IndexWriter(
dir, dir,
@ -302,7 +303,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
// Runs test, with one thread, using the specific failure // Runs test, with one thread, using the specific failure
// to trigger an IOException // to trigger an IOException
public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException { public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException {
MockDirectoryWrapper dir = newDirectory(); MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())) IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler())); .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
@ -435,7 +436,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
// and closes before the second IndexWriter time's out trying to get the Lock, // and closes before the second IndexWriter time's out trying to get the Lock,
// we should see both documents // we should see both documents
public void testOpenTwoIndexWritersOnDifferentThreads() throws IOException, InterruptedException { public void testOpenTwoIndexWritersOnDifferentThreads() throws IOException, InterruptedException {
final MockDirectoryWrapper dir = newDirectory(); final Directory dir = newDirectory();
CountDownLatch oneIWConstructed = new CountDownLatch(1); CountDownLatch oneIWConstructed = new CountDownLatch(1);
DelayedIndexAndCloseRunnable thread1 = new DelayedIndexAndCloseRunnable( DelayedIndexAndCloseRunnable thread1 = new DelayedIndexAndCloseRunnable(
dir, oneIWConstructed); dir, oneIWConstructed);
@ -503,8 +504,10 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
// LUCENE-4147 // LUCENE-4147
public void testRollbackAndCommitWithThreads() throws Exception { public void testRollbackAndCommitWithThreads() throws Exception {
final MockDirectoryWrapper d = newFSDirectory(_TestUtil.getTempDir("RollbackAndCommitWithThreads")); final BaseDirectoryWrapper d = newFSDirectory(_TestUtil.getTempDir("RollbackAndCommitWithThreads"));
d.setPreventDoubleWrite(false); if (d instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)d).setPreventDoubleWrite(false);
}
final int threadCount = _TestUtil.nextInt(random(), 2, 6); final int threadCount = _TestUtil.nextInt(random(), 2, 6);

View File

@ -24,6 +24,7 @@ import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil; import org.apache.lucene.util._TestUtil;
@ -35,12 +36,14 @@ public class TestNeverDelete extends LuceneTestCase {
public void testIndexing() throws Exception { public void testIndexing() throws Exception {
final File tmpDir = _TestUtil.getTempDir("TestNeverDelete"); final File tmpDir = _TestUtil.getTempDir("TestNeverDelete");
final MockDirectoryWrapper d = newFSDirectory(tmpDir); final BaseDirectoryWrapper d = newFSDirectory(tmpDir);
// We want to "see" files removed if Lucene removed // We want to "see" files removed if Lucene removed
// them. This is still worth running on Windows since // them. This is still worth running on Windows since
// some files the IR opens and closes. // some files the IR opens and closes.
d.setNoDeleteOpenFile(false); if (d instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)d).setNoDeleteOpenFile(false);
}
final RandomIndexWriter w = new RandomIndexWriter(random(), final RandomIndexWriter w = new RandomIndexWriter(random(),
d, d,
newIndexWriterConfig(TEST_VERSION_CURRENT, newIndexWriterConfig(TEST_VERSION_CURRENT,

View File

@ -35,7 +35,7 @@ public class TestRollingUpdates extends LuceneTestCase {
@Test @Test
public void testRollingUpdates() throws Exception { public void testRollingUpdates() throws Exception {
Random random = new Random(random().nextLong()); Random random = new Random(random().nextLong());
final MockDirectoryWrapper dir = newDirectory(); final BaseDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // we use a custom codec provider dir.setCheckIndexOnClose(false); // we use a custom codec provider
final LineFileDocs docs = new LineFileDocs(random, true); final LineFileDocs docs = new LineFileDocs(random, true);

View File

@ -76,7 +76,7 @@ public class TestFSTs extends LuceneTestCase {
@Override @Override
public void setUp() throws Exception { public void setUp() throws Exception {
super.setUp(); super.setUp();
dir = newDirectory(); dir = newMockDirectory();
dir.setPreventDoubleWrite(false); dir.setPreventDoubleWrite(false);
} }
@ -1107,7 +1107,7 @@ public class TestFSTs extends LuceneTestCase {
final int RUN_TIME_MSEC = atLeast(500); final int RUN_TIME_MSEC = atLeast(500);
final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(-1).setRAMBufferSizeMB(64); final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(-1).setRAMBufferSizeMB(64);
final File tempDir = _TestUtil.getTempDir("fstlines"); final File tempDir = _TestUtil.getTempDir("fstlines");
final MockDirectoryWrapper dir = newFSDirectory(tempDir); final Directory dir = newFSDirectory(tempDir);
final IndexWriter writer = new IndexWriter(dir, conf); final IndexWriter writer = new IndexWriter(dir, conf);
final long stopTime = System.currentTimeMillis() + RUN_TIME_MSEC; final long stopTime = System.currentTimeMillis() + RUN_TIME_MSEC;
Document doc; Document doc;

View File

@ -37,8 +37,8 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FailOnNonBulkMergesInfoStream; import org.apache.lucene.util.FailOnNonBulkMergesInfoStream;
@ -433,7 +433,7 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
final LineFileDocs docs = new LineFileDocs(random, true); final LineFileDocs docs = new LineFileDocs(random, true);
final File tempDir = _TestUtil.getTempDir(testName); final File tempDir = _TestUtil.getTempDir(testName);
dir = newFSDirectory(tempDir); dir = newFSDirectory(tempDir);
((MockDirectoryWrapper) dir).setCheckIndexOnClose(false); // don't double-checkIndex, we do it ourselves. ((BaseDirectoryWrapper) dir).setCheckIndexOnClose(false); // don't double-checkIndex, we do it ourselves.
final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setInfoStream(new FailOnNonBulkMergesInfoStream()); new MockAnalyzer(random())).setInfoStream(new FailOnNonBulkMergesInfoStream());

View File

@ -0,0 +1,174 @@
package org.apache.lucene.store;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.util._TestUtil;
/**
* Calls check index on close.
*/
// do NOT make any methods in this class synchronized, volatile
// do NOT import anything from the concurrency package.
// no randoms, no nothing.
public class BaseDirectoryWrapper extends Directory {
  /** The wrapped directory; every Directory operation is forwarded to it. */
  protected final Directory delegate;
  /**
   * Best-effort open/closed flag. Deliberately NOT volatile or synchronized:
   * per the class contract above, this wrapper must avoid all concurrency
   * constructs (that was the point of splitting it out of
   * MockDirectoryWrapper — see LUCENE-4243). Subclasses that need stronger
   * guarantees (e.g. MockDirectoryWrapper) provide their own synchronized
   * {@code isOpen()}.
   */
  protected boolean open;
  // when true, close() runs CheckIndex if a segments file is present
  private boolean checkIndexOnClose = true;
  // when true, the CheckIndex pass also cross-checks term vectors (slower)
  private boolean crossCheckTermVectorsOnClose = true;

  /** Wraps {@code delegate}, forwarding all calls to it. */
  public BaseDirectoryWrapper(Directory delegate) {
    this.delegate = delegate;
  }

  /**
   * Marks this wrapper closed, runs CheckIndex on the still-open directory
   * if enabled and an index appears to exist, then closes the delegate.
   * Note: if CheckIndex fails, the exception propagates and the delegate is
   * intentionally left unclosed so the test failure is not masked.
   */
  @Override
  public void close() throws IOException {
    open = false;
    if (checkIndexOnClose && indexPossiblyExists()) {
      _TestUtil.checkIndex(this, crossCheckTermVectorsOnClose);
    }
    delegate.close();
  }

  /** Returns the best-effort open flag; see {@link #open} for caveats. */
  public boolean isOpen() {
    return open;
  }

  /**
   * don't rely upon DirectoryReader.fileExists to determine if we should
   * checkIndex() or not. It might mask real problems, where we silently
   * don't checkindex at all. instead we look for a segments file.
   */
  protected boolean indexPossiblyExists() {
    String files[];
    try {
      files = listAll();
    } catch (IOException ex) {
      // this means directory doesn't exist, which is ok. return false
      return false;
    }
    for (String f : files) {
      if (f.startsWith("segments_")) {
        return true;
      }
    }
    return false;
  }

  /**
   * Set whether or not checkindex should be run
   * on close
   */
  public void setCheckIndexOnClose(boolean value) {
    this.checkIndexOnClose = value;
  }

  /** Returns true if CheckIndex will run when this directory is closed. */
  public boolean getCheckIndexOnClose() {
    return checkIndexOnClose;
  }

  /** Set whether the close-time CheckIndex also cross-checks term vectors. */
  public void setCrossCheckTermVectorsOnClose(boolean value) {
    this.crossCheckTermVectorsOnClose = value;
  }

  /** Returns true if the close-time CheckIndex cross-checks term vectors. */
  public boolean getCrossCheckTermVectorsOnClose() {
    return crossCheckTermVectorsOnClose;
  }

  // directory methods: pure delegation, no added behavior

  @Override
  public String[] listAll() throws IOException {
    return delegate.listAll();
  }

  @Override
  public boolean fileExists(String name) throws IOException {
    return delegate.fileExists(name);
  }

  @Override
  public void deleteFile(String name) throws IOException {
    delegate.deleteFile(name);
  }

  @Override
  public long fileLength(String name) throws IOException {
    return delegate.fileLength(name);
  }

  @Override
  public IndexOutput createOutput(String name, IOContext context) throws IOException {
    return delegate.createOutput(name, context);
  }

  @Override
  public void sync(Collection<String> names) throws IOException {
    delegate.sync(names);
  }

  @Override
  public IndexInput openInput(String name, IOContext context) throws IOException {
    return delegate.openInput(name, context);
  }

  @Override
  public Lock makeLock(String name) {
    return delegate.makeLock(name);
  }

  @Override
  public void clearLock(String name) throws IOException {
    delegate.clearLock(name);
  }

  @Override
  public void setLockFactory(LockFactory lockFactory) throws IOException {
    delegate.setLockFactory(lockFactory);
  }

  @Override
  public LockFactory getLockFactory() {
    return delegate.getLockFactory();
  }

  @Override
  public String getLockID() {
    return delegate.getLockID();
  }

  @Override
  public String toString() {
    return "BaseDirectoryWrapper(" + delegate.toString() + ")";
  }

  @Override
  public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
    delegate.copy(to, src, dest, context);
  }

  @Override
  public IndexInputSlicer createSlicer(String name, IOContext context) throws IOException {
    return delegate.createSlicer(name, context);
  }
}

View File

@ -57,8 +57,7 @@ import org.apache.lucene.util._TestUtil;
* </ul> * </ul>
*/ */
public class MockDirectoryWrapper extends Directory { public class MockDirectoryWrapper extends BaseDirectoryWrapper {
final Directory delegate;
long maxSize; long maxSize;
// Max actual bytes used. This is set by MockRAMOutputStream: // Max actual bytes used. This is set by MockRAMOutputStream:
@ -67,8 +66,6 @@ public class MockDirectoryWrapper extends Directory {
Random randomState; Random randomState;
boolean noDeleteOpenFile = true; boolean noDeleteOpenFile = true;
boolean preventDoubleWrite = true; boolean preventDoubleWrite = true;
boolean checkIndexOnClose = true;
boolean crossCheckTermVectorsOnClose = true;
boolean trackDiskUsage = false; boolean trackDiskUsage = false;
private Set<String> unSyncedFiles; private Set<String> unSyncedFiles;
private Set<String> createdFiles; private Set<String> createdFiles;
@ -109,7 +106,7 @@ public class MockDirectoryWrapper extends Directory {
} }
public MockDirectoryWrapper(Random random, Directory delegate) { public MockDirectoryWrapper(Random random, Directory delegate) {
this.delegate = delegate; super(delegate);
// must make a private random since our methods are // must make a private random since our methods are
// called from different threads; else test failures may // called from different threads; else test failures may
// not be reproducible from the original seed // not be reproducible from the original seed
@ -251,19 +248,19 @@ public class MockDirectoryWrapper extends Directory {
} }
} }
final IndexOutput tempOut = delegate.createOutput(tempFileName, LuceneTestCase.newIOContext(randomState)); final IndexOutput tempOut = delegate.createOutput(tempFileName, LuceneTestCase.newIOContext(randomState));
IndexInput in = delegate.openInput(name, LuceneTestCase.newIOContext(randomState)); IndexInput ii = delegate.openInput(name, LuceneTestCase.newIOContext(randomState));
tempOut.copyBytes(in, in.length()/2); tempOut.copyBytes(ii, ii.length()/2);
tempOut.close(); tempOut.close();
in.close(); ii.close();
// Delete original and copy bytes back: // Delete original and copy bytes back:
deleteFile(name, true); deleteFile(name, true);
final IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState)); final IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState));
in = delegate.openInput(tempFileName, LuceneTestCase.newIOContext(randomState)); ii = delegate.openInput(tempFileName, LuceneTestCase.newIOContext(randomState));
out.copyBytes(in, in.length()); out.copyBytes(ii, ii.length());
out.close(); out.close();
in.close(); ii.close();
deleteFile(tempFileName, true); deleteFile(tempFileName, true);
} else if (damage == 3) { } else if (damage == 3) {
// The file survived intact: // The file survived intact:
@ -316,26 +313,6 @@ public class MockDirectoryWrapper extends Directory {
return noDeleteOpenFile; return noDeleteOpenFile;
} }
/**
* Set whether or not checkindex should be run
* on close
*/
public void setCheckIndexOnClose(boolean value) {
this.checkIndexOnClose = value;
}
public boolean getCheckIndexOnClose() {
return checkIndexOnClose;
}
public void setCrossCheckTermVectorsOnClose(boolean value) {
this.crossCheckTermVectorsOnClose = value;
}
public boolean getCrossCheckTermVectorsOnClose() {
return crossCheckTermVectorsOnClose;
}
/** /**
* If 0.0, no exceptions will be thrown. Else this should * If 0.0, no exceptions will be thrown. Else this should
* be a double 0.0 - 1.0. We will randomly throw an * be a double 0.0 - 1.0. We will randomly throw an
@ -575,8 +552,8 @@ public class MockDirectoryWrapper extends Directory {
throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks); throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks);
} }
open = false; open = false;
if (checkIndexOnClose) { if (getCheckIndexOnClose()) {
if (indexPossiblyExists(this)) { if (indexPossiblyExists()) {
if (LuceneTestCase.VERBOSE) { if (LuceneTestCase.VERBOSE) {
System.out.println("\nNOTE: MockDirectoryWrapper: now crash"); System.out.println("\nNOTE: MockDirectoryWrapper: now crash");
} }
@ -584,7 +561,7 @@ public class MockDirectoryWrapper extends Directory {
if (LuceneTestCase.VERBOSE) { if (LuceneTestCase.VERBOSE) {
System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex"); System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
} }
_TestUtil.checkIndex(this, crossCheckTermVectorsOnClose); _TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose());
if (assertNoUnreferencedFilesOnClose) { if (assertNoUnreferencedFilesOnClose) {
// now look for unreferenced files: // now look for unreferenced files:
@ -612,26 +589,6 @@ public class MockDirectoryWrapper extends Directory {
} }
delegate.close(); delegate.close();
} }
/** don't rely upon DirectoryReader.fileExists to determine if we should
 * checkIndex() or not. It might mask real problems, where we silently
 * don't checkindex at all. instead we look for a segments file.
 */
private boolean indexPossiblyExists(Directory d) {
  final String[] entries;
  try {
    entries = d.listAll();
  } catch (IOException ex) {
    // listAll throws if the directory doesn't exist, which is fine: no index there
    return false;
  }
  // the presence of any segments_N file signals a (possibly partial) index
  for (int i = 0; i < entries.length; i++) {
    if (entries[i].startsWith("segments_")) {
      return true;
    }
  }
  return false;
}
synchronized void removeOpenFile(Closeable c, String name) { synchronized void removeOpenFile(Closeable c, String name) {
Integer v = openFiles.get(name); Integer v = openFiles.get(name);
@ -658,8 +615,7 @@ public class MockDirectoryWrapper extends Directory {
removeOpenFile(in, name); removeOpenFile(in, name);
} }
boolean open = true; @Override
public synchronized boolean isOpen() { public synchronized boolean isOpen() {
return open; return open;
} }

View File

@ -2,6 +2,7 @@ package org.apache.lucene.util;
import java.io.Closeable; import java.io.Closeable;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.MockDirectoryWrapper;
import org.junit.Assert; import org.junit.Assert;
@ -23,15 +24,15 @@ import org.junit.Assert;
*/ */
/** /**
* Attempts to close a {@link MockDirectoryWrapper}. * Attempts to close a {@link BaseDirectoryWrapper}.
* *
* @see LuceneTestCase#newDirectory(java.util.Random) * @see LuceneTestCase#newDirectory(java.util.Random)
*/ */
final class CloseableDirectory implements Closeable { final class CloseableDirectory implements Closeable {
private final MockDirectoryWrapper dir; private final BaseDirectoryWrapper dir;
private final TestRuleMarkFailure failureMarker; private final TestRuleMarkFailure failureMarker;
public CloseableDirectory(MockDirectoryWrapper dir, public CloseableDirectory(BaseDirectoryWrapper dir,
TestRuleMarkFailure failureMarker) { TestRuleMarkFailure failureMarker) {
this.dir = dir; this.dir = dir;
this.failureMarker = failureMarker; this.failureMarker = failureMarker;

View File

@ -780,48 +780,48 @@ public abstract class LuceneTestCase extends Assert {
* Returns a new Directory instance. Use this when the test does not * Returns a new Directory instance. Use this when the test does not
* care about the specific Directory implementation (most tests). * care about the specific Directory implementation (most tests).
* <p> * <p>
* The Directory is wrapped with {@link MockDirectoryWrapper}. * The Directory is wrapped with {@link BaseDirectoryWrapper}.
* By default this means it will be picky, such as ensuring that you * this means usually it will be picky, such as ensuring that you
* properly close it and all open files in your test. It will emulate * properly close it and all open files in your test. It will emulate
* some features of Windows, such as not allowing open files to be * some features of Windows, such as not allowing open files to be
* overwritten. * overwritten.
*/ */
public static MockDirectoryWrapper newDirectory() { public static BaseDirectoryWrapper newDirectory() {
return newDirectory(random()); return newDirectory(random());
} }
/** /**
* Returns a new Directory instance, using the specified random. * Returns a new Directory instance, using the specified random.
* See {@link #newDirectory()} for more information. * See {@link #newDirectory()} for more information.
*/ */
public static MockDirectoryWrapper newDirectory(Random r) { public static BaseDirectoryWrapper newDirectory(Random r) {
Directory impl = newDirectoryImpl(r, TEST_DIRECTORY); return wrapDirectory(r, newDirectoryImpl(r, TEST_DIRECTORY), rarely(r));
MockDirectoryWrapper dir = new MockDirectoryWrapper(r, maybeNRTWrap(r, impl)); }
closeAfterSuite(new CloseableDirectory(dir, suiteFailureMarker));
dir.setThrottling(TEST_THROTTLING); public static MockDirectoryWrapper newMockDirectory() {
if (VERBOSE) { return newMockDirectory(random());
System.out.println("NOTE: LuceneTestCase.newDirectory: returning " + dir); }
}
return dir; public static MockDirectoryWrapper newMockDirectory(Random r) {
} return (MockDirectoryWrapper) wrapDirectory(r, newDirectoryImpl(r, TEST_DIRECTORY), false);
}
/** /**
* Returns a new Directory instance, with contents copied from the * Returns a new Directory instance, with contents copied from the
* provided directory. See {@link #newDirectory()} for more * provided directory. See {@link #newDirectory()} for more
* information. * information.
*/ */
public static MockDirectoryWrapper newDirectory(Directory d) throws IOException { public static BaseDirectoryWrapper newDirectory(Directory d) throws IOException {
return newDirectory(random(), d); return newDirectory(random(), d);
} }
/** Returns a new FSDirectory instance over the given file, which must be a folder. */ /** Returns a new FSDirectory instance over the given file, which must be a folder. */
public static MockDirectoryWrapper newFSDirectory(File f) { public static BaseDirectoryWrapper newFSDirectory(File f) {
return newFSDirectory(f, null); return newFSDirectory(f, null);
} }
/** Returns a new FSDirectory instance over the given file, which must be a folder. */ /** Returns a new FSDirectory instance over the given file, which must be a folder. */
public static MockDirectoryWrapper newFSDirectory(File f, LockFactory lf) { public static BaseDirectoryWrapper newFSDirectory(File f, LockFactory lf) {
String fsdirClass = TEST_DIRECTORY; String fsdirClass = TEST_DIRECTORY;
if (fsdirClass.equals("random")) { if (fsdirClass.equals("random")) {
fsdirClass = RandomPicks.randomFrom(random(), FS_DIRECTORIES); fsdirClass = RandomPicks.randomFrom(random(), FS_DIRECTORIES);
@ -838,14 +838,11 @@ public abstract class LuceneTestCase extends Assert {
} }
Directory fsdir = newFSDirectoryImpl(clazz, f); Directory fsdir = newFSDirectoryImpl(clazz, f);
MockDirectoryWrapper dir = new MockDirectoryWrapper( BaseDirectoryWrapper wrapped = wrapDirectory(random(), fsdir, rarely());
random(), maybeNRTWrap(random(), fsdir));
if (lf != null) { if (lf != null) {
dir.setLockFactory(lf); wrapped.setLockFactory(lf);
} }
closeAfterSuite(new CloseableDirectory(dir, suiteFailureMarker)); return wrapped;
dir.setThrottling(TEST_THROTTLING);
return dir;
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -856,22 +853,27 @@ public abstract class LuceneTestCase extends Assert {
* with contents copied from the provided directory. See * with contents copied from the provided directory. See
* {@link #newDirectory()} for more information. * {@link #newDirectory()} for more information.
*/ */
public static MockDirectoryWrapper newDirectory(Random r, Directory d) throws IOException { public static BaseDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
Directory impl = newDirectoryImpl(r, TEST_DIRECTORY); Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
for (String file : d.listAll()) { for (String file : d.listAll()) {
d.copy(impl, file, file, newIOContext(r)); d.copy(impl, file, file, newIOContext(r));
} }
MockDirectoryWrapper dir = new MockDirectoryWrapper(r, maybeNRTWrap(r, impl)); return wrapDirectory(r, impl, rarely(r));
closeAfterSuite(new CloseableDirectory(dir, suiteFailureMarker));
dir.setThrottling(TEST_THROTTLING);
return dir;
} }
private static Directory maybeNRTWrap(Random random, Directory directory) { private static BaseDirectoryWrapper wrapDirectory(Random random, Directory directory, boolean bare) {
if (rarely(random)) { if (rarely(random)) {
return new NRTCachingDirectory(directory, random.nextDouble(), random.nextDouble()); directory = new NRTCachingDirectory(directory, random.nextDouble(), random.nextDouble());
}
if (bare) {
BaseDirectoryWrapper base = new BaseDirectoryWrapper(directory);
closeAfterSuite(new CloseableDirectory(base, suiteFailureMarker));
return base;
} else { } else {
return directory; MockDirectoryWrapper mock = new MockDirectoryWrapper(random, directory);
mock.setThrottling(TEST_THROTTLING);
closeAfterSuite(new CloseableDirectory(mock, suiteFailureMarker));
return mock;
} }
} }

View File

@ -31,12 +31,14 @@ public class MockDirectoryFactory extends CachingDirectoryFactory {
@Override @Override
protected Directory create(String path) throws IOException { protected Directory create(String path) throws IOException {
MockDirectoryWrapper dir = LuceneTestCase.newDirectory(); Directory dir = LuceneTestCase.newDirectory();
// Somehow removing unref'd files in Solr tests causes // Somehow removing unref'd files in Solr tests causes
// problems... there's some interaction w/ // problems... there's some interaction w/
// CachingDirectoryFactory. Once we track down where Solr // CachingDirectoryFactory. Once we track down where Solr
// isn't closing an IW, we can re-enable this: // isn't closing an IW, we can re-enable this:
dir.setAssertNoUnrefencedFilesOnClose(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setAssertNoUnrefencedFilesOnClose(false);
}
return dir; return dir;
} }

View File

@ -31,12 +31,14 @@ public class MockFSDirectoryFactory extends CachingDirectoryFactory {
@Override @Override
public Directory create(String path) throws IOException { public Directory create(String path) throws IOException {
MockDirectoryWrapper dir = LuceneTestCase.newFSDirectory(new File(path)); Directory dir = LuceneTestCase.newFSDirectory(new File(path));
// Somehow removing unref'd files in Solr tests causes // Somehow removing unref'd files in Solr tests causes
// problems... there's some interaction w/ // problems... there's some interaction w/
// CachingDirectoryFactory. Once we track down where Solr // CachingDirectoryFactory. Once we track down where Solr
// isn't closing an IW, we can re-enable this: // isn't closing an IW, we can re-enable this:
dir.setAssertNoUnrefencedFilesOnClose(false); if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)dir).setAssertNoUnrefencedFilesOnClose(false);
}
return dir; return dir;
} }
} }