mirror of https://github.com/apache/lucene.git

LUCENE-4243: MockDirectoryWrapper synchronizes too much

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1363971 13f79535-47bb-0310-9956-ffa450edef68

commit 22b5c84800 (parent 0ac72088e8)
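The diff below applies one idiom over and over: tests that only need a plain Directory now take whatever newDirectory()/newFSDirectory() returns (a BaseDirectoryWrapper), and any mock-only setting is applied behind an instanceof check, while tests that genuinely need fault injection switch to the new newMockDirectory(). A minimal, hypothetical sketch of that guard idiom (the class and method names here are illustrative, not part of the commit):

```java
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

// Hypothetical example of the pattern this commit introduces in the tests:
// newFSDirectory() now returns BaseDirectoryWrapper, so mock-only knobs are
// only touched when the returned wrapper actually is a MockDirectoryWrapper.
public class GuardIdiomExampleTest extends LuceneTestCase {
  public void testGuardIdiom() throws Exception {
    BaseDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("guardIdiom"));
    if (dir instanceof MockDirectoryWrapper) {
      // mock-specific settings, as in Test2BPostings / TestIndexWriterCommit below
      ((MockDirectoryWrapper) dir).setPreventDoubleWrite(false);
      ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
    dir.setCheckIndexOnClose(false); // lives on BaseDirectoryWrapper, so no cast needed
    dir.close();
  }
}
```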
@@ -29,7 +29,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util._TestUtil;
 import org.junit.BeforeClass;
@@ -45,7 +44,7 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {

     // create a dummy index under inputDir
     inputDir = new File(testDir, "input");
-    MockDirectoryWrapper tmpDir = newFSDirectory(inputDir);
+    Directory tmpDir = newFSDirectory(inputDir);
     try {
       IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
       for (int i = 0; i < 10; i++) {
@@ -59,7 +59,7 @@ public class TestExternalCodecs extends LuceneTestCase {
       System.out.println("TEST: NUM_DOCS=" + NUM_DOCS);
     }

-    MockDirectoryWrapper dir = newDirectory();
+    BaseDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // we use a custom codec provider
     IndexWriter w = new IndexWriter(
         dir,
@@ -85,7 +85,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
   }

   public void testSubclassConcurrentMergeScheduler() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     dir.failOn(new FailOnlyOnMerge());

     Document doc = new Document();
@@ -37,6 +37,7 @@ import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
@@ -54,7 +55,7 @@ public class Test10KPulsings extends LuceneTestCase {
     Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(1));

     File f = _TestUtil.getTempDir("10kpulsed");
-    MockDirectoryWrapper dir = newFSDirectory(f);
+    BaseDirectoryWrapper dir = newFSDirectory(f);
     dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
@@ -105,7 +106,7 @@ public class Test10KPulsings extends LuceneTestCase {
     Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(freqCutoff));

     File f = _TestUtil.getTempDir("10knotpulsed");
-    MockDirectoryWrapper dir = newFSDirectory(f);
+    BaseDirectoryWrapper dir = newFSDirectory(f);
     dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
@@ -33,6 +33,7 @@ import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
@@ -84,7 +85,7 @@ public class TestPulsingReuse extends LuceneTestCase {
   public void testNestedPulsing() throws Exception {
     // we always run this test with pulsing codec.
     Codec cp = _TestUtil.alwaysPostingsFormat(new NestedPulsingPostingsFormat());
-    MockDirectoryWrapper dir = newDirectory();
+    BaseDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // will do this ourselves, custom codec
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
@@ -25,6 +25,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
@@ -39,8 +40,10 @@ public class Test2BPostings extends LuceneTestCase {

   @Nightly
   public void test() throws Exception {
-    MockDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BPostings"));
-    dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
+    BaseDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BPostings"));
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
+    }
     dir.setCheckIndexOnClose(false); // don't double-checkindex

     IndexWriter w = new IndexWriter(dir,
@@ -146,9 +146,11 @@ public class Test2BTerms extends LuceneTestCase {

     List<BytesRef> savedTerms = null;

-    MockDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
+    BaseDirectoryWrapper dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
     //MockDirectoryWrapper dir = newFSDirectory(new File("/p/lucene/indices/2bindex"));
-    dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
+    }
     dir.setCheckIndexOnClose(false); // don't double-checkindex

     if (true) {
@@ -51,6 +51,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.RAMDirectory;
@@ -1168,7 +1169,7 @@ public class TestAddIndexes extends LuceneTestCase {
    * simple test that ensures we getting expected exceptions
    */
   public void testAddIndexMissingCodec() throws IOException {
-    MockDirectoryWrapper toAdd = newDirectory();
+    BaseDirectoryWrapper toAdd = newDirectory();
     // Disable checkIndex, else we get an exception because
     // of the unregistered codec:
     toAdd.setCheckIndexOnClose(false);
@@ -55,6 +55,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.RAMDirectory;
@@ -177,7 +178,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       }
       File oldIndxeDir = _TestUtil.getTempDir(unsupportedNames[i]);
       _TestUtil.unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), oldIndxeDir);
-      MockDirectoryWrapper dir = newFSDirectory(oldIndxeDir);
+      BaseDirectoryWrapper dir = newFSDirectory(oldIndxeDir);
       // don't checkindex, these are intentionally not supported
       dir.setCheckIndexOnClose(false);

@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;

@@ -67,7 +68,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
   // Make sure running BG merges still work fine even when
   // we are hitting exceptions during flushing.
   public void testFlushExceptions() throws IOException {
-    MockDirectoryWrapper directory = newDirectory();
+    MockDirectoryWrapper directory = newMockDirectory();
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     directory.failOn(failure);

@@ -120,7 +121,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
   // Test that deletes committed after a merge started and
   // before it finishes, are correctly merged back:
   public void testDeleteMerging() throws IOException {
-    MockDirectoryWrapper directory = newDirectory();
+    Directory directory = newDirectory();

     LogDocMergePolicy mp = new LogDocMergePolicy();
     // Force degenerate merging so we can get a mix of
@@ -164,7 +165,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
   }

   public void testNoExtraFiles() throws IOException {
-    MockDirectoryWrapper directory = newDirectory();
+    Directory directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(2));
@@ -195,7 +196,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
   }

   public void testNoWaitClose() throws IOException {
-    MockDirectoryWrapper directory = newDirectory();
+    Directory directory = newDirectory();
     Document doc = new Document();
     Field idField = newStringField("id", "", Field.Store.YES);
     doc.add(idField);
@@ -30,7 +30,7 @@ import org.apache.lucene.document.Document;
 public class TestCrash extends LuceneTestCase {

   private IndexWriter initIndex(Random random, boolean initialCommit) throws IOException {
-    return initIndex(random, newDirectory(random), initialCommit);
+    return initIndex(random, newMockDirectory(random), initialCommit);
   }

   private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException {
@@ -29,7 +29,6 @@ import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LineFileDocs;
@@ -44,7 +43,7 @@ public class TestCustomNorms extends LuceneTestCase {

   public void testFloatNorms() throws IOException {

-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random()));
     Similarity provider = new MySimProvider();
@@ -85,7 +84,7 @@ public class TestCustomNorms extends LuceneTestCase {
   }

   public void testExceptionOnRandomType() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random()));
     Similarity provider = new MySimProvider();
@@ -89,7 +89,7 @@ public class TestDocTermOrds extends LuceneTestCase {
   }

   public void testRandom() throws Exception {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();

     final int NUM_TERMS = atLeast(20);
     final Set<BytesRef> terms = new HashSet<BytesRef>();
@@ -176,7 +176,7 @@ public class TestDocTermOrds extends LuceneTestCase {
   }

   public void testRandomWithPrefix() throws Exception {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();

     final Set<String> prefixes = new HashSet<String>();
     final int numPrefix = _TestUtil.nextInt(random(), 2, 7);
@@ -26,7 +26,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
@@ -148,7 +148,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
     Directory target = newDirectory();

     // We mess with the postings so this can fail:
-    ((MockDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
+    ((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);

     writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     IndexReader reader = new TestReader(DirectoryReader.open(directory));
@@ -231,7 +231,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
     final int numDocumentsToIndex = 50 + random().nextInt(50);
     for (int i = 0; i < numThreads.length; i++) {
       AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
-      MockDirectoryWrapper dir = newDirectory();
+      MockDirectoryWrapper dir = newMockDirectory();
       // mock a very slow harddisk sometimes here so that flushing is very slow
       dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
       IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
@@ -37,7 +37,7 @@ public class TestForTooMuchCloning extends LuceneTestCase {
     // NOTE: if we see a fail on this test with "NestedPulsing" its because its
     // reuse isnt perfect (but reasonable). see TestPulsingReuse.testNestedPulsing
     // for more details
-    final MockDirectoryWrapper dir = newDirectory();
+    final MockDirectoryWrapper dir = newMockDirectory();
     final TieredMergePolicy tmp = new TieredMergePolicy();
     tmp.setMaxMergeAtOnce(2);
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir,
@@ -39,8 +39,10 @@ import org.apache.lucene.util.LuceneTestCase;
 public class TestIndexFileDeleter extends LuceneTestCase {

   public void testDeleteLeftoverFiles() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
-    dir.setPreventDoubleWrite(false);
+    Directory dir = newDirectory();
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
+    }

     LogMergePolicy mergePolicy = newLogMergePolicy(true, 10);
     mergePolicy.setNoCFSRatio(1); // This test expects all of its segments to be in CFS
@@ -213,7 +213,7 @@ public class TestIndexWriter extends LuceneTestCase {


   public void testIndexNoDocuments() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     writer.commit();
     writer.close();
@@ -235,7 +235,7 @@ public class TestIndexWriter extends LuceneTestCase {
   }

   public void testManyFields() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
     for(int j=0;j<100;j++) {
       Document doc = new Document();
@@ -265,7 +265,7 @@ public class TestIndexWriter extends LuceneTestCase {
   }

   public void testSmallRAMBuffer() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
         newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@@ -405,7 +405,7 @@ public class TestIndexWriter extends LuceneTestCase {
   }

   public void testDiverseDocs() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.5));
     int n = atLeast(1);
     for(int i=0;i<n;i++) {
@@ -454,7 +454,7 @@ public class TestIndexWriter extends LuceneTestCase {
   }

   public void testEnablingNorms() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
     // Enable norms for only 1 doc, pre flush
     FieldType customType = new FieldType(TextField.TYPE_STORED);
@@ -510,7 +510,7 @@ public class TestIndexWriter extends LuceneTestCase {
   }

   public void testHighFreqTerm() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01));
     // Massive doc that has 128 K a's
@@ -93,7 +93,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
    * and add docs to it.
    */
   public void testCommitOnCloseAbort() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
     for (int i = 0; i < 14; i++) {
       TestIndexWriter.addDoc(writer);
@@ -139,7 +139,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {

     // On abort, writer in fact may write to the same
     // segments_N file:
-    dir.setPreventDoubleWrite(false);
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
+    }

     for(int i=0;i<12;i++) {
       for(int j=0;j<17;j++) {
@@ -179,7 +181,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     final String idFormat = _TestUtil.getPostingsFormat("id");
     final String contentFormat = _TestUtil.getPostingsFormat("content");
     assumeFalse("This test cannot run with Memory codec", idFormat.equals("Memory") || contentFormat.equals("Memory"));
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     Analyzer analyzer;
     if (random().nextBoolean()) {
       // no payloads
@@ -258,11 +260,13 @@ public class TestIndexWriterCommit extends LuceneTestCase {
    * and close().
    */
   public void testCommitOnCloseForceMerge() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     // Must disable throwing exc on double-write: this
     // test uses IW.rollback which easily results in
     // writing to same file more than once
-    dir.setPreventDoubleWrite(false);
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
+    }
     IndexWriter writer = new IndexWriter(
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@@ -543,8 +547,10 @@ public class TestIndexWriterCommit extends LuceneTestCase {

   // LUCENE-1274: test writer.prepareCommit()
   public void testPrepareCommitRollback() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
-    dir.setPreventDoubleWrite(false);
+    Directory dir = newDirectory();
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
+    }

     IndexWriter writer = new IndexWriter(
         dir,
@@ -426,7 +426,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     int END_COUNT = 144;

     // First build up a starting index:
-    MockDirectoryWrapper startDir = newDirectory();
+    MockDirectoryWrapper startDir = newMockDirectory();
     // TODO: find the resource leak that only occurs sometimes here.
     startDir.setNoDeleteOpenFile(false);
     IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
@@ -689,7 +689,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
         "Venice has lots of canals" };
     String[] text = { "Amsterdam", "Venice" };

-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));

@@ -814,7 +814,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
         "Venice has lots of canals" };
     String[] text = { "Amsterdam", "Venice" };

-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
     modifier.commit();
     dir.failOn(failure.reset());
@@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
@@ -221,7 +222,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("\nTEST: start testRandomExceptions");
     }
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();

     MockAnalyzer analyzer = new MockAnalyzer(random());
     analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
@@ -265,7 +266,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   }

   public void testRandomExceptionsThreads() throws Throwable {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();
     MockAnalyzer analyzer = new MockAnalyzer(random());
     analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
     MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
@@ -556,7 +557,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // LUCENE-1072: make sure an errant exception on flushing
   // one segment only takes out those docs in that one flush
   public void testDocumentsWriterAbort() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     failure.setDoFail();
     dir.failOn(failure);
@@ -597,7 +598,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("TEST: cycle i=" + i);
       }
-      MockDirectoryWrapper dir = newDirectory();
+      Directory dir = newDirectory();
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));

       // don't allow a sudden merge to clean up the deleted
@@ -692,7 +693,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     final int NUM_ITER = 100;

     for(int i=0;i<2;i++) {
-      MockDirectoryWrapper dir = newDirectory();
+      Directory dir = newDirectory();

       {
         final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
@@ -822,7 +823,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

   // LUCENE-1044: test exception during sync
   public void testExceptionDuringSync() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     FailOnlyInSync failure = new FailOnlyInSync();
     dir.failOn(failure);

@@ -908,7 +909,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     };

     for (FailOnlyInCommit failure : failures) {
-      MockDirectoryWrapper dir = newDirectory();
+      MockDirectoryWrapper dir = newMockDirectory();
       dir.setFailOnCreateOutput(false);
       IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random())));
@@ -1076,7 +1077,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // latest segments file and make sure we get an
   // IOException trying to open the index:
   public void testSimulatedCorruptIndex1() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    BaseDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // we are corrupting it!

     IndexWriter writer = null;
@@ -1124,7 +1125,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // files and make sure we get an IOException trying to
   // open the index:
   public void testSimulatedCorruptIndex2() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    BaseDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // we are corrupting it!
     IndexWriter writer = null;

@@ -1174,8 +1175,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // gracefully fallback to the previous segments file),
   // and that we can add to the index:
   public void testSimulatedCrashedWriter() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
-    dir.setPreventDoubleWrite(false);
+    Directory dir = newDirectory();
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
+    }

     IndexWriter writer = null;

@@ -1240,7 +1243,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     int num = atLeast(1);
     for (int j = 0; j < num; j++) {
       for (FailOnTermVectors failure : failures) {
-        MockDirectoryWrapper dir = newDirectory();
+        MockDirectoryWrapper dir = newMockDirectory();
         IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
             TEST_VERSION_CURRENT, new MockAnalyzer(random())));
         dir.failOn(failure);
@@ -31,7 +31,7 @@ import org.apache.lucene.util._TestUtil;
 public class TestIndexWriterForceMerge extends LuceneTestCase {
   public void testPartialMerge() throws IOException {

-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();

     final Document doc = new Document();
     doc.add(newStringField("content", "aaa", Field.Store.NO));
@@ -72,7 +72,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
   }

   public void testMaxNumSegments2() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    Directory dir = newDirectory();

     final Document doc = new Document();
     doc.add(newStringField("content", "aaa", Field.Store.NO));
@@ -121,7 +121,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
    */
   public void testForceMergeTempSpaceUsage() throws IOException {

-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
     if (VERBOSE) {
       System.out.println("TEST: config1=" + writer.getConfig());
@@ -182,7 +182,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {

     // Now, build a starting index that has START_COUNT docs.  We
     // will then try to addIndexes into a copy of this:
-    MockDirectoryWrapper startDir = newDirectory();
+    MockDirectoryWrapper startDir = newMockDirectory();
     IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
     for(int j=0;j<START_COUNT;j++) {
       addDocWithIndex(writer, j);
@@ -476,7 +476,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {

   // LUCENE-2593
   public void testCorruptionAfterDiskFullDuringMerge() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     //IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true));
     IndexWriter w = new IndexWriter(
         dir,
@@ -520,7 +520,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
   // an IndexWriter (hit during DW.ThreadState.init()) is
   // OK:
   public void testImmediateDiskFull() throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
     dir.setMaxSizeInBytes(Math.max(1, dir.getRecomputedActualSizeInBytes()));
@@ -27,9 +27,7 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;

-import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util._TestUtil;

@@ -134,7 +132,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
    */
   public boolean checkIndexes(File file) throws IOException {
     if (file.isDirectory()) {
-      MockDirectoryWrapper dir = newFSDirectory(file);
+      BaseDirectoryWrapper dir = newFSDirectory(file);
       dir.setCheckIndexOnClose(false); // don't double-checkindex
       if (DirectoryReader.indexExists(dir)) {
         if (VERBOSE) {
@@ -708,7 +708,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   // Stress test reopen during addIndexes
   public void testDuringAddIndexes() throws Exception {
-    MockDirectoryWrapper dir1 = newDirectory();
+    Directory dir1 = newDirectory();
     final IndexWriter writer = new IndexWriter(
         dir1,
         newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@@ -781,8 +781,10 @@ public class TestIndexWriterReader extends LuceneTestCase {

     assertEquals(0, excs.size());
     r.close();
-    final Collection<String> openDeletedFiles = dir1.getOpenDeletedFiles();
-    assertEquals("openDeleted=" + openDeletedFiles, 0, openDeletedFiles.size());
+    if (dir1 instanceof MockDirectoryWrapper) {
+      final Collection<String> openDeletedFiles = ((MockDirectoryWrapper)dir1).getOpenDeletedFiles();
+      assertEquals("openDeleted=" + openDeletedFiles, 0, openDeletedFiles.size());
+    }

     writer.close();

@@ -31,6 +31,7 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.Bits;
@@ -130,7 +131,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("\nTEST: iter=" + iter);
       }
-      MockDirectoryWrapper dir = newDirectory();
+      MockDirectoryWrapper dir = newMockDirectory();
       IndexWriter writer = new IndexWriter(
           dir,
           newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@@ -245,7 +246,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("TEST: iter=" + iter);
       }
-      MockDirectoryWrapper dir = newDirectory();
+      MockDirectoryWrapper dir = newMockDirectory();

       IndexWriter writer = new IndexWriter(
           dir,
@@ -302,7 +303,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
   // Runs test, with one thread, using the specific failure
   // to trigger an IOException
   public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException {
-    MockDirectoryWrapper dir = newDirectory();
+    MockDirectoryWrapper dir = newMockDirectory();

     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
@@ -435,7 +436,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
   // and closes before the second IndexWriter time's out trying to get the Lock,
   // we should see both documents
   public void testOpenTwoIndexWritersOnDifferentThreads() throws IOException, InterruptedException {
-    final MockDirectoryWrapper dir = newDirectory();
+    final Directory dir = newDirectory();
     CountDownLatch oneIWConstructed = new CountDownLatch(1);
     DelayedIndexAndCloseRunnable thread1 = new DelayedIndexAndCloseRunnable(
         dir, oneIWConstructed);
@@ -503,8 +504,10 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {

   // LUCENE-4147
   public void testRollbackAndCommitWithThreads() throws Exception {
-    final MockDirectoryWrapper d = newFSDirectory(_TestUtil.getTempDir("RollbackAndCommitWithThreads"));
-    d.setPreventDoubleWrite(false);
+    final BaseDirectoryWrapper d = newFSDirectory(_TestUtil.getTempDir("RollbackAndCommitWithThreads"));
+    if (d instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)d).setPreventDoubleWrite(false);
+    }

     final int threadCount = _TestUtil.nextInt(random(), 2, 6);

@@ -24,6 +24,7 @@ import java.util.Set;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
@@ -35,12 +36,14 @@ public class TestNeverDelete extends LuceneTestCase {

   public void testIndexing() throws Exception {
     final File tmpDir = _TestUtil.getTempDir("TestNeverDelete");
-    final MockDirectoryWrapper d = newFSDirectory(tmpDir);
+    final BaseDirectoryWrapper d = newFSDirectory(tmpDir);

     // We want to "see" files removed if Lucene removed
     // them.  This is still worth running on Windows since
     // some files the IR opens and closes.
-    d.setNoDeleteOpenFile(false);
+    if (d instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)d).setNoDeleteOpenFile(false);
+    }
     final RandomIndexWriter w = new RandomIndexWriter(random(),
                                                       d,
                                                       newIndexWriterConfig(TEST_VERSION_CURRENT,
@@ -35,7 +35,7 @@ public class TestRollingUpdates extends LuceneTestCase {
   @Test
   public void testRollingUpdates() throws Exception {
     Random random = new Random(random().nextLong());
-    final MockDirectoryWrapper dir = newDirectory();
+    final BaseDirectoryWrapper dir = newDirectory();
     dir.setCheckIndexOnClose(false); // we use a custom codec provider
     final LineFileDocs docs = new LineFileDocs(random, true);

@@ -76,7 +76,7 @@ public class TestFSTs extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    dir = newDirectory();
+    dir = newMockDirectory();
     dir.setPreventDoubleWrite(false);
   }

@@ -1107,7 +1107,7 @@ public class TestFSTs extends LuceneTestCase {
     final int RUN_TIME_MSEC = atLeast(500);
     final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(-1).setRAMBufferSizeMB(64);
     final File tempDir = _TestUtil.getTempDir("fstlines");
-    final MockDirectoryWrapper dir = newFSDirectory(tempDir);
+    final Directory dir = newFSDirectory(tempDir);
     final IndexWriter writer = new IndexWriter(dir, conf);
     final long stopTime = System.currentTimeMillis() + RUN_TIME_MSEC;
     Document doc;
@@ -37,8 +37,8 @@ import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FailOnNonBulkMergesInfoStream;
@@ -433,7 +433,7 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
     final LineFileDocs docs = new LineFileDocs(random, true);
     final File tempDir = _TestUtil.getTempDir(testName);
     dir = newFSDirectory(tempDir);
-    ((MockDirectoryWrapper) dir).setCheckIndexOnClose(false); // don't double-checkIndex, we do it ourselves.
+    ((BaseDirectoryWrapper) dir).setCheckIndexOnClose(false); // don't double-checkIndex, we do it ourselves.
     final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random())).setInfoStream(new FailOnNonBulkMergesInfoStream());

@@ -0,0 +1,174 @@
+package org.apache.lucene.store;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Collection;
+
+import org.apache.lucene.util._TestUtil;
+
+/**
+ * Calls check index on close.
+ */
+// do NOT make any methods in this class synchronized, volatile
+// do NOT import anything from the concurrency package.
+// no randoms, no nothing.
+public class BaseDirectoryWrapper extends Directory {
+  /** our in directory */
+  protected final Directory delegate;
+  /** best effort: base on in Directory is volatile */
+  protected boolean open;
+
+  private boolean checkIndexOnClose = true;
+  private boolean crossCheckTermVectorsOnClose = true;
+
+  public BaseDirectoryWrapper(Directory delegate) {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public void close() throws IOException {
+    open = false;
+    if (checkIndexOnClose && indexPossiblyExists()) {
+      _TestUtil.checkIndex(this, crossCheckTermVectorsOnClose);
+    }
+    delegate.close();
+  }
+
+  public boolean isOpen() {
+    return open;
+  }
+
+  /**
+   * don't rely upon DirectoryReader.fileExists to determine if we should
+   * checkIndex() or not. It might mask real problems, where we silently
+   * don't checkindex at all. instead we look for a segments file.
+   */
+  protected boolean indexPossiblyExists() {
+    String files[];
+    try {
+      files = listAll();
+    } catch (IOException ex) {
+      // this means directory doesn't exist, which is ok. return false
+      return false;
+    }
+    for (String f : files) {
+      if (f.startsWith("segments_")) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Set whether or not checkindex should be run
+   * on close
+   */
+  public void setCheckIndexOnClose(boolean value) {
+    this.checkIndexOnClose = value;
+  }
+
+  public boolean getCheckIndexOnClose() {
+    return checkIndexOnClose;
+  }
+
+  public void setCrossCheckTermVectorsOnClose(boolean value) {
+    this.crossCheckTermVectorsOnClose = value;
+  }
+
+  public boolean getCrossCheckTermVectorsOnClose() {
+    return crossCheckTermVectorsOnClose;
+  }
+
+  // directory methods: delegate
+
+  @Override
+  public String[] listAll() throws IOException {
+    return delegate.listAll();
+  }
+
+  @Override
+  public boolean fileExists(String name) throws IOException {
+    return delegate.fileExists(name);
+  }
+
+  @Override
+  public void deleteFile(String name) throws IOException {
+    delegate.deleteFile(name);
+  }
+
+  @Override
+  public long fileLength(String name) throws IOException {
+    return delegate.fileLength(name);
+  }
+
+  @Override
+  public IndexOutput createOutput(String name, IOContext context) throws IOException {
+    return delegate.createOutput(name, context);
+  }
+
+  @Override
+  public void sync(Collection<String> names) throws IOException {
+    delegate.sync(names);
+  }
+
+  @Override
+  public IndexInput openInput(String name, IOContext context) throws IOException {
+    return delegate.openInput(name, context);
+  }
+
+  @Override
+  public Lock makeLock(String name) {
+    return delegate.makeLock(name);
+  }
+
+  @Override
+  public void clearLock(String name) throws IOException {
+    delegate.clearLock(name);
+  }
+
+  @Override
+  public void setLockFactory(LockFactory lockFactory) throws IOException {
+    delegate.setLockFactory(lockFactory);
+  }
+
+  @Override
+  public LockFactory getLockFactory() {
+    return delegate.getLockFactory();
+  }
+
+  @Override
+  public String getLockID() {
+    return delegate.getLockID();
+  }
+
+  @Override
+  public String toString() {
+    return "BaseDirectoryWrapper(" + delegate.toString() + ")";
+  }
+
+  @Override
+  public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
+    delegate.copy(to, src, dest, context);
+  }
+
+  @Override
+  public IndexInputSlicer createSlicer(String name, IOContext context) throws IOException {
+    return delegate.createSlicer(name, context);
+  }
+}
@@ -57,8 +57,7 @@ import org.apache.lucene.util._TestUtil;
  * </ul>
  */

-public class MockDirectoryWrapper extends Directory {
-  final Directory delegate;
+public class MockDirectoryWrapper extends BaseDirectoryWrapper {
   long maxSize;

   // Max actual bytes used. This is set by MockRAMOutputStream:
@@ -67,8 +66,6 @@ public class MockDirectoryWrapper extends Directory {
   Random randomState;
   boolean noDeleteOpenFile = true;
   boolean preventDoubleWrite = true;
-  boolean checkIndexOnClose = true;
-  boolean crossCheckTermVectorsOnClose = true;
   boolean trackDiskUsage = false;
   private Set<String> unSyncedFiles;
   private Set<String> createdFiles;
@@ -109,7 +106,7 @@ public class MockDirectoryWrapper extends Directory {
   }

   public MockDirectoryWrapper(Random random, Directory delegate) {
-    this.delegate = delegate;
+    super(delegate);
     // must make a private random since our methods are
     // called from different threads; else test failures may
     // not be reproducible from the original seed
@@ -251,19 +248,19 @@ public class MockDirectoryWrapper extends Directory {
         }
       }
       final IndexOutput tempOut = delegate.createOutput(tempFileName, LuceneTestCase.newIOContext(randomState));
-      IndexInput in = delegate.openInput(name, LuceneTestCase.newIOContext(randomState));
-      tempOut.copyBytes(in, in.length()/2);
+      IndexInput ii = delegate.openInput(name, LuceneTestCase.newIOContext(randomState));
+      tempOut.copyBytes(ii, ii.length()/2);
       tempOut.close();
-      in.close();
+      ii.close();

       // Delete original and copy bytes back:
       deleteFile(name, true);

       final IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState));
-      in = delegate.openInput(tempFileName, LuceneTestCase.newIOContext(randomState));
-      out.copyBytes(in, in.length());
+      ii = delegate.openInput(tempFileName, LuceneTestCase.newIOContext(randomState));
+      out.copyBytes(ii, ii.length());
       out.close();
-      in.close();
+      ii.close();
       deleteFile(tempFileName, true);
     } else if (damage == 3) {
       // The file survived intact:
@@ -316,26 +313,6 @@ public class MockDirectoryWrapper extends Directory {
     return noDeleteOpenFile;
   }

-  /**
-   * Set whether or not checkindex should be run
-   * on close
-   */
-  public void setCheckIndexOnClose(boolean value) {
-    this.checkIndexOnClose = value;
-  }
-
-  public boolean getCheckIndexOnClose() {
-    return checkIndexOnClose;
-  }
-
-  public void setCrossCheckTermVectorsOnClose(boolean value) {
-    this.crossCheckTermVectorsOnClose = value;
-  }
-
-  public boolean getCrossCheckTermVectorsOnClose() {
-    return crossCheckTermVectorsOnClose;
-  }
-
   /**
    * If 0.0, no exceptions will be thrown.  Else this should
    * be a double 0.0 - 1.0.  We will randomly throw an
@@ -575,8 +552,8 @@ public class MockDirectoryWrapper extends Directory {
       throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open locks: " + openLocks);
     }
     open = false;
-    if (checkIndexOnClose) {
-      if (indexPossiblyExists(this)) {
+    if (getCheckIndexOnClose()) {
+      if (indexPossiblyExists()) {
        if (LuceneTestCase.VERBOSE) {
          System.out.println("\nNOTE: MockDirectoryWrapper: now crash");
        }
@@ -584,7 +561,7 @@ public class MockDirectoryWrapper extends Directory {
        if (LuceneTestCase.VERBOSE) {
          System.out.println("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
        }
-        _TestUtil.checkIndex(this, crossCheckTermVectorsOnClose);
+        _TestUtil.checkIndex(this, getCrossCheckTermVectorsOnClose());

        if (assertNoUnreferencedFilesOnClose) {
          // now look for unreferenced files:
@@ -612,26 +589,6 @@ public class MockDirectoryWrapper extends Directory {
     }
     delegate.close();
   }

-  /** don't rely upon DirectoryReader.fileExists to determine if we should
-   * checkIndex() or not. It might mask real problems, where we silently
-   * don't checkindex at all. instead we look for a segments file.
-   */
-  private boolean indexPossiblyExists(Directory d) {
-    String files[];
-    try {
-      files = d.listAll();
-    } catch (IOException ex) {
-      // this means directory doesn't exist, which is ok. return false
-      return false;
-    }
-    for (String f : files) {
-      if (f.startsWith("segments_")) {
-        return true;
-      }
-    }
-    return false;
-  }
-
   synchronized void removeOpenFile(Closeable c, String name) {
     Integer v = openFiles.get(name);
@@ -658,8 +615,7 @@ public class MockDirectoryWrapper extends Directory {
     removeOpenFile(in, name);
   }

-  boolean open = true;
-
   @Override
   public synchronized boolean isOpen() {
     return open;
   }
@@ -2,6 +2,7 @@ package org.apache.lucene.util;

 import java.io.Closeable;

+import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.junit.Assert;

@@ -23,15 +24,15 @@ import org.junit.Assert;
  */

 /**
- * Attempts to close a {@link MockDirectoryWrapper}.
+ * Attempts to close a {@link BaseDirectoryWrapper}.
  *
  * @see LuceneTestCase#newDirectory(java.util.Random)
  */
 final class CloseableDirectory implements Closeable {
-  private final MockDirectoryWrapper dir;
+  private final BaseDirectoryWrapper dir;
   private final TestRuleMarkFailure failureMarker;

-  public CloseableDirectory(MockDirectoryWrapper dir,
+  public CloseableDirectory(BaseDirectoryWrapper dir,
                             TestRuleMarkFailure failureMarker) {
     this.dir = dir;
     this.failureMarker = failureMarker;
@@ -780,48 +780,48 @@ public abstract class LuceneTestCase extends Assert {
    * Returns a new Directory instance. Use this when the test does not
    * care about the specific Directory implementation (most tests).
    * <p>
-   * The Directory is wrapped with {@link MockDirectoryWrapper}.
-   * By default this means it will be picky, such as ensuring that you
+   * The Directory is wrapped with {@link BaseDirectoryWrapper}.
+   * this means usually it will be picky, such as ensuring that you
    * properly close it and all open files in your test. It will emulate
    * some features of Windows, such as not allowing open files to be
    * overwritten.
    */
-  public static MockDirectoryWrapper newDirectory() {
+  public static BaseDirectoryWrapper newDirectory() {
     return newDirectory(random());
   }


   /**
    * Returns a new Directory instance, using the specified random.
    * See {@link #newDirectory()} for more information.
    */
-  public static MockDirectoryWrapper newDirectory(Random r) {
-    Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
-    MockDirectoryWrapper dir = new MockDirectoryWrapper(r, maybeNRTWrap(r, impl));
-    closeAfterSuite(new CloseableDirectory(dir, suiteFailureMarker));
+  public static BaseDirectoryWrapper newDirectory(Random r) {
+    return wrapDirectory(r, newDirectoryImpl(r, TEST_DIRECTORY), rarely(r));
+  }

-    dir.setThrottling(TEST_THROTTLING);
-    if (VERBOSE) {
-      System.out.println("NOTE: LuceneTestCase.newDirectory: returning " + dir);
-    }
-    return dir;
-  }
+  public static MockDirectoryWrapper newMockDirectory() {
+    return newMockDirectory(random());
+  }
+
+  public static MockDirectoryWrapper newMockDirectory(Random r) {
+    return (MockDirectoryWrapper) wrapDirectory(r, newDirectoryImpl(r, TEST_DIRECTORY), false);
+  }

   /**
    * Returns a new Directory instance, with contents copied from the
    * provided directory. See {@link #newDirectory()} for more
    * information.
    */
-  public static MockDirectoryWrapper newDirectory(Directory d) throws IOException {
+  public static BaseDirectoryWrapper newDirectory(Directory d) throws IOException {
     return newDirectory(random(), d);
   }

   /** Returns a new FSDirectory instance over the given file, which must be a folder. */
-  public static MockDirectoryWrapper newFSDirectory(File f) {
+  public static BaseDirectoryWrapper newFSDirectory(File f) {
     return newFSDirectory(f, null);
   }

   /** Returns a new FSDirectory instance over the given file, which must be a folder. */
-  public static MockDirectoryWrapper newFSDirectory(File f, LockFactory lf) {
+  public static BaseDirectoryWrapper newFSDirectory(File f, LockFactory lf) {
     String fsdirClass = TEST_DIRECTORY;
     if (fsdirClass.equals("random")) {
       fsdirClass = RandomPicks.randomFrom(random(), FS_DIRECTORIES);
@@ -838,14 +838,11 @@ public abstract class LuceneTestCase extends Assert {
       }

       Directory fsdir = newFSDirectoryImpl(clazz, f);
-      MockDirectoryWrapper dir = new MockDirectoryWrapper(
-          random(), maybeNRTWrap(random(), fsdir));
+      BaseDirectoryWrapper wrapped = wrapDirectory(random(), fsdir, rarely());
       if (lf != null) {
-        dir.setLockFactory(lf);
+        wrapped.setLockFactory(lf);
       }
-      closeAfterSuite(new CloseableDirectory(dir, suiteFailureMarker));
-      dir.setThrottling(TEST_THROTTLING);
-      return dir;
+      return wrapped;
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
@@ -856,22 +853,27 @@ public abstract class LuceneTestCase extends Assert {
    * with contents copied from the provided directory. See
    * {@link #newDirectory()} for more information.
    */
-  public static MockDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
+  public static BaseDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
     Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
     for (String file : d.listAll()) {
       d.copy(impl, file, file, newIOContext(r));
     }
-    MockDirectoryWrapper dir = new MockDirectoryWrapper(r, maybeNRTWrap(r, impl));
-    closeAfterSuite(new CloseableDirectory(dir, suiteFailureMarker));
-    dir.setThrottling(TEST_THROTTLING);
-    return dir;
+    return wrapDirectory(r, impl, rarely(r));
   }

-  private static Directory maybeNRTWrap(Random random, Directory directory) {
+  private static BaseDirectoryWrapper wrapDirectory(Random random, Directory directory, boolean bare) {
     if (rarely(random)) {
-      return new NRTCachingDirectory(directory, random.nextDouble(), random.nextDouble());
+      directory = new NRTCachingDirectory(directory, random.nextDouble(), random.nextDouble());
+    }
+    if (bare) {
+      BaseDirectoryWrapper base = new BaseDirectoryWrapper(directory);
+      closeAfterSuite(new CloseableDirectory(base, suiteFailureMarker));
+      return base;
     } else {
-      return directory;
+      MockDirectoryWrapper mock = new MockDirectoryWrapper(random, directory);
+      mock.setThrottling(TEST_THROTTLING);
+      closeAfterSuite(new CloseableDirectory(mock, suiteFailureMarker));
+      return mock;
     }
   }

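The LuceneTestCase hunks above are the heart of the change: newDirectory() and newFSDirectory() now hand back a BaseDirectoryWrapper (occasionally a bare, unsynchronized one via wrapDirectory(..., rarely())), while the new newMockDirectory() keeps returning a full MockDirectoryWrapper for tests that need throttling or failure injection. A hedged usage sketch (test class and method names are hypothetical, not from the commit):

```java
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;

// Hypothetical test showing the new explicit-mock entry point: the result of
// newMockDirectory() is always a MockDirectoryWrapper, so no instanceof guard
// is needed before calling mock-only methods such as setThrottling() or failOn().
public class MockDirectoryExampleTest extends LuceneTestCase {
  public void testExplicitMock() throws Exception {
    MockDirectoryWrapper dir = newMockDirectory();
    dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES); // mock-only knob
    // ... build an index here and optionally inject failures with dir.failOn(...)
    dir.close();
  }
}
```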
@@ -31,12 +31,14 @@ public class MockDirectoryFactory extends CachingDirectoryFactory {

   @Override
   protected Directory create(String path) throws IOException {
-    MockDirectoryWrapper dir = LuceneTestCase.newDirectory();
+    Directory dir = LuceneTestCase.newDirectory();
     // Somehow removing unref'd files in Solr tests causes
     // problems... there's some interaction w/
     // CachingDirectoryFactory. Once we track down where Solr
     // isn't closing an IW, we can re-enable this:
-    dir.setAssertNoUnrefencedFilesOnClose(false);
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setAssertNoUnrefencedFilesOnClose(false);
+    }
     return dir;
   }

@@ -31,12 +31,14 @@ public class MockFSDirectoryFactory extends CachingDirectoryFactory {

   @Override
   public Directory create(String path) throws IOException {
-    MockDirectoryWrapper dir = LuceneTestCase.newFSDirectory(new File(path));
+    Directory dir = LuceneTestCase.newFSDirectory(new File(path));
     // Somehow removing unref'd files in Solr tests causes
     // problems... there's some interaction w/
     // CachingDirectoryFactory. Once we track down where Solr
     // isn't closing an IW, we can re-enable this:
-    dir.setAssertNoUnrefencedFilesOnClose(false);
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper)dir).setAssertNoUnrefencedFilesOnClose(false);
+    }
     return dir;
   }
 }