LUCENE-2594: cutover oal.index.* tests to use a random IWC

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@984202 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2010-08-10 20:32:47 +00:00
parent 91b5291d5a
commit 006bd17be2
54 changed files with 799 additions and 644 deletions
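
The change is mechanical but broad: instead of building a fixed IndexWriterConfig, each test now obtains one from a LuceneTestCase helper that randomizes whatever settings the test does not pin itself, so every run exercises a different writer configuration. A rough sketch of the idea (illustrative only; the real newIndexWriterConfig helper may choose different knobs and ranges):

    // Hypothetical sketch of the helper these tests call; illustration only.
    public static IndexWriterConfig newIndexWriterConfig(Random r, Version v, Analyzer a) {
      IndexWriterConfig c = new IndexWriterConfig(v, a);
      if (r.nextBoolean()) {
        c.setMaxBufferedDocs(_TestUtil.nextInt(r, 2, 1000));   // sometimes flush by doc count
      }
      if (r.nextBoolean()) {
        c.setTermIndexInterval(_TestUtil.nextInt(r, 1, 1000)); // vary terms index density
      }
      if (r.nextBoolean()) {
        c.setMaxBufferedDeleteTerms(_TestUtil.nextInt(r, 1, 1000));
      }
      return c;
    }

Tests that depend on a particular setting simply chain the setter after the call, overriding the random choice, as the hunks below show again and again.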

View File

@@ -106,8 +106,8 @@ public class SegmentReader extends IndexReader implements Cloneable {
CoreReaders(SegmentReader origInstance, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor, CodecProvider codecs) throws IOException {
if (termsIndexDivisor < 1 && termsIndexDivisor != -1) {
throw new IllegalArgumentException("indexDivisor must be -1 (don't load terms index) or greater than 0: got " + termsIndexDivisor);
if (termsIndexDivisor == 0) {
throw new IllegalArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)");
}
segment = si.name;

View File

@@ -111,6 +111,7 @@ public class SimpleStandardTermsIndexReader extends StandardTermsIndexReader {
// In case terms index gets loaded, later, on demand
totalIndexInterval = indexInterval * indexDivisor;
}
assert totalIndexInterval > 0;
seekDir(in, dirOffset);
@@ -365,6 +366,7 @@ public class SimpleStandardTermsIndexReader extends StandardTermsIndexReader {
public void getIndexOffset(BytesRef term, TermsIndexResult result) throws IOException {
int lo = 0; // binary search
int hi = numIndexTerms - 1;
assert totalIndexInterval > 0 : "totalIndexInterval=" + totalIndexInterval;
while (hi >= lo) {
int mid = (lo + hi) >>> 1;
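
The assert guards the binary search over the in-memory index terms: totalIndexInterval is the stride between indexed terms, and a zero or stale value would turn the computed seek offsets into garbage. The search itself, truncated above, is the usual floor search; a generic, self-contained sketch (hypothetical helper, not the real method, which compares BytesRefs and fills a TermsIndexResult):

    // Floor binary search: index of the greatest element <= target, or -1.
    static int floorIndex(String[] indexTerms, String target) {
      int lo = 0;
      int hi = indexTerms.length - 1;
      while (hi >= lo) {
        int mid = (lo + hi) >>> 1;  // unsigned shift avoids overflow in (lo + hi) / 2
        int cmp = target.compareTo(indexTerms[mid]);
        if (cmp < 0) {
          hi = mid - 1;
        } else if (cmp > 0) {
          lo = mid + 1;
        } else {
          return mid;               // exact hit on an index term
        }
      }
      return hi;
    }
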
@@ -411,8 +413,12 @@ public class SimpleStandardTermsIndexReader extends StandardTermsIndexReader {
public void loadTermsIndex(int indexDivisor) throws IOException {
if (!indexLoaded) {
this.indexDivisor = indexDivisor;
this.totalIndexInterval = indexInterval * indexDivisor;
if (indexDivisor < 0) {
this.indexDivisor = -indexDivisor;
} else {
this.indexDivisor = indexDivisor;
}
this.totalIndexInterval = indexInterval * this.indexDivisor;
Iterator<FieldIndexReader> it = fields.values().iterator();
while(it.hasNext()) {
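
The fix in this hunk: loadTermsIndex may now receive a negative divisor (the "don't load the terms index yet" marker from the constructor path) and normalizes it to its absolute value before recomputing totalIndexInterval, which is what keeps the new totalIndexInterval > 0 asserts honest. Condensed from the lines above:

    // A negative divisor meant "loading was deferred"; use its magnitude
    // once the terms index is actually loaded.
    this.indexDivisor = indexDivisor < 0 ? -indexDivisor : indexDivisor;
    this.totalIndexInterval = indexInterval * this.indexDivisor;
    assert totalIndexInterval > 0;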

View File

@@ -362,7 +362,7 @@ public class StandardTermsDictReader extends FieldsProducer {
bytesReader.reset(indexResult.term);
state.ord = indexResult.position-1;
assert state.ord >= -1: "ord=" + state.ord;
assert state.ord >= -1: "ord=" + state.ord + " pos=" + indexResult.position;
startOrd = indexResult.position;
} else {

View File

@@ -28,8 +28,8 @@ import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
@@ -39,8 +39,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
public void testCaching() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer()));
RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir);
Document doc = new Document();
TokenStream stream = new TokenStream() {
private int index = 0;
@@ -73,9 +72,8 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
// 2) now add the document to the index and verify if all tokens are indexed
// don't reset the stream here, the DocumentWriter should do that implicitly
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = writer.getReader();
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader,
MultiFields.getDeletedDocs(reader),
"preanalyzed",
@@ -101,7 +99,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
assertEquals(1, termPositions.freq());
assertEquals(2, termPositions.nextPosition());
reader.close();
writer.close();
// 3) reset stream and consume tokens again
stream.reset();
checkTokens(stream);
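
The cutover to RandomIndexWriter also changes the reader lifecycle: instead of close-writer-then-open-reader, the test takes a reader from the still-open writer via getReader() and closes the writer last. The new shape, condensed from the hunk above:

    // New test shape with RandomIndexWriter (condensed from the hunk).
    Directory dir = new RAMDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir);
    Document doc = new Document();
    // ... add fields, e.g. the cached pre-analyzed token stream ...
    writer.addDocument(doc);
    IndexReader reader = writer.getReader(); // reader over the open writer
    // ... assertions against the reader ...
    reader.close();
    writer.close();                          // writer closes after its reader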

View File

@@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -33,6 +34,14 @@ import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.PhraseQuery;
public class TestAddIndexes extends LuceneTestCase {
private Random random;
@Override
public void setUp() throws Exception {
super.setUp();
random = newRandom();
}
public void testSimpleCase() throws IOException {
// main directory
Directory dir = new RAMDirectory();
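
Each test class now grabs a Random in setUp() via newRandom() and threads it through every config it builds. The point of going through LuceneTestCase rather than new Random() is reproducibility; presumably something along these lines (illustrative sketch only, the real helper may differ):

    // Hypothetical sketch of newRandom(): a seeded Random whose seed is
    // reported so a failing randomized run can be replayed.
    private static final Random seedSource = new Random();

    public Random newRandom() {
      long seed = seedSource.nextLong();
      System.out.println("NOTE: random seed for " + getName() + ": " + seed);
      return new Random(seed);
    }
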
@@ -42,7 +51,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.CREATE));
// add 100 documents
@@ -51,7 +60,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.close();
_TestUtil.checkIndex(dir);
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
writer = newWriter(aux, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 40 documents in separate files
@@ -59,14 +68,14 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(40, writer.maxDoc());
writer.close();
writer = newWriter(aux2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
writer = newWriter(aux2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
// add 50 documents in compound files
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.close();
// test doc count before segments are merged
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
assertEquals(100, writer.maxDoc());
writer.addIndexes(new Directory[] { aux, aux2 });
assertEquals(190, writer.maxDoc());
@@ -81,14 +90,14 @@ public class TestAddIndexes extends LuceneTestCase {
// now add another set in.
Directory aux3 = new RAMDirectory();
writer = newWriter(aux3, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = newWriter(aux3, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
// add 40 documents
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.close();
// test doc count before segments are merged/index is optimized
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
assertEquals(190, writer.maxDoc());
writer.addIndexes(new Directory[] { aux3 });
assertEquals(230, writer.maxDoc());
@@ -102,7 +111,7 @@ public class TestAddIndexes extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "bbb"), 50);
// now optimize it.
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -115,11 +124,11 @@ public class TestAddIndexes extends LuceneTestCase {
// now add a single document
Directory aux4 = new RAMDirectory();
writer = newWriter(aux4, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = newWriter(aux4, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocs2(writer, 1);
writer.close();
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
assertEquals(230, writer.maxDoc());
writer.addIndexes(new Directory[] { aux4 });
assertEquals(231, writer.maxDoc());
@@ -137,7 +146,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = new MockRAMDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.addIndexes(new Directory[] {aux});
// Adds 10 docs, then replaces them with another 10
@@ -174,7 +183,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@@ -212,7 +221,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@@ -253,25 +262,25 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.close();
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
writer = newWriter(aux, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 140 documents in separate files
addDocs(writer, 40);
writer.close();
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
writer = newWriter(aux, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
addDocs(writer, 100);
writer.close();
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
try {
// cannot add self
writer.addIndexes(new Directory[] { aux, dir });
@@ -297,7 +306,7 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
@@ -321,7 +330,7 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(9));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(9));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
addDocs(writer, 2);
@@ -343,7 +352,7 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
@@ -373,7 +382,7 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(10, reader.numDocs());
reader.close();
IndexWriter writer = newWriter(dir, new IndexWriterConfig(
IndexWriter writer = newWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(4));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
@@ -394,7 +403,7 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(aux2, new IndexWriterConfig(
IndexWriter writer = newWriter(aux2, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
@@ -417,7 +426,7 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(22, reader.numDocs());
reader.close();
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(6));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
@@ -473,14 +482,14 @@ public class TestAddIndexes extends LuceneTestCase {
private void setUpDirs(Directory dir, Directory aux) throws IOException {
IndexWriter writer = null;
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
addDocs(writer, 1000);
assertEquals(1000, writer.maxDoc());
assertEquals(1, writer.getSegmentCount());
writer.close();
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
writer = newWriter(aux, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
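
A pattern this file repeats many times: because the config now arrives with randomized defaults, tests that rely on specific merge behavior pull the merge policy back out of the config and pin only the knobs they care about (the cast assumes the randomized config still hands out a LogMergePolicy):

    // Pin only what the test depends on; everything else stays randomized.
    IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
        .setOpenMode(OpenMode.CREATE)
        .setMaxBufferedDocs(100);
    LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
    lmp.setUseCompoundFile(false);
    lmp.setUseCompoundDocStore(false);
    lmp.setMergeFactor(10);
    IndexWriter writer = new IndexWriter(aux, conf);
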
@@ -488,7 +497,7 @@ public class TestAddIndexes extends LuceneTestCase {
for (int i = 0; i < 3; i++) {
addDocs(writer, 10);
writer.close();
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
writer = newWriter(aux, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
@@ -506,7 +515,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setUseCompoundFile(false);
lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(5).setMergePolicy(lmp));
@@ -535,7 +544,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setUseCompoundFile(false);
lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(4);
writer = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
writer.addIndexes(new Directory[] {dir});

View File

@@ -168,7 +168,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
try {
writer = new IndexWriter(dir, new IndexWriterConfig(
writer = new IndexWriter(dir, newIndexWriterConfig(rnd,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMergeScheduler(new SerialMergeScheduler()) // no threads!
);
@@ -220,13 +220,14 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
public void testAddOldIndexes() throws IOException {
Random random = newRandom();
for (String name : oldNames) {
unzip(getDataFile("index." + name + ".zip"), name);
String fullPath = fullDir(name);
Directory dir = FSDirectory.open(new File(fullPath));
Directory targetDir = new RAMDirectory();
IndexWriter w = new IndexWriter(targetDir, new IndexWriterConfig(
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
w.addIndexes(new Directory[] { dir });
w.close();
@@ -239,6 +240,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
public void testAddOldIndexesReader() throws IOException {
Random random = newRandom();
for (String name : oldNames) {
unzip(getDataFile("index." + name + ".zip"), name);
String fullPath = fullDir(name);
@@ -246,7 +248,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
IndexReader reader = IndexReader.open(dir);
Directory targetDir = new RAMDirectory();
IndexWriter w = new IndexWriter(targetDir, new IndexWriterConfig(
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
w.addIndexes(new IndexReader[] { reader });
w.close();
@@ -268,17 +270,19 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
public void testIndexOldIndexNoAdds() throws IOException {
Random random = newRandom();
for(int i=0;i<oldNames.length;i++) {
unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
changeIndexNoAdds(oldNames[i]);
changeIndexNoAdds(random, oldNames[i]);
rmDir(oldNames[i]);
}
}
public void testIndexOldIndex() throws IOException {
Random random = newRandom();
for(int i=0;i<oldNames.length;i++) {
unzip(getDataFile("index." + oldNames[i] + ".zip"), oldNames[i]);
changeIndexWithAdds(oldNames[i]);
changeIndexWithAdds(random, oldNames[i]);
rmDir(oldNames[i]);
}
}
@@ -357,14 +361,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
return v0 - v1;
}
public void changeIndexWithAdds(String dirName) throws IOException {
public void changeIndexWithAdds(Random random, String dirName) throws IOException {
String origDirName = dirName;
dirName = fullDir(dirName);
Directory dir = FSDirectory.open(new File(dirName));
// open writer
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
// add 10 docs
for(int i=0;i<10;i++) {
@@ -391,11 +394,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// make sure we can do delete & setNorm against this segment:
IndexReader reader = IndexReader.open(dir, false);
searcher = new IndexSearcher(reader);
Term searchTerm = new Term("id", "6");
int delCount = reader.deleteDocuments(searchTerm);
assertEquals("wrong delete count", 1, delCount);
reader.setNorm(22, "content", (float) 2.0);
reader.setNorm(searcher.search(new TermQuery(new Term("id", "22")), 10).scoreDocs[0].doc, "content", (float) 2.0);
reader.close();
searcher.close();
// make sure they "took":
searcher = new IndexSearcher(dir, true);
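
Worth noting in this hunk: with randomized flush and merge settings, absolute document numbers are no longer stable across runs, so the hard-coded setNorm(22, ...) is replaced by resolving the target docID through a search on the stored id field:

    // Condensed from the hunk: look the document up by its "id" field
    // instead of assuming it landed at doc number 22.
    int docId = searcher.search(new TermQuery(new Term("id", "22")), 10).scoreDocs[0].doc;
    reader.setNorm(docId, "content", (float) 2.0);
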
@@ -407,7 +412,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
searcher.close();
// optimize
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -422,7 +427,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
dir.close();
}
public void changeIndexNoAdds(String dirName) throws IOException {
public void changeIndexNoAdds(Random random, String dirName) throws IOException {
dirName = fullDir(dirName);
@@ -454,7 +459,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
searcher.close();
// optimize
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -469,14 +474,14 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
dir.close();
}
public void createIndex(String dirName, boolean doCFS) throws IOException {
public void createIndex(Random random, String dirName, boolean doCFS) throws IOException {
rmDir(dirName);
dirName = fullDir(dirName);
Directory dir = FSDirectory.open(new File(dirName));
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(doCFS);
IndexWriter writer = new IndexWriter(dir, conf);
@@ -488,7 +493,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
writer.close();
// open fresh writer so we get no prx file in the added segment
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(doCFS);
writer = new IndexWriter(dir, conf);
@@ -516,7 +521,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
try {
Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(-1).setRAMBufferSizeMB(16.0));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
for(int i=0;i<35;i++) {
addDoc(writer, i);
}
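
This helper regenerates the zipped back-compat indexes, so its segment layout must not depend on the randomized defaults; the writer therefore pins flush-by-RAM (setMaxBufferedDocs(-1) disables the doc-count trigger), a fixed 16 MB buffer, compound files, and a merge factor of 10. Condensed:

    // Settings pinned so regenerated back-compat indexes have a stable layout.
    IndexWriterConfig conf = newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer())
        .setMaxBufferedDocs(-1)   // disable flush by doc count; flush by RAM only
        .setRAMBufferSizeMB(16.0);
    IndexWriter writer = new IndexWriter(dir, conf);
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);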

View File

@@ -34,12 +34,13 @@ public class TestCheckIndex extends LuceneTestCase {
public void testDeletedDocs() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<19;i++) {
writer.addDocument(doc);
}
writer.optimize();
writer.close();
IndexReader reader = IndexReader.open(dir, false);
reader.deleteDocument(5);

View File

@@ -353,7 +353,7 @@ public class TestCodecs extends MultiCodecTestCase {
public void testSepPositionAfterMerge() throws IOException {
final Directory dir = new RAMDirectory();
final IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_31,
final IndexWriterConfig config = newIndexWriterConfig(newRandom(), Version.LUCENE_31,
new MockAnalyzer());
config.setCodecProvider(new MockSepCodecs());
final IndexWriter writer = new IndexWriter(dir, config);

View File

@@ -26,6 +26,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
import java.util.Random;
public class TestConcurrentMergeScheduler extends LuceneTestCase {
@@ -65,7 +66,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.failOn(failure);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
Document doc = new Document();
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
@@ -114,7 +115,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// merging of segments with and without deletes at the
// start:
mp.setMinMergeDocs(1000);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMergePolicy(mp));
@@ -147,8 +148,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
public void testNoExtraFiles() throws IOException {
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
Random random = newRandom();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2));
@@ -164,7 +165,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
// Reopen
writer = new IndexWriter(directory, new IndexWriterConfig(
writer = new IndexWriter(directory, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
}
@@ -176,12 +177,12 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
public void testNoWaitClose() throws IOException {
RAMDirectory directory = new MockRAMDirectory();
Random random = newRandom();
Document doc = new Document();
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(100);
for(int iter=0;iter<10;iter++) {
@@ -210,7 +211,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
reader.close();
// Reopen
writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(100);
}
writer.close();

View File

@@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.MockRAMDirectory;
@@ -28,14 +29,15 @@ import org.apache.lucene.document.Field;
public class TestCrash extends LuceneTestCase {
private IndexWriter initIndex(boolean initialCommit) throws IOException {
return initIndex(new MockRAMDirectory(), initialCommit);
private IndexWriter initIndex(Random random, boolean initialCommit) throws IOException {
return initIndex(random, new MockRAMDirectory(), initialCommit);
}
private IndexWriter initIndex(MockRAMDirectory dir, boolean initialCommit) throws IOException {
private IndexWriter initIndex(Random random, MockRAMDirectory dir, boolean initialCommit) throws IOException {
dir.setLockFactory(NoLockFactory.getNoLockFactory());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler()));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
if (initialCommit) {
writer.commit();
@@ -53,6 +55,7 @@ public class TestCrash extends LuceneTestCase {
private void crash(final IndexWriter writer) throws IOException {
final MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
cms.sync();
dir.crash();
cms.sync();
dir.clearCrash();
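
initIndex now always installs a ConcurrentMergeScheduler with suppressed exceptions (the simulated crash will make merges fail), and crash() brackets the simulated crash with cms.sync() so no merge thread is mid-write when the directory flips into its crashed state:

    // Condensed from the hunk: quiesce merge threads around the crash.
    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
    cms.sync();        // wait for in-flight merges
    dir.crash();       // MockRAMDirectory drops unsynced writes from here on
    cms.sync();        // drain any merge that started meanwhile
    dir.clearCrash();
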
@@ -62,7 +65,7 @@ public class TestCrash extends LuceneTestCase {
// This test relies on being able to open a reader before any commit
// happened, so we must create an initial commit just to allow that, but
// before any documents were added.
IndexWriter writer = initIndex(true);
IndexWriter writer = initIndex(newRandom(), true);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
crash(writer);
IndexReader reader = IndexReader.open(dir, false);
@@ -73,11 +76,12 @@ public class TestCrash extends LuceneTestCase {
// This test relies on being able to open a reader before any commit
// happened, so we must create an initial commit just to allow that, but
// before any documents were added.
IndexWriter writer = initIndex(true);
Random random = newRandom();
IndexWriter writer = initIndex(random, true);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
dir.setPreventDoubleWrite(false);
crash(writer);
writer = initIndex(dir, false);
writer = initIndex(random, dir, false);
writer.close();
IndexReader reader = IndexReader.open(dir, false);
@@ -85,10 +89,11 @@ public class TestCrash extends LuceneTestCase {
}
public void testCrashAfterReopen() throws IOException {
IndexWriter writer = initIndex(false);
Random random = newRandom();
IndexWriter writer = initIndex(random, false);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
writer.close();
writer = initIndex(dir, false);
writer = initIndex(random, dir, false);
assertEquals(314, writer.maxDoc());
crash(writer);
@@ -107,7 +112,7 @@ public class TestCrash extends LuceneTestCase {
public void testCrashAfterClose() throws IOException {
IndexWriter writer = initIndex(false);
IndexWriter writer = initIndex(newRandom(), false);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
writer.close();
@@ -126,7 +131,7 @@ public class TestCrash extends LuceneTestCase {
public void testCrashAfterCloseNoWait() throws IOException {
IndexWriter writer = initIndex(false);
IndexWriter writer = initIndex(newRandom(), false);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
writer.close(false);
@@ -145,7 +150,7 @@ public class TestCrash extends LuceneTestCase {
public void testCrashReaderDeletes() throws IOException {
IndexWriter writer = initIndex(false);
IndexWriter writer = initIndex(newRandom(), false);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
writer.close(false);
@@ -166,7 +171,7 @@ public class TestCrash extends LuceneTestCase {
public void testCrashReaderDeletesAfterClose() throws IOException {
IndexWriter writer = initIndex(false);
IndexWriter writer = initIndex(newRandom(), false);
MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
writer.close(false);

View File

@@ -20,6 +20,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.Collection;
@@ -199,10 +200,11 @@ public class TestDeletionPolicy extends LuceneTestCase {
final double SECONDS = 2.0;
boolean useCompoundFile = true;
Random random = newRandom();
Directory dir = new RAMDirectory();
ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setIndexDeletionPolicy(policy);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
@@ -216,7 +218,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Record last time when writer performed deletes of
// past commits
lastDeleteTime = System.currentTimeMillis();
conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
@@ -271,7 +273,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
* Test a silly deletion policy that keeps all commits around.
*/
public void testKeepAllDeletionPolicy() throws IOException {
Random random = newRandom();
for(int pass=0;pass<2;pass++) {
boolean useCompoundFile = (pass % 2) != 0;
@@ -282,20 +285,21 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = new RAMDirectory();
policy.dir = dir;
IndexWriterConfig conf = new IndexWriterConfig(
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
.setMergeScheduler(new SerialMergeScheduler());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
lmp.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, conf);
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.close();
conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
@@ -338,7 +342,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Open & close a writer and assert that it
// actually removed something:
int preCount = dir.listAll().length;
writer = new IndexWriter(dir, new IndexWriterConfig(
writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy));
@@ -356,16 +360,18 @@ public class TestDeletionPolicy extends LuceneTestCase {
* then, opens a new IndexWriter on a previous commit
* point. */
public void testOpenPriorSnapshot() throws IOException {
Random random = newRandom();
// Never deletes a commit
KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy();
Directory dir = new MockRAMDirectory();
policy.dir = dir;
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(2));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
for(int i=0;i<10;i++) {
addDoc(writer);
if ((1+i)%2 == 0)
@@ -383,7 +389,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertTrue(lastCommit != null);
// Now add 1 doc and optimize
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
addDoc(writer);
assertEquals(11, writer.numDocs());
writer.optimize();
@@ -392,7 +398,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(6, IndexReader.listCommits(dir).size());
// Now open writer on the commit just before optimize:
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
@@ -405,7 +411,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(11, r.numDocs());
r.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Commits the rollback:
@@ -422,7 +428,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
r.close();
// Reoptimize
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
writer.optimize();
writer.close();
@@ -433,7 +439,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Now open writer on the commit just before optimize,
// but this time keeping only the last commit:
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexCommit(lastCommit));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Reader still sees optimized index, because writer
@@ -460,7 +466,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
* you know there are no readers.
*/
public void testKeepNoneOnInitDeletionPolicy() throws IOException {
Random random = newRandom();
for(int pass=0;pass<2;pass++) {
boolean useCompoundFile = (pass % 2) != 0;
@@ -469,7 +476,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = new RAMDirectory();
IndexWriterConfig conf = new IndexWriterConfig(
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
@@ -482,7 +489,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
}
writer.close();
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
@@ -509,7 +516,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
* Test a deletion policy that keeps last N commits.
*/
public void testKeepLastNDeletionPolicy() throws IOException {
Random random = newRandom();
final int N = 5;
for(int pass=0;pass<2;pass++) {
@@ -521,7 +528,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
for(int j=0;j<N+1;j++) {
IndexWriterConfig conf = new IndexWriterConfig(
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
@@ -571,7 +578,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
* around, with reader doing deletes.
*/
public void testKeepLastNDeletionPolicyWithReader() throws IOException {
Random random = newRandom();
final int N = 10;
for(int pass=0;pass<2;pass++) {
@@ -581,7 +589,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
Directory dir = new RAMDirectory();
IndexWriterConfig conf = new IndexWriterConfig(
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
@@ -593,7 +601,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Query query = new TermQuery(searchTerm);
for(int i=0;i<N+1;i++) {
conf = new IndexWriterConfig(
conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
@@ -615,7 +623,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
reader.close();
searcher.close();
}
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
@@ -680,7 +688,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
* around, through creates.
*/
public void testKeepLastNDeletionPolicyWithCreates() throws IOException {
Random random = newRandom();
final int N = 10;
for(int pass=0;pass<2;pass++) {
@@ -690,7 +699,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
Directory dir = new RAMDirectory();
IndexWriterConfig conf = new IndexWriterConfig(
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
@@ -704,7 +713,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int i=0;i<N+1;i++) {
conf = new IndexWriterConfig(
conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
@@ -727,7 +736,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
reader.close();
searcher.close();
writer = new IndexWriter(dir, new IndexWriterConfig(
writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy));
// This will not commit: there are no changes

View File

@@ -28,6 +28,7 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.util.Random;
public class TestDirectoryReader extends LuceneTestCase {
protected Directory dir;
@@ -124,16 +125,17 @@ public class TestDirectoryReader extends LuceneTestCase {
}
public void testIsCurrent() throws IOException {
Random random = newRandom();
RAMDirectory ramDir1=new RAMDirectory();
addDoc(ramDir1, "test foo", true);
addDoc(random, ramDir1, "test foo", true);
RAMDirectory ramDir2=new RAMDirectory();
addDoc(ramDir2, "test blah", true);
addDoc(random, ramDir2, "test blah", true);
IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false)};
MultiReader mr = new MultiReader(readers);
assertTrue(mr.isCurrent()); // just opened, must be current
addDoc(ramDir1, "more text", false);
addDoc(random, ramDir1, "more text", false);
assertFalse(mr.isCurrent()); // has been modified, not current anymore
addDoc(ramDir2, "even more text", false);
addDoc(random, ramDir2, "even more text", false);
assertFalse(mr.isCurrent()); // has been modified even more, not current anymore
try {
mr.getVersion();
@@ -145,12 +147,13 @@ public class TestDirectoryReader extends LuceneTestCase {
}
public void testMultiTermDocs() throws IOException {
Random random = newRandom();
RAMDirectory ramDir1=new RAMDirectory();
addDoc(ramDir1, "test foo", true);
addDoc(random, ramDir1, "test foo", true);
RAMDirectory ramDir2=new RAMDirectory();
addDoc(ramDir2, "test blah", true);
addDoc(random, ramDir2, "test blah", true);
RAMDirectory ramDir3=new RAMDirectory();
addDoc(ramDir3, "test wow", true);
addDoc(random, ramDir3, "test wow", true);
IndexReader[] readers1 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir3, false)};
IndexReader[] readers2 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false), IndexReader.open(ramDir3, false)};
@@ -181,8 +184,8 @@ public class TestDirectoryReader extends LuceneTestCase {
assertTrue(ret > 0);
}
private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
IndexWriter iw = new IndexWriter(ramDir1, new IndexWriterConfig(
private void addDoc(Random random, RAMDirectory ramDir1, String s, boolean create) throws IOException {
IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig(random,
TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(
create ? OpenMode.CREATE : OpenMode.APPEND));

View File

@@ -25,6 +25,7 @@ import java.io.StringWriter;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
@@ -109,11 +110,13 @@ public class TestDoc extends LuceneTestCase {
public void testIndexAndMerge() throws Exception {
StringWriter sw = new StringWriter();
PrintWriter out = new PrintWriter(sw, true);
Random random = newRandom();
Directory directory = FSDirectory.open(indexDir);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE));
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(-1));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
SegmentInfo si1 = indexDoc(writer, "test.txt");
printSegment(out, si1);
@@ -141,9 +144,10 @@ public class TestDoc extends LuceneTestCase {
out = new PrintWriter(sw, true);
directory = FSDirectory.open(indexDir);
writer = new IndexWriter(directory, new IndexWriterConfig(
writer = new IndexWriter(directory, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE));
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(-1));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
si1 = indexDoc(writer, "test.txt");
printSegment(out, si1);

View File

@@ -60,7 +60,7 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testAddDocument() throws Exception {
Document testDoc = new Document();
DocHelper.setupDoc(testDoc);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.addDocument(testDoc);
writer.commit();
SegmentInfo info = writer.newestSegment();
@@ -117,7 +117,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
};
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
doc.add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.ANALYZED));
@@ -181,7 +181,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
};
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
doc.add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
@@ -206,7 +206,7 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testPreAnalyzedField() throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
@@ -266,7 +266,7 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(new Field("f2", "v1", Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
doc.add(new Field("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.addDocument(doc);
writer.close();
@@ -300,7 +300,7 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(f);
doc.add(new Field("f2", "v2", Store.YES, Index.NO));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.addDocument(doc);
writer.optimize(); // be sure to have a single segment

View File

@@ -22,6 +22,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
@@ -47,7 +48,7 @@ public class TestFieldsReader extends LuceneTestCase {
private RAMDirectory dir = new RAMDirectory();
private Document testDoc = new Document();
private FieldInfos fieldInfos = null;
private Random random;
private final static String TEST_SEGMENT_NAME = "_0";
public TestFieldsReader(String s) {
@@ -60,7 +61,8 @@ public class TestFieldsReader extends LuceneTestCase {
fieldInfos = new FieldInfos();
DocHelper.setupDoc(testDoc);
fieldInfos.add(testDoc);
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
random = newRandom();
IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer());
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(false);
IndexWriter writer = new IndexWriter(dir, conf);
@@ -292,7 +294,7 @@ public class TestFieldsReader extends LuceneTestCase {
FSDirectory tmpDir = FSDirectory.open(file);
assertTrue(tmpDir != null);
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE);
IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE);
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(tmpDir, conf);
writer.addDocument(testDoc);
@@ -472,7 +474,7 @@ public class TestFieldsReader extends LuceneTestCase {
try {
Directory dir = new FaultyFSDirectory(indexDir);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
for(int i=0;i<2;i++)
writer.addDocument(testDoc);

View File

@@ -31,6 +31,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Bits;
import java.io.IOException;
import java.util.Random;
public class TestFilterIndexReader extends LuceneTestCase {
@@ -130,8 +131,9 @@ public class TestFilterIndexReader extends LuceneTestCase {
* @throws Exception on error
*/
public void testFilterIndexReader() throws Exception {
Random random = newRandom();
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document d1 = new Document();
d1.add(new Field("default","one two", Field.Store.YES, Field.Index.ANALYZED));
@@ -149,7 +151,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
//IndexReader reader = new TestReader(IndexReader.open(directory, true));
RAMDirectory target = new MockRAMDirectory();
writer = new IndexWriter(target, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(target, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexReader reader = new TestReader(IndexReader.open(directory, true));
writer.addIndexes(reader);
writer.close();

View File

@@ -66,7 +66,7 @@ public class TestFlex extends LuceneTestCase {
public void testTermOrd() throws Exception {
Directory d = new MockRAMDirectory();
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
IndexWriter w = new IndexWriter(d, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT,
new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
Document doc = new Document();
doc.add(new Field("f", "a b c", Field.Store.NO, Field.Index.ANALYZED));

View File

@@ -38,12 +38,14 @@ import java.util.*;
public class TestIndexFileDeleter extends LuceneTestCase {
public void testDeleteLeftoverFiles() throws IOException {
Random random = newRandom();
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
int i;
for(i=0;i<35;i++) {
addDoc(writer, i);
@@ -145,7 +147,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// Open & close a writer: it should delete the above 4
// files and nothing more:
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.close();
String[] files2 = dir.listAll();

View File

@@ -27,6 +27,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
@@ -68,7 +69,15 @@ public class TestIndexReader extends LuceneTestCase
// TestRunner.run (new TestIndexReader("testDeleteReaderReaderConflict"));
// TestRunner.run (new TestIndexReader("testFilesOpenClose"));
}
private Random random;
@Override
public void setUp() throws Exception {
super.setUp();
random = newRandom();
}
public TestIndexReader(String name) {
super(name);
}
@@ -80,7 +89,7 @@ public class TestIndexReader extends LuceneTestCase
commitUserData.put("foo", "fighters");
// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2));
for(int i=0;i<27;i++)
@@ -103,7 +112,7 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(c.equals(r.getIndexCommit()));
// Change the index
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(2));
for(int i=0;i<7;i++)
@@ -115,7 +124,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(r2.getIndexCommit().isOptimized());
r3.close();
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.APPEND));
writer.optimize();
@@ -130,7 +139,7 @@ public class TestIndexReader extends LuceneTestCase
public void testIsCurrent() throws Exception {
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
writer.close();
@@ -138,13 +147,13 @@ public class TestIndexReader extends LuceneTestCase
IndexReader reader = IndexReader.open(d, false);
assertTrue(reader.isCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
// re-create index:
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
@ -160,7 +169,7 @@ public class TestIndexReader extends LuceneTestCase
public void testGetFieldNames() throws Exception {
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
writer.close();
@ -173,7 +182,7 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(fieldNames.contains("unstored"));
reader.close();
// add more documents
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
// want to get some more segments here
int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
@ -252,7 +261,7 @@ public class TestIndexReader extends LuceneTestCase
public void testTermVectors() throws Exception {
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
// want to get some more segments here
// new termvector fields
@ -310,7 +319,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 100 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm.text());
}
@ -350,7 +359,7 @@ public class TestIndexReader extends LuceneTestCase
Directory dir = new RAMDirectory();
byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
for (int i = 0; i < 10; i++) {
addDoc(writer, "document number " + (i + 1));
@ -359,7 +368,7 @@ public class TestIndexReader extends LuceneTestCase
addDocumentWithTermVectorFields(writer);
}
writer.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
Document doc = new Document();
doc.add(new Field("bin1", bin));
doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@ -396,7 +405,7 @@ public class TestIndexReader extends LuceneTestCase
// force optimize
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
reader = IndexReader.open(dir, false);
@ -424,7 +433,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 11 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
for (int i = 0; i < 11; i++) {
addDoc(writer, searchTerm.text());
}
@ -465,7 +474,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 11 documents with term : aaa
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.commit();
for (int i = 0; i < 11; i++) {
addDoc(writer, searchTerm.text());
@ -509,7 +518,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 1 document with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDoc(writer, searchTerm.text());
writer.close();
@ -553,7 +562,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 1 document with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
addDoc(writer, searchTerm.text());
@ -607,7 +616,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm2 = new Term("content", "bbb");
// add 100 documents with term : aaa
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm.text());
}
@ -622,7 +631,7 @@ public class TestIndexReader extends LuceneTestCase
assertTermDocsCount("first reader", reader, searchTerm2, 0);
// add 100 documents with term : bbb
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm2.text());
}
@ -687,7 +696,7 @@ public class TestIndexReader extends LuceneTestCase
// Create initial data set
File dirFile = new File(TEMP_DIR, "testIndex");
Directory dir = getDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDoc(writer, "test");
writer.close();
dir.close();
@ -697,7 +706,7 @@ public class TestIndexReader extends LuceneTestCase
dir = getDirectory();
// Now create the data set again, just as before
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
addDoc(writer, "test");
writer.close();
dir.close();
@ -723,7 +732,7 @@ public class TestIndexReader extends LuceneTestCase
else
dir = getDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
@ -740,7 +749,7 @@ public class TestIndexReader extends LuceneTestCase
// incremented:
Thread.sleep(1000);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
@ -757,7 +766,7 @@ public class TestIndexReader extends LuceneTestCase
public void testVersion() throws IOException {
Directory dir = new MockRAMDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
@ -768,7 +777,7 @@ public class TestIndexReader extends LuceneTestCase
reader.close();
// modify index and check version has been
// incremented:
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
@ -779,10 +788,10 @@ public class TestIndexReader extends LuceneTestCase
public void testLock() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
writer.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IndexReader reader = IndexReader.open(dir, false);
try {
reader.deleteDocument(0);
@ -799,7 +808,7 @@ public class TestIndexReader extends LuceneTestCase
public void testUndeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@ -816,7 +825,7 @@ public class TestIndexReader extends LuceneTestCase
public void testUndeleteAllAfterClose() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@ -833,7 +842,7 @@ public class TestIndexReader extends LuceneTestCase
public void testUndeleteAllAfterCloseThenReopen() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@ -870,7 +879,7 @@ public class TestIndexReader extends LuceneTestCase
// First build up a starting index:
RAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
for(int i=0;i<157;i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
@ -1044,7 +1053,7 @@ public class TestIndexReader extends LuceneTestCase
public void testDocsOutOfOrderJIRA140() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
for(int i=0;i<11;i++) {
addDoc(writer, "aaa");
}
@ -1062,7 +1071,7 @@ public class TestIndexReader extends LuceneTestCase
}
reader.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
// We must add more docs to get a new segment written
for(int i=0;i<11;i++) {
@ -1084,7 +1093,7 @@ public class TestIndexReader extends LuceneTestCase
public void testExceptionReleaseWriteLockJIRA768() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
addDoc(writer, "aaa");
writer.close();
@ -1147,7 +1156,7 @@ public class TestIndexReader extends LuceneTestCase
// add 100 documents with term : aaa
// add 100 documents with term : bbb
// add 100 documents with term : ccc
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm1.text());
addDoc(writer, searchTerm2.text());
@ -1375,9 +1384,10 @@ public class TestIndexReader extends LuceneTestCase
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
writer.close();
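A randomized config can come back with an arbitrary merge factor, so hunks like the one above re-pin it immediately after opening the writer; with maxBufferedDocs at 2 and 27 documents, a fixed factor of 10 presumably preserves the multi-segment structure the surrounding assertions rely on. The pinning idiom, isolated:

IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
    TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
// override whatever merge factor the randomized config selected
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);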
@ -1392,9 +1402,10 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(c.equals(r.getIndexCommit()));
// Change the index
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer()).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(2));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
writer.close();
@ -1404,7 +1415,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(r2.getIndexCommit().isOptimized());
r2.close();
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.APPEND));
writer.optimize();
@ -1420,7 +1431,7 @@ public class TestIndexReader extends LuceneTestCase
public void testReadOnly() throws Throwable {
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
addDocumentWithFields(writer);
writer.commit();
@ -1435,9 +1446,10 @@ public class TestIndexReader extends LuceneTestCase
// expected
}
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.APPEND));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
addDocumentWithFields(writer);
writer.close();
@ -1454,7 +1466,7 @@ public class TestIndexReader extends LuceneTestCase
// expected
}
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.APPEND));
writer.optimize();
@ -1462,6 +1474,7 @@ public class TestIndexReader extends LuceneTestCase
// Make sure reopen to a single segment is still readonly:
IndexReader r3 = r2.reopen();
assertFalse(r3 == r2);
r2.close();
assertFalse(r == r2);
@ -1474,7 +1487,7 @@ public class TestIndexReader extends LuceneTestCase
}
// Make sure write lock isn't held
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.APPEND));
writer.close();
@ -1486,7 +1499,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1474
public void testIndexReader() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
@ -1504,7 +1517,7 @@ public class TestIndexReader extends LuceneTestCase
public void testIndexReaderUnDeleteAll() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
dir.setPreventDoubleWrite(false);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
@ -1546,7 +1559,7 @@ public class TestIndexReader extends LuceneTestCase
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2));
writer.addDocument(createDocument("a"));
@ -1571,7 +1584,7 @@ public class TestIndexReader extends LuceneTestCase
// reuse the doc values arrays in FieldCache
public void testFieldCacheReuseAfterClone() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@ -1602,9 +1615,10 @@ public class TestIndexReader extends LuceneTestCase
// FieldCache
public void testFieldCacheReuseAfterReopen() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.addDocument(doc);
writer.commit();
@ -1634,7 +1648,8 @@ public class TestIndexReader extends LuceneTestCase
// reopen switches readOnly
public void testReopenChangeReadonly() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(-1));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
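The -1 passed to setMaxBufferedDocs here is IndexWriterConfig.DISABLE_AUTO_FLUSH: the writer never flushes on document count, whatever the randomized helper chose, leaving segment boundaries entirely under the test's control. Spelled out with the named constant (a sketch equivalent to the hunk above):

IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
    .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // same value as -1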
@ -1675,7 +1690,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1586: getUniqueTermCount
public void testUniqueTermCount() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@ -1708,7 +1723,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1609: don't load terms index
public void testNoTermsIndex() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@ -1725,7 +1740,8 @@ public class TestIndexReader extends LuceneTestCase
}
assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.addDocument(doc);
writer.close();
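Both terms-index tests keep pinning the codec even after the cutover, presumably because they assert on getTermInfosIndexDivisor and terms-index loading, behaviors tied to the Standard codec, so a randomly drawn codec would invalidate the assertions. The pinning call, isolated for reference:

IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
    .setCodecProvider(_TestUtil.alwaysCodec("Standard")); // never randomize the codec here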
@ -1747,7 +1763,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-2046
public void testPrepareCommitIsCurrent() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.commit();
Document doc = new Document();

View File

@ -17,6 +17,8 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import java.util.Random;
import org.apache.lucene.index.SegmentReader.Norm;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.analysis.MockAnalyzer;
@ -33,11 +35,18 @@ import org.apache.lucene.util.LuceneTestCase;
* implemented properly
*/
public class TestIndexReaderClone extends LuceneTestCase {
Random random;
@Override
public void setUp() throws Exception {
super.setUp();
random = newRandom();
}
public void testCloneReadOnlySegmentReader() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
IndexReader reader = IndexReader.open(dir1, false);
IndexReader readOnlyReader = reader.clone(true);
if (!isReadOnly(readOnlyReader)) {
@ -56,7 +65,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneNoChangesStillReadOnly() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader r1 = IndexReader.open(dir1, false);
IndexReader r2 = r1.clone(false);
if (!deleteWorked(1, r2)) {
@ -72,7 +81,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneWriteToOrig() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader r1 = IndexReader.open(dir1, false);
IndexReader r2 = r1.clone(false);
if (!deleteWorked(1, r1)) {
@ -88,7 +97,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneWriteToClone() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader r1 = IndexReader.open(dir1, false);
IndexReader r2 = r1.clone(false);
if (!deleteWorked(1, r2)) {
@ -111,7 +120,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testReopenSegmentReaderToMultiReader() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
IndexReader reader1 = IndexReader.open(dir1, false);
TestIndexReaderReopen.modifyIndex(5, dir1);
@ -129,7 +138,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneWriteableToReadOnly() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader = IndexReader.open(dir1, false);
IndexReader readOnlyReader = reader.clone(true);
if (!isReadOnly(readOnlyReader)) {
@ -151,7 +160,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testReopenWriteableToReadOnly() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader = IndexReader.open(dir1, false);
final int docCount = reader.numDocs();
assertTrue(deleteWorked(1, reader));
@ -172,7 +181,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneReadOnlyToWriteable() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader1 = IndexReader.open(dir1, true);
IndexReader reader2 = reader1.clone(false);
@ -195,9 +204,9 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testReadOnlyCloneAfterOptimize() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader1 = IndexReader.open(dir1, false);
IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(
IndexWriter w = new IndexWriter(dir1, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
w.optimize();
w.close();
@ -222,7 +231,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneReadOnlyDirectoryReader() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader = IndexReader.open(dir1, false);
IndexReader readOnlyReader = reader.clone(true);
if (!isReadOnly(readOnlyReader)) {
@ -242,9 +251,9 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testParallelReader() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
final Directory dir2 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir2, true);
TestIndexReaderReopen.createIndex(random, dir2, true);
IndexReader r1 = IndexReader.open(dir1, false);
IndexReader r2 = IndexReader.open(dir2, false);
@ -292,9 +301,9 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testMixedReaders() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
final Directory dir2 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir2, true);
TestIndexReaderReopen.createIndex(random, dir2, true);
IndexReader r1 = IndexReader.open(dir1, false);
IndexReader r2 = IndexReader.open(dir2, false);
@ -307,7 +316,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testSegmentReaderUndeleteall() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
SegmentReader origSegmentReader = SegmentReader.getOnlySegmentReader(dir1);
origSegmentReader.deleteDocument(10);
assertDelDocsRefCountEquals(1, origSegmentReader);
@ -320,7 +329,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testSegmentReaderCloseReferencing() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
SegmentReader origSegmentReader = SegmentReader.getOnlySegmentReader(dir1);
origSegmentReader.deleteDocument(1);
origSegmentReader.setNorm(4, "field1", 0.5f);
@ -339,7 +348,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testSegmentReaderDelDocsReferenceCounting() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
IndexReader origReader = IndexReader.open(dir1, false);
SegmentReader origSegmentReader = SegmentReader.getOnlySegmentReader(origReader);
@ -402,7 +411,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
// LUCENE-1648
public void testCloneWithDeletes() throws Throwable {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
IndexReader origReader = IndexReader.open(dir1, false);
origReader.deleteDocument(1);
@ -419,7 +428,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
// LUCENE-1648
public void testCloneWithSetNorm() throws Throwable {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
IndexReader orig = IndexReader.open(dir1, false);
orig.setNorm(1, "field1", 17.0f);
final byte encoded = Similarity.getDefault().encodeNormValue(17.0f);
@ -449,7 +458,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloneSubreaders() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, true);
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader = IndexReader.open(dir1, false);
reader.deleteDocument(1); // acquire write lock
IndexReader[] subs = reader.getSequentialSubReaders();
@ -468,7 +477,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testLucene1516Bug() throws Exception {
final Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(random, dir1, false);
IndexReader r1 = IndexReader.open(dir1, false);
r1.incRef();
IndexReader r2 = r1.clone(false);
@ -485,7 +494,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloseStoredFields() throws Exception {
final Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
((LogMergePolicy) w.getConfig().getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) w.getConfig().getMergePolicy()).setUseCompoundDocStore(false);

View File

@ -20,6 +20,7 @@ package org.apache.lucene.index;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.analysis.Analyzer;
@ -83,6 +84,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
*/
public void testNorms() throws IOException {
// test with a single index: index1
Random random = newRandom();
File indexDir1 = new File(TEMP_DIR, "lucenetestindex1");
Directory dir1 = FSDirectory.open(indexDir1);
IndexWriter.unlock(dir1);
@ -90,8 +92,8 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
norms = new ArrayList<Float>();
modifiedNorms = new ArrayList<Float>();
createIndex(dir1);
doTestNorms(dir1);
createIndex(random, dir1);
doTestNorms(random, dir1);
// test with a single index: index2
ArrayList<Float> norms1 = norms;
@ -105,15 +107,15 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
File indexDir2 = new File(TEMP_DIR, "lucenetestindex2");
Directory dir2 = FSDirectory.open(indexDir2);
createIndex(dir2);
doTestNorms(dir2);
createIndex(random, dir2);
doTestNorms(random, dir2);
// add index1 and index2 to a third index: index3
File indexDir3 = new File(TEMP_DIR, "lucenetestindex3");
Directory dir3 = FSDirectory.open(indexDir3);
createIndex(dir3);
IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
createIndex(random, dir3);
IndexWriter iw = new IndexWriter(dir3, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5));
((LogMergePolicy) iw.getConfig().getMergePolicy()).setMergeFactor(3);
@ -129,10 +131,10 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
// test with index3
verifyIndex(dir3);
doTestNorms(dir3);
doTestNorms(random, dir3);
// now with optimize
iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT,
iw = new IndexWriter(dir3, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5));
((LogMergePolicy) iw.getConfig().getMergePolicy()).setMergeFactor(3);
iw.optimize();
@ -145,8 +147,8 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
}
// try cloning and reopening the norms
private void doTestNorms(Directory dir) throws IOException {
addDocs(dir, 12, true);
private void doTestNorms(Random random, Directory dir) throws IOException {
addDocs(random, dir, 12, true);
IndexReader ir = IndexReader.open(dir, false);
verifyIndex(ir);
modifyNormsForF1(ir);
@ -165,7 +167,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
public void testNormsClose() throws IOException {
Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(newRandom(), dir1, false);
SegmentReader reader1 = SegmentReader.getOnlySegmentReader(dir1);
reader1.norms("field1");
Norm r1norm = reader1.norms.get("field1");
@ -181,7 +183,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
public void testNormsRefCounting() throws IOException {
Directory dir1 = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(dir1, false);
TestIndexReaderReopen.createIndex(newRandom(), dir1, false);
IndexReader reader1 = IndexReader.open(dir1, false);
IndexReader reader2C = (IndexReader) reader1.clone();
@ -232,8 +234,8 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
dir1.close();
}
private void createIndex(Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
private void createIndex(Random random, Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
@ -284,9 +286,9 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
}
}
private void addDocs(Directory dir, int ndocs, boolean compound)
private void addDocs(Random random, Directory dir, int ndocs, boolean compound)
throws IOException {
IndexWriterConfig conf = new IndexWriterConfig(
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5).setSimilarity(similarityOne);
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();

View File

@ -52,8 +52,9 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testReopen() throws Exception {
final Directory dir1 = new MockRAMDirectory();
Random random = newRandom();
createIndex(dir1, false);
createIndex(random, dir1, false);
performDefaultTests(new TestReopen() {
@Override
@ -71,7 +72,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
final Directory dir2 = new MockRAMDirectory();
createIndex(dir2, true);
createIndex(random, dir2, true);
performDefaultTests(new TestReopen() {
@Override
@ -89,10 +90,11 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
public void testParallelReaderReopen() throws Exception {
Random random = newRandom();
final Directory dir1 = new MockRAMDirectory();
createIndex(dir1, true);
createIndex(random, dir1, true);
final Directory dir2 = new MockRAMDirectory();
createIndex(dir2, true);
createIndex(random, dir2, true);
performDefaultTests(new TestReopen() {
@ -115,9 +117,9 @@ public class TestIndexReaderReopen extends LuceneTestCase {
dir2.close();
final Directory dir3 = new MockRAMDirectory();
createIndex(dir3, true);
createIndex(random, dir3, true);
final Directory dir4 = new MockRAMDirectory();
createIndex(dir4, true);
createIndex(random, dir4, true);
performTestsWithExceptionInReopen(new TestReopen() {
@ -150,26 +152,26 @@ public class TestIndexReaderReopen extends LuceneTestCase {
// try this once with reopen, once with recreate, on both RAMDir and FSDir.
public void testCommitReopenFS () throws IOException {
Directory dir = FSDirectory.open(indexDir);
doTestReopenWithCommit(dir, true);
doTestReopenWithCommit(newRandom(), dir, true);
dir.close();
}
public void testCommitRecreateFS () throws IOException {
Directory dir = FSDirectory.open(indexDir);
doTestReopenWithCommit(dir, false);
doTestReopenWithCommit(newRandom(), dir, false);
dir.close();
}
public void testCommitReopenRAM () throws IOException {
Directory dir = new MockRAMDirectory();
doTestReopenWithCommit(dir, true);
doTestReopenWithCommit(newRandom(), dir, true);
dir.close();
}
public void testCommitRecreateRAM () throws IOException {
Directory dir = new MockRAMDirectory();
doTestReopenWithCommit(dir, false);
doTestReopenWithCommit(newRandom(), dir, false);
}
private void doTestReopenWithCommit (Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, new IndexWriterConfig(
private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(
OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()));
iwriter.commit();
@ -213,12 +215,12 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
public void testMultiReaderReopen() throws Exception {
Random random = newRandom();
final Directory dir1 = new MockRAMDirectory();
createIndex(dir1, true);
createIndex(random, dir1, true);
final Directory dir2 = new MockRAMDirectory();
createIndex(dir2, true);
createIndex(random, dir2, true);
performDefaultTests(new TestReopen() {
@ -241,10 +243,10 @@ public class TestIndexReaderReopen extends LuceneTestCase {
dir2.close();
final Directory dir3 = new MockRAMDirectory();
createIndex(dir3, true);
createIndex(random, dir3, true);
final Directory dir4 = new MockRAMDirectory();
createIndex(dir4, true);
createIndex(random, dir4, true);
performTestsWithExceptionInReopen(new TestReopen() {
@ -270,16 +272,17 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
public void testMixedReaders() throws Exception {
Random random = newRandom();
final Directory dir1 = new MockRAMDirectory();
createIndex(dir1, true);
createIndex(random, dir1, true);
final Directory dir2 = new MockRAMDirectory();
createIndex(dir2, true);
createIndex(random, dir2, true);
final Directory dir3 = new MockRAMDirectory();
createIndex(dir3, false);
createIndex(random, dir3, false);
final Directory dir4 = new MockRAMDirectory();
createIndex(dir4, true);
createIndex(random, dir4, true);
final Directory dir5 = new MockRAMDirectory();
createIndex(dir5, false);
createIndex(random, dir5, false);
performDefaultTests(new TestReopen() {
@ -357,10 +360,10 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
public void testReferenceCounting() throws IOException {
Random random = newRandom();
for (int mode = 0; mode < 4; mode++) {
Directory dir1 = new MockRAMDirectory();
createIndex(dir1, true);
createIndex(random, dir1, true);
IndexReader reader0 = IndexReader.open(dir1, false);
assertRefCountEquals(1, reader0);
@ -463,11 +466,12 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testReferenceCountingMultiReader() throws IOException {
Random random = newRandom();
for (int mode = 0; mode <=1; mode++) {
Directory dir1 = new MockRAMDirectory();
createIndex(dir1, false);
createIndex(random, dir1, false);
Directory dir2 = new MockRAMDirectory();
createIndex(dir2, true);
createIndex(random, dir2, true);
IndexReader reader1 = IndexReader.open(dir1, false);
assertRefCountEquals(1, reader1);
@ -534,11 +538,12 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
public void testReferenceCountingParallelReader() throws IOException {
Random random = newRandom();
for (int mode = 0; mode <=1; mode++) {
Directory dir1 = new MockRAMDirectory();
createIndex(dir1, false);
createIndex(random, dir1, false);
Directory dir2 = new MockRAMDirectory();
createIndex(dir2, true);
createIndex(random, dir2, true);
IndexReader reader1 = IndexReader.open(dir1, false);
assertRefCountEquals(1, reader1);
@ -610,7 +615,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testNormsRefCounting() throws IOException {
Directory dir1 = new MockRAMDirectory();
createIndex(dir1, false);
createIndex(newRandom(), dir1, false);
IndexReader reader1 = IndexReader.open(dir1, false);
SegmentReader segmentReader1 = SegmentReader.getOnlySegmentReader(reader1);
@ -701,8 +706,8 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testThreadSafety() throws Exception {
final Directory dir = new MockRAMDirectory();
final int n = 30 * RANDOM_MULTIPLIER;
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
Random random = newRandom();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()));
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
@ -738,7 +743,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
final List<ReaderCouple> readers = Collections.synchronizedList(new ArrayList<ReaderCouple>());
IndexReader firstReader = IndexReader.open(dir, false);
IndexReader reader = firstReader;
final Random rnd = newRandom();
final Random rnd = random;
ReaderThread[] threads = new ReaderThread[n];
final Set<IndexReader> readersToClose = Collections.synchronizedSet(new HashSet<IndexReader>());
@ -946,9 +951,9 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
}
public static void createIndex(Directory dir, boolean multiSegment) throws IOException {
public static void createIndex(Random random, Directory dir, boolean multiSegment) throws IOException {
IndexWriter.unlock(dir);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMergePolicy(new LogDocMergePolicy()));
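createIndex is a static utility invoked from several classes in this commit (TestIndexReaderClone and TestIndexReaderCloneNorms call it as TestIndexReaderReopen.createIndex), so it now accepts the caller's Random instead of drawing its own; every index a test builds then descends from the single per-test seed. A typical call site, as seen throughout the diff:

Random random = newRandom();                          // one seed drives the whole test
Directory dir = new MockRAMDirectory();
TestIndexReaderReopen.createIndex(random, dir, true); // true => multi-segment index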
@ -1101,7 +1106,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testCloseOrig() throws Throwable {
Directory dir = new MockRAMDirectory();
createIndex(dir, false);
createIndex(newRandom(), dir, false);
IndexReader r1 = IndexReader.open(dir, false);
IndexReader r2 = IndexReader.open(dir, false);
r2.deleteDocument(0);
@ -1122,7 +1127,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testDeletes() throws Throwable {
Directory dir = new MockRAMDirectory();
createIndex(dir, false); // Create an index with a bunch of docs (1 segment)
createIndex(newRandom(), dir, false); // Create an index with a bunch of docs (1 segment)
modifyIndex(0, dir); // Get delete bitVector on 1st segment
modifyIndex(5, dir); // Add a doc (2 segments)
@ -1155,7 +1160,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testDeletes2() throws Throwable {
Directory dir = new MockRAMDirectory();
createIndex(dir, false);
createIndex(newRandom(), dir, false);
// Get delete bitVector
modifyIndex(0, dir);
IndexReader r1 = IndexReader.open(dir, false);
@ -1191,8 +1196,9 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testReopenOnCommit() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(new KeepAllCommits()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(new KeepAllCommits()).setMaxBufferedDocs(-1));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
for(int i=0;i<4;i++) {
Document doc = new Document();
doc.add(new Field("id", ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED));

File diff suppressed because it is too large

View File

@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
@ -32,7 +33,14 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
public class TestIndexWriterDelete extends LuceneTestCase {
Random random;
@Override
public void setUp() throws Exception {
super.setUp();
random = newRandom();
}
// test the simple case
public void testSimpleCase() throws IOException {
String[] keywords = { "1", "2" };
@ -42,7 +50,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
for (int i = 0; i < keywords.length; i++) {
@ -77,7 +85,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testNonRAMDelete() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
@ -111,7 +119,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testMaxBufferedDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
writer.deleteDocuments(new Term("foobar", "1"));
writer.deleteDocuments(new Term("foobar", "1"));
@ -125,7 +133,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testRAMDeletes() throws IOException {
for(int t=0;t<2;t++) {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
.setMaxBufferedDeleteTerms(4));
@ -166,7 +174,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test when delete terms apply to both disk and ram segments
public void testBothDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
.setMaxBufferedDeleteTerms(100));
@ -198,7 +206,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test that batched delete terms are flushed together
public void testBatchDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
@ -241,7 +249,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test deleteAll()
public void testDeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
@ -287,7 +295,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test rollback of deleteAll()
public void testDeleteAllRollback() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
@ -324,7 +332,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test deleteAll() w/ near real-time reader
public void testDeleteAllNRT() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
@ -414,7 +422,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// First build up a starting index:
MockRAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
for (int i = 0; i < 157; i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES,
@ -436,9 +444,13 @@ public class TestIndexWriterDelete extends LuceneTestCase {
while (!done) {
MockRAMDirectory dir = new MockRAMDirectory(startDir);
dir.setPreventDoubleWrite(false);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(1000)
.setMaxBufferedDeleteTerms(1000));
IndexWriter modifier = new IndexWriter(dir,
newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(1000)
.setMaxBufferedDeleteTerms(1000)
.setMergeScheduler(new ConcurrentMergeScheduler()));
((ConcurrentMergeScheduler) modifier.getConfig().getMergeScheduler()).setSuppressExceptions();
// For each disk size, first try to commit against
// dir that will hit random IOExceptions & disk
@ -642,8 +654,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2));
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false));
LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();
lmp.setUseCompoundFile(true);
lmp.setUseCompoundDocStore(true);
@ -694,11 +706,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// in the ac case, this will be when writing the new segments
// files so we really don't need the new doc, but it's harmless
// in the !ac case, a new segments file won't be created but in
// this case, creation of the cfs file happens next so we need
// the doc (to test that it's okay that we don't lose deletes if
// failing while creating the cfs file)
// a new segments file won't be created but in this
// case, creation of the cfs file happens next so we
// need the doc (to test that it's okay that we don't
// lose deletes if failing while creating the cfs file)
boolean failed = false;
try {
modifier.commit();
@ -753,7 +764,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
modifier.commit();
dir.failOn(failure.reset());
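The disk-full test above now forces a ConcurrentMergeScheduler and suppresses its exceptions. A plausible reading of the change: the randomized config may hand back either scheduler, and a test that injects IOExceptions on purpose needs background merge failures swallowed rather than rethrown on the merge thread, so the scheduler is fixed and muted explicitly:

IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random,
    TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
    .setMergeScheduler(new ConcurrentMergeScheduler()));
// expected, injected failures should not abort the merge threads noisily
((ConcurrentMergeScheduler) modifier.getConfig().getMergeScheduler()).setSuppressExceptions();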

View File

@ -129,9 +129,11 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
public void testRandomExceptions() throws Throwable {
Random random = newRandom();
MockRAMDirectory dir = new MockRAMDirectory();
MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.1));
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
writer.commit();
@ -169,7 +171,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testRandomExceptionsThreads() throws Throwable {
MockRAMDirectory dir = new MockRAMDirectory();
MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.2));
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer())
.setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
writer.commit();

View File

@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
import java.util.Random;
public class TestIndexWriterMerging extends LuceneTestCase
@ -34,20 +35,20 @@ public class TestIndexWriterMerging extends LuceneTestCase
* change the index order of documents.
*/
public void testLucene() throws IOException {
Random random = newRandom();
int num=100;
Directory indexA = new MockRAMDirectory();
Directory indexB = new MockRAMDirectory();
fillIndex(indexA, 0, num);
fillIndex(random, indexA, 0, num);
boolean fail = verifyIndex(indexA, 0);
if (fail)
{
fail("Index a is invalid");
}
fillIndex(indexB, num, num);
fillIndex(random, indexB, num, num);
fail = verifyIndex(indexB, num);
if (fail)
{
@ -56,7 +57,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory merged = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(merged, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(merged, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
writer.addIndexes(new Directory[]{indexA, indexB});
@ -90,9 +91,9 @@ public class TestIndexWriterMerging extends LuceneTestCase
return fail;
}
private void fillIndex(Directory dir, int start, int numDocs) throws IOException {
private void fillIndex(Random random, Directory dir, int start, int numDocs) throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT,
new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2));

View File

@ -40,26 +40,16 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ThreadInterruptedException;
import java.util.concurrent.atomic.AtomicInteger;
public class TestIndexWriterReader extends LuceneTestCase {
static PrintStream infoStream;
public static class HeavyAtomicInt {
private int value;
public HeavyAtomicInt(int start) {
value = start;
}
public synchronized int addAndGet(int inc) {
value += inc;
return value;
}
public synchronized int incrementAndGet() {
value++;
return value;
}
public synchronized int intValue() {
return value;
}
private Random random;
@Override
public void setUp() throws Exception {
super.setUp();
random = newRandom();
}
public static int count(Term t, IndexReader r) throws IOException {
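The hunk above also drops the hand-rolled HeavyAtomicInt — a synchronized int wrapper — in favor of java.util.concurrent.atomic.AtomicInteger, which provides the same operations lock-free. The correspondence, for reference:

import java.util.concurrent.atomic.AtomicInteger;

AtomicInteger count = new AtomicInteger(0);
count.incrementAndGet();        // replaces HeavyAtomicInt.incrementAndGet()
count.addAndGet(4);             // replaces HeavyAtomicInt.addAndGet(4)
int current = count.intValue(); // replaces HeavyAtomicInt.intValue()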
@ -81,7 +71,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = true;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
// create the index
createIndexNoClose(!optimize, "index1", writer);
@ -115,7 +105,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertEquals(0, count(new Term("id", id10), r3));
assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));
writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
writer.addDocument(doc);
@ -142,7 +132,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = false;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
@ -150,7 +140,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// create a 2nd index
Directory dir2 = new MockRAMDirectory();
IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer2.setInfoStream(infoStream);
createIndexNoClose(!optimize, "index2", writer2);
writer2.close();
@ -187,12 +177,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = false;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
// create a 2nd index
Directory dir2 = new MockRAMDirectory();
IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer2.setInfoStream(infoStream);
createIndexNoClose(!optimize, "index2", writer2);
writer2.close();
@ -220,7 +210,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = true;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2));
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
@ -258,7 +248,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
// reopen the writer to verify the delete made it to the directory
writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
IndexReader w2r1 = writer.getReader();
assertEquals(0, count(new Term("id", id10), w2r1));
@ -272,7 +262,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
int numDirs = 3;
Directory mainDir = new MockRAMDirectory();
IndexWriter mainWriter = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
mainWriter.setInfoStream(infoStream);
AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
addDirThreads.launchThreads(numDirs);
@ -308,14 +298,14 @@ public class TestIndexWriterReader extends LuceneTestCase {
final List<Throwable> failures = new ArrayList<Throwable>();
IndexReader[] readers;
boolean didClose = false;
HeavyAtomicInt count = new HeavyAtomicInt(0);
HeavyAtomicInt numaddIndexes = new HeavyAtomicInt(0);
AtomicInteger count = new AtomicInteger(0);
AtomicInteger numaddIndexes = new AtomicInteger(0);
public AddDirectoriesThreads(int numDirs, IndexWriter mainWriter) throws Throwable {
this.numDirs = numDirs;
this.mainWriter = mainWriter;
addDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(addDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++) {
Document doc = createDocument(i, "addindex", 4);
writer.addDocument(doc);
@ -421,7 +411,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
*/
public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
IndexReader r1 = writer.getReader();
assertEquals(0, r1.maxDoc());
@ -458,7 +448,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
// test whether the changes made it to the directory
writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexReader w2r1 = writer.getReader();
// ensure the deletes were actually flushed to the directory
assertEquals(200, w2r1.maxDoc());
@ -495,9 +485,9 @@ public class TestIndexWriterReader extends LuceneTestCase {
* //} //writer.deleteDocuments(term); td.close(); return doc; }
*/
public static void createIndex(Directory dir1, String indexName,
public static void createIndex(Random random, Directory dir1, String indexName,
boolean multiSegment) throws IOException {
IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(
IndexWriter w = new IndexWriter(dir1, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++) {
@ -534,9 +524,9 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory dir1 = new MockRAMDirectory();
// Enroll warmer
MyWarmer warmer = new MyWarmer();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2).setMergedSegmentWarmer(warmer));
.setMaxBufferedDocs(2).setMergedSegmentWarmer(warmer).setMergeScheduler(new ConcurrentMergeScheduler()));
writer.setInfoStream(infoStream);
// create the index
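A pattern that recurs in this hunk and several others: tests whose assertions depend on merges running in the background now pin ConcurrentMergeScheduler explicitly, presumably because the randomized config may otherwise select a serial scheduler (see the sketch near the top of this diff). For example:

// pin the scheduler when the test requires concurrent background merges
IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
    .setMergeScheduler(new ConcurrentMergeScheduler());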
@ -567,7 +557,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testAfterCommit() throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler()));
writer.commit();
writer.setInfoStream(infoStream);
@ -600,7 +590,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Make sure reader remains usable even if IndexWriter closes
public void testAfterClose() throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
// create the index
@ -629,7 +619,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Stress test reopen during addIndexes
public void testDuringAddIndexes() throws Exception {
Directory dir1 = new MockRAMDirectory();
final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
@ -706,7 +696,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Stress test reopen during add/delete
public void testDuringAddDelete() throws Exception {
Directory dir1 = new MockRAMDirectory();
final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
writer.setInfoStream(infoStream);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
@ -786,7 +776,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testExpungeDeletes() throws Throwable {
Directory dir = new MockRAMDirectory();
final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@ -810,7 +800,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testDeletesNumDocs() throws Throwable {
Directory dir = new MockRAMDirectory();
final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@ -840,7 +830,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testEmptyIndex() throws Exception {
// Ensures that getReader works on an empty index, which hasn't been committed yet.
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexReader r = w.getReader();
assertEquals(0, r.numDocs());
r.close();
@ -849,8 +839,9 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testSegmentWarmer() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2).setReaderPooling(true));
((LogMergePolicy) w.getMergePolicy()).setMergeFactor(10);
w.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
public void warm(IndexReader r) throws IOException {
final IndexSearcher s = new IndexSearcher(r);


@ -71,7 +71,7 @@ public class TestLazyBug extends LuceneTestCase {
Directory dir = new RAMDirectory();
try {
Random r = newRandom();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(r,
TEST_VERSION_CURRENT, new MockAnalyzer()));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);


@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@ -57,11 +58,11 @@ public class TestLazyProxSkipping extends LuceneTestCase {
}
}
private void createIndex(int numHits) throws IOException {
private void createIndex(Random random, int numHits) throws IOException {
int numDocs = 500;
Directory directory = new SeekCountingDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
for (int i = 0; i < numDocs; i++) {
@ -99,8 +100,8 @@ public class TestLazyProxSkipping extends LuceneTestCase {
return this.searcher.search(pq, null, 1000).scoreDocs;
}
private void performTest(int numHits) throws IOException {
createIndex(numHits);
private void performTest(Random random, int numHits) throws IOException {
createIndex(random, numHits);
this.seeksCounter = 0;
ScoreDoc[] hits = search();
// verify that the right number of docs was found
@ -113,13 +114,14 @@ public class TestLazyProxSkipping extends LuceneTestCase {
public void testLazySkipping() throws IOException {
// test whether only the minimum number of seeks() is performed
performTest(5);
performTest(10);
Random random = newRandom();
performTest(random, 5);
performTest(random, 10);
}
public void testSeek() throws IOException {
Directory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));


@ -33,7 +33,7 @@ public class TestMultiFields extends LuceneTestCase {
for (int iter = 0; iter < num; iter++) {
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(r, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>();
Set<Integer> deleted = new HashSet<Integer>();
@ -132,7 +132,7 @@ public class TestMultiFields extends LuceneTestCase {
public void testSeparateEnums() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
Document d = new Document();
d.add(new Field("f", "j", Field.Store.NO, Field.Index.NOT_ANALYZED));
w.addDocument(d);


@ -33,6 +33,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.util.BytesRef;
/**
@ -56,7 +57,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
public void testSimpleSkip() throws IOException {
Directory dir = new CountingRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new PayloadAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
Term term = new Term("test", "a");
for (int i = 0; i < 5000; i++) {
Document d1 = new Document();
@ -88,7 +89,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
public void checkSkipTo(DocsAndPositionsEnum tp, int target, int maxCounter) throws IOException {
tp.advance(target);
if (maxCounter < counter) {
fail("Too many bytes read: " + counter);
fail("Too many bytes read: " + counter + " vs " + maxCounter);
}
assertEquals("Wrong document " + tp.docID() + " after skipTo target " + target, target, tp.docID());


@ -18,21 +18,21 @@ package org.apache.lucene.index;
*/
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
public class TestNRTReaderWithThreads extends LuceneTestCase {
Random random = new Random();
HeavyAtomicInt seq = new HeavyAtomicInt(1);
AtomicInteger seq = new AtomicInteger(1);
public void testIndexing() throws Exception {
Directory mainDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(mainDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);


@ -27,7 +27,7 @@ public class TestNewestSegment extends LuceneTestCase {
public void testNewestSegment() throws Exception {
RAMDirectory directory = new RAMDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
assertNull(writer.newestSegment());
}
}


@ -74,7 +74,7 @@ public class TestNoDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testAllCommitsRemain() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, new MockAnalyzer())
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
for (int i = 0; i < 10; i++) {


@ -19,6 +19,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
@ -74,13 +75,14 @@ public class TestNorms extends LuceneTestCase {
* Including optimize.
*/
public void testNorms() throws IOException {
Random random = newRandom();
Directory dir1 = new RAMDirectory();
norms = new ArrayList<Float>();
modifiedNorms = new ArrayList<Float>();
createIndex(dir1);
doTestNorms(dir1);
createIndex(random, dir1);
doTestNorms(random, dir1);
// test with a single index: index2
ArrayList<Float> norms1 = norms;
@ -93,14 +95,14 @@ public class TestNorms extends LuceneTestCase {
Directory dir2 = new RAMDirectory();
createIndex(dir2);
doTestNorms(dir2);
createIndex(random, dir2);
doTestNorms(random, dir2);
// add index1 and index2 to a third index: index3
Directory dir3 = new RAMDirectory();
createIndex(dir3);
IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
createIndex(random, dir3);
IndexWriter iw = new IndexWriter(dir3, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5));
((LogMergePolicy) iw.getConfig().getMergePolicy()).setMergeFactor(3);
@ -116,10 +118,10 @@ public class TestNorms extends LuceneTestCase {
// test with index3
verifyIndex(dir3);
doTestNorms(dir3);
doTestNorms(random, dir3);
// now with optimize
iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT,
iw = new IndexWriter(dir3, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5));
((LogMergePolicy) iw.getConfig().getMergePolicy()).setMergeFactor(3);
iw.optimize();
@ -131,21 +133,21 @@ public class TestNorms extends LuceneTestCase {
dir3.close();
}
private void doTestNorms(Directory dir) throws IOException {
private void doTestNorms(Random random, Directory dir) throws IOException {
for (int i=0; i<5; i++) {
addDocs(dir,12,true);
addDocs(random, dir,12,true);
verifyIndex(dir);
modifyNormsForF1(dir);
verifyIndex(dir);
addDocs(dir,12,false);
addDocs(random, dir,12,false);
verifyIndex(dir);
modifyNormsForF1(dir);
verifyIndex(dir);
}
}
private void createIndex(Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
private void createIndex(Random random, Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();
@ -189,8 +191,8 @@ public class TestNorms extends LuceneTestCase {
ir.close();
}
private void addDocs(Directory dir, int ndocs, boolean compound) throws IOException {
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
private void addDocs(Random random, Directory dir, int ndocs, boolean compound) throws IOException {
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getConfig().getMergePolicy();


@ -61,7 +61,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testOmitTermFreqAndPositions() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, analyzer));
Document d = new Document();
// this field will have Tf
@ -108,7 +108,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testMixedMerge() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
Document d = new Document();
@ -161,7 +161,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testMixedRAM() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
Document d = new Document();
@ -209,7 +209,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testNoPrxFile() throws Throwable {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
@ -242,7 +242,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testBasic() throws Exception {
Directory dir = new MockRAMDirectory();
Analyzer analyzer = new MockAnalyzer();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(),
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(2)
.setSimilarity(new SimpleSimilarity()));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);


@ -20,6 +20,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@ -41,12 +42,14 @@ public class TestParallelReader extends LuceneTestCase {
private Searcher parallel;
private Searcher single;
private Random random;
@Override
protected void setUp() throws Exception {
super.setUp();
single = single();
parallel = parallel();
random = newRandom();
single = single(random);
parallel = parallel(random);
}
public void testQueries() throws Exception {
@ -66,8 +69,8 @@ public class TestParallelReader extends LuceneTestCase {
}
public void testFieldNames() throws Exception {
Directory dir1 = getDir1();
Directory dir2 = getDir2();
Directory dir1 = getDir1(random);
Directory dir2 = getDir2(random);
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
@ -80,8 +83,8 @@ public class TestParallelReader extends LuceneTestCase {
}
public void testDocument() throws IOException {
Directory dir1 = getDir1();
Directory dir2 = getDir2();
Directory dir1 = getDir1(random);
Directory dir2 = getDir2(random);
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
@ -102,11 +105,11 @@ public class TestParallelReader extends LuceneTestCase {
public void testIncompatibleIndexes() throws IOException {
// two documents:
Directory dir1 = getDir1();
Directory dir1 = getDir1(random);
// one document only:
Directory dir2 = new MockRAMDirectory();
IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
w2.addDocument(d3);
@ -123,8 +126,8 @@ public class TestParallelReader extends LuceneTestCase {
}
public void testIsCurrent() throws IOException {
Directory dir1 = getDir1();
Directory dir2 = getDir2();
Directory dir1 = getDir1(random);
Directory dir2 = getDir2(random);
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
@ -147,17 +150,19 @@ public class TestParallelReader extends LuceneTestCase {
}
public void testIsOptimized() throws IOException {
Directory dir1 = getDir1();
Directory dir2 = getDir2();
Directory dir1 = getDir1(random);
Directory dir2 = getDir2(random);
// add another document to ensure that the indexes are not optimized
IndexWriter modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter modifier = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
((LogMergePolicy) modifier.getMergePolicy()).setMergeFactor(10);
Document d = new Document();
d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
modifier.close();
modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
modifier = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
((LogMergePolicy) modifier.getMergePolicy()).setMergeFactor(10);
d = new Document();
d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
@ -170,7 +175,7 @@ public class TestParallelReader extends LuceneTestCase {
assertFalse(pr.isOptimized());
pr.close();
modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
modifier = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
modifier.optimize();
modifier.close();
@ -182,7 +187,7 @@ public class TestParallelReader extends LuceneTestCase {
pr.close();
modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
modifier = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
modifier.optimize();
modifier.close();
@ -211,9 +216,9 @@ public class TestParallelReader extends LuceneTestCase {
}
// Fields 1-4 indexed together:
private Searcher single() throws IOException {
private Searcher single(Random random) throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@ -232,18 +237,18 @@ public class TestParallelReader extends LuceneTestCase {
}
// Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
private Searcher parallel() throws IOException {
Directory dir1 = getDir1();
Directory dir2 = getDir2();
private Searcher parallel(Random random) throws IOException {
Directory dir1 = getDir1(random);
Directory dir2 = getDir2(random);
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
return new IndexSearcher(pr);
}
private Directory getDir1() throws IOException {
private Directory getDir1(Random random) throws IOException {
Directory dir1 = new MockRAMDirectory();
IndexWriter w1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@ -256,9 +261,9 @@ public class TestParallelReader extends LuceneTestCase {
return dir1;
}
private Directory getDir2() throws IOException {
private Directory getDir2(Random random) throws IOException {
Directory dir2 = new RAMDirectory();
IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));


@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@ -46,15 +47,16 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
* @throws IOException
*/
public void testEmptyIndex() throws IOException {
Random random = newRandom();
RAMDirectory rd1 = new MockRAMDirectory();
IndexWriter iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
iw.close();
RAMDirectory rd2 = new MockRAMDirectory(rd1);
RAMDirectory rdOut = new MockRAMDirectory();
IndexWriter iwOut = new IndexWriter(rdOut, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(rd1,true));
pr.add(IndexReader.open(rd2,true));
@ -77,8 +79,9 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
*/
public void testEmptyIndexWithVectors() throws IOException {
RAMDirectory rd1 = new MockRAMDirectory();
Random random = newRandom();
{
IndexWriter iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
doc.add(new Field("test", "", Store.NO, Index.ANALYZED,
TermVector.YES));
@ -92,14 +95,14 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
ir.deleteDocument(0);
ir.close();
iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
iw.optimize();
iw.close();
}
RAMDirectory rd2 = new MockRAMDirectory();
{
IndexWriter iw = new IndexWriter(rd2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
Document doc = new Document();
iw.addDocument(doc);
iw.close();
@ -107,7 +110,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
RAMDirectory rdOut = new MockRAMDirectory();
IndexWriter iwOut = new IndexWriter(rdOut, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(rd1,true));
pr.add(IndexReader.open(rd2,true));


@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.MockAnalyzer;
@ -36,9 +37,9 @@ public class TestParallelTermEnum extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
Document doc;
Random random = newRandom();
RAMDirectory rd1 = new RAMDirectory();
IndexWriter iw1 = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
doc = new Document();
doc.add(new Field("field1", "the quick brown fox jumps", Store.YES,
@ -50,7 +51,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
iw1.close();
RAMDirectory rd2 = new RAMDirectory();
IndexWriter iw2 = new IndexWriter(rd2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
doc = new Document();
doc.add(new Field("field0", "", Store.NO, Index.ANALYZED));


@ -22,6 +22,7 @@ import static org.junit.Assert.*;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
@ -118,23 +119,24 @@ public class TestPayloadProcessorProvider extends LuceneTestCaseJ4 {
private static final int NUM_DOCS = 10;
private IndexWriterConfig getConfig() {
return new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
private IndexWriterConfig getConfig(Random random) {
return newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
}
private void populateDirs(Directory[] dirs, boolean multipleCommits)
private void populateDirs(Random random, Directory[] dirs, boolean multipleCommits)
throws IOException {
for (int i = 0; i < dirs.length; i++) {
dirs[i] = new MockRAMDirectory();
populateDocs(dirs[i], multipleCommits);
populateDocs(random, dirs[i], multipleCommits);
verifyPayloadExists(dirs[i], "p", new BytesRef("p1"), NUM_DOCS);
verifyPayloadExists(dirs[i], "p", new BytesRef("p2"), NUM_DOCS);
}
}
private void populateDocs(Directory dir, boolean multipleCommits)
private void populateDocs(Random random, Directory dir, boolean multipleCommits)
throws IOException {
IndexWriter writer = new IndexWriter(dir, getConfig());
IndexWriter writer = new IndexWriter(dir, getConfig(random));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
TokenStream payloadTS1 = new PayloadTokenStream("p1");
TokenStream payloadTS2 = new PayloadTokenStream("p2");
for (int i = 0; i < NUM_DOCS; i++) {
@ -172,14 +174,14 @@ public class TestPayloadProcessorProvider extends LuceneTestCaseJ4 {
}
}
private void doTest(boolean addToEmptyIndex,
private void doTest(Random random, boolean addToEmptyIndex,
int numExpectedPayloads, boolean multipleCommits) throws IOException {
Directory[] dirs = new Directory[2];
populateDirs(dirs, multipleCommits);
populateDirs(random, dirs, multipleCommits);
Directory dir = new MockRAMDirectory();
if (!addToEmptyIndex) {
populateDocs(dir, multipleCommits);
populateDocs(random, dir, multipleCommits);
verifyPayloadExists(dir, "p", new BytesRef("p1"), NUM_DOCS);
verifyPayloadExists(dir, "p", new BytesRef("p2"), NUM_DOCS);
}
@ -190,7 +192,7 @@ public class TestPayloadProcessorProvider extends LuceneTestCaseJ4 {
for (Directory d : dirs) {
processors.put(d, new PerTermPayloadProcessor());
}
IndexWriter writer = new IndexWriter(dir, getConfig());
IndexWriter writer = new IndexWriter(dir, getConfig(random));
writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));
IndexReader[] readers = new IndexReader[dirs.length];
@ -214,26 +216,29 @@ public class TestPayloadProcessorProvider extends LuceneTestCaseJ4 {
@Test
public void testAddIndexes() throws Exception {
Random random = newRandom();
// addIndexes - single commit in each
doTest(true, 0, false);
doTest(random, true, 0, false);
// addIndexes - multiple commits in each
doTest(true, 0, true);
doTest(random, true, 0, true);
}
@Test
public void testAddIndexesIntoExisting() throws Exception {
Random random = newRandom();
// addIndexes - single commit in each
doTest(false, NUM_DOCS, false);
doTest(random, false, NUM_DOCS, false);
// addIndexes - multiple commits in each
doTest(false, NUM_DOCS, true);
doTest(random, false, NUM_DOCS, true);
}
@Test
public void testRegularMerges() throws Exception {
Random random = newRandom();
Directory dir = new MockRAMDirectory();
populateDocs(dir, true);
populateDocs(random, dir, true);
verifyPayloadExists(dir, "p", new BytesRef("p1"), NUM_DOCS);
verifyPayloadExists(dir, "p", new BytesRef("p2"), NUM_DOCS);
@ -241,7 +246,7 @@ public class TestPayloadProcessorProvider extends LuceneTestCaseJ4 {
// won't get processed.
Map<Directory, DirPayloadProcessor> processors = new HashMap<Directory, DirPayloadProcessor>();
processors.put(dir, new PerTermPayloadProcessor());
IndexWriter writer = new IndexWriter(dir, getConfig());
IndexWriter writer = new IndexWriter(dir, getConfig(random));
writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));
writer.optimize();
writer.close();


@ -103,7 +103,7 @@ public class TestPayloads extends LuceneTestCase {
rnd = newRandom();
Directory ram = new MockRAMDirectory();
PayloadAnalyzer analyzer = new PayloadAnalyzer();
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(rnd, TEST_VERSION_CURRENT, analyzer));
Document d = new Document();
// this field won't have any payloads
d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
@ -130,7 +130,7 @@ public class TestPayloads extends LuceneTestCase {
// now we add another document which has payloads for field f3 and verify if the SegmentMerger
// enabled payloads for that field
writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(ram, newIndexWriterConfig(rnd, TEST_VERSION_CURRENT,
analyzer).setOpenMode(OpenMode.CREATE));
d = new Document();
d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
@ -160,20 +160,20 @@ public class TestPayloads extends LuceneTestCase {
rnd = newRandom();
// first perform the test using a RAMDirectory
Directory dir = new MockRAMDirectory();
performTest(dir);
performTest(rnd, dir);
// now use a FSDirectory and repeat same test
File dirName = _TestUtil.getTempDir("test_payloads");
dir = FSDirectory.open(dirName);
performTest(dir);
performTest(rnd, dir);
_TestUtil.rmDir(dirName);
}
// builds an index with payloads in the given Directory and performs
// different tests to verify the payload encoding
private void performTest(Directory dir) throws Exception {
private void performTest(Random random, Directory dir) throws Exception {
PayloadAnalyzer analyzer = new PayloadAnalyzer();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, analyzer)
.setOpenMode(OpenMode.CREATE));
@ -314,7 +314,7 @@ public class TestPayloads extends LuceneTestCase {
// test long payload
analyzer = new PayloadAnalyzer();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
analyzer).setOpenMode(OpenMode.CREATE));
String singleTerm = "lucene";
@ -490,7 +490,7 @@ public class TestPayloads extends LuceneTestCase {
final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
Directory dir = new MockRAMDirectory();
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(rnd,
TEST_VERSION_CURRENT, new MockAnalyzer()));
final String field = "test";


@ -21,6 +21,7 @@ import static org.junit.Assert.*;
import java.io.IOException;
import java.util.Map;
import java.util.Random;
import java.util.Map.Entry;
import org.apache.lucene.document.Document;
@ -56,10 +57,11 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
@Override
@Test
public void testExistingSnapshots() throws Exception {
Random random = newRandom();
int numSnapshots = 3;
Directory dir = new MockRAMDirectory();
PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy) getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(psdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, psdp));
prepareIndexAndSnapshots(psdp, writer, numSnapshots, "snapshot");
writer.close();
psdp.close();
@ -68,7 +70,7 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
psdp = new PersistentSnapshotDeletionPolicy(
new KeepOnlyLastCommitDeletionPolicy(), snapshotDir, OpenMode.APPEND,
TEST_VERSION_CURRENT);
new IndexWriter(dir, getConfig(psdp)).close();
new IndexWriter(dir, getConfig(random, psdp)).close();
assertSnapshotExists(dir, psdp, numSnapshots);
assertEquals(numSnapshots, psdp.getSnapshots().size());
@ -83,7 +85,7 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
@Test
public void testInvalidSnapshotInfos() throws Exception {
// Add the correct number of documents (1), but without snapshot information
IndexWriter writer = new IndexWriter(snapshotDir, getConfig(null));
IndexWriter writer = new IndexWriter(snapshotDir, getConfig(newRandom(), null));
writer.addDocument(new Document());
writer.close();
try {
@ -98,7 +100,7 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
@Test
public void testNoSnapshotInfos() throws Exception {
// Initialize an empty index in snapshotDir - PSDP should initialize successfully.
new IndexWriter(snapshotDir, getConfig(null)).close();
new IndexWriter(snapshotDir, getConfig(newRandom(), null)).close();
new PersistentSnapshotDeletionPolicy(
new KeepOnlyLastCommitDeletionPolicy(), snapshotDir, OpenMode.APPEND,
TEST_VERSION_CURRENT).close();
@ -107,7 +109,7 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
@Test(expected=IllegalStateException.class)
public void testTooManySnapshotInfos() throws Exception {
// Write two documents to the snapshots directory - illegal.
IndexWriter writer = new IndexWriter(snapshotDir, getConfig(null));
IndexWriter writer = new IndexWriter(snapshotDir, getConfig(newRandom(), null));
writer.addDocument(new Document());
writer.addDocument(new Document());
writer.close();
@ -122,7 +124,7 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
public void testSnapshotRelease() throws Exception {
Directory dir = new MockRAMDirectory();
PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy) getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(psdp));
IndexWriter writer = new IndexWriter(dir, getConfig(newRandom(), psdp));
prepareIndexAndSnapshots(psdp, writer, 1, "snapshot");
writer.close();


@ -17,6 +17,8 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -31,7 +33,8 @@ public class TestRollback extends LuceneTestCase {
// LUCENE-2536
public void testRollbackIntegrityWithBufferFlush() throws Exception {
Directory dir = new MockRAMDirectory();
RandomIndexWriter rw = new RandomIndexWriter(newRandom(), dir);
Random random = newRandom();
RandomIndexWriter rw = new RandomIndexWriter(random, dir);
for (int i = 0; i < 5; i++) {
Document doc = new Document();
doc.add(new Field("pk", Integer.toString(i), Store.YES, Index.ANALYZED_NO_NORMS));
@ -40,7 +43,7 @@ public class TestRollback extends LuceneTestCase {
rw.close();
// If buffer size is small enough to cause a flush, errors ensue...
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
Term pkTerm = new Term("pk", "");
for (int i = 0; i < 3; i++) {


@ -104,7 +104,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
public void testSkipTo(int indexDivisor) throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()));
Term ta = new Term("content","aaa");
for(int i = 0; i < 10; i++)


@ -18,6 +18,7 @@ package org.apache.lucene.index;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.BytesRef;
@ -36,9 +37,10 @@ public class TestSegmentTermEnum extends LuceneTestCase {
Directory dir = new RAMDirectory();
public void testTermEnum() throws IOException {
Random random = newRandom();
IndexWriter writer = null;
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
// ADD 100 documents with term : aaa
// add 100 documents with terms: aaa bbb
@ -54,7 +56,7 @@ public class TestSegmentTermEnum extends LuceneTestCase {
verifyDocFreq();
// merge segments by optimizing the index
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@ -65,7 +67,7 @@ public class TestSegmentTermEnum extends LuceneTestCase {
public void testPrevTermAtEnd() throws IOException
{
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
addDoc(writer, "aaa bbb");
writer.close();
SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);


@ -4,6 +4,7 @@ import static org.junit.Assert.*;
import java.util.Collection;
import java.util.Map;
import java.util.Random;
import java.io.File;
import java.io.IOException;
@ -33,8 +34,8 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
public static final String INDEX_PATH = "test.snapshots";
protected IndexWriterConfig getConfig(IndexDeletionPolicy dp) {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
protected IndexWriterConfig getConfig(Random random, IndexDeletionPolicy dp) {
IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer());
if (dp != null) {
conf.setIndexDeletionPolicy(dp);
}
@ -84,26 +85,27 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testSnapshotDeletionPolicy() throws Exception {
Random random = newRandom();
File dir = _TestUtil.getTempDir(INDEX_PATH);
try {
Directory fsDir = FSDirectory.open(dir);
runTest(fsDir);
runTest(random, fsDir);
fsDir.close();
} finally {
_TestUtil.rmDir(dir);
}
MockRAMDirectory dir2 = new MockRAMDirectory();
runTest(dir2);
runTest(random, dir2);
dir2.close();
}
private void runTest(Directory dir) throws Exception {
private void runTest(Random random, Directory dir) throws Exception {
// Run for ~1 second
final long stopTime = System.currentTimeMillis() + 1000;
SnapshotDeletionPolicy dp = getDeletionPolicy();
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(dp)
.setMaxBufferedDocs(2));
writer.commit();
@ -229,10 +231,11 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
public void testBasicSnapshots() throws Exception {
int numSnapshots = 3;
SnapshotDeletionPolicy sdp = getDeletionPolicy();
Random random = newRandom();
// Create 3 snapshots: snapshot0, snapshot1, snapshot2
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
prepareIndexAndSnapshots(sdp, writer, numSnapshots, "snapshot");
writer.close();
@ -243,7 +246,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
// open a new IndexWriter w/ no snapshots to keep and assert that all snapshots are gone.
sdp = getDeletionPolicy();
writer = new IndexWriter(dir, getConfig(sdp));
writer = new IndexWriter(dir, getConfig(random, sdp));
writer.deleteUnusedFiles();
writer.close();
assertEquals("no snapshots should exist", 1, IndexReader.listCommits(dir).size());
@ -260,9 +263,10 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testMultiThreadedSnapshotting() throws Exception {
Random random = newRandom();
Directory dir = new MockRAMDirectory();
final SnapshotDeletionPolicy sdp = getDeletionPolicy();
final IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
final IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
Thread[] threads = new Thread[10];
for (int i = 0; i < threads.length; i++) {
@ -303,15 +307,16 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testRollbackToOldSnapshot() throws Exception {
Random random = newRandom();
int numSnapshots = 2;
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy sdp = getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
prepareIndexAndSnapshots(sdp, writer, numSnapshots, "snapshot");
writer.close();
// now open the writer on "snapshot0" - make sure it succeeds
writer = new IndexWriter(dir, getConfig(sdp).setIndexCommit(sdp.getSnapshot("snapshot0")));
writer = new IndexWriter(dir, getConfig(random, sdp).setIndexCommit(sdp.getSnapshot("snapshot0")));
// this does the actual rollback
writer.commit();
writer.deleteUnusedFiles();
@ -324,9 +329,10 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testReleaseSnapshot() throws Exception {
Random random = newRandom();
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy sdp = getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
prepareIndexAndSnapshots(sdp, writer, 1, "snapshot");
// Create another commit - we must do that, because otherwise the "snapshot"
@ -351,18 +357,19 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testExistingSnapshots() throws Exception {
Random random = newRandom();
// Tests the ability to construct a SDP from existing snapshots, and
// asserts that those snapshots/commit points are protected.
int numSnapshots = 3;
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy sdp = getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
prepareIndexAndSnapshots(sdp, writer, numSnapshots, "snapshot");
writer.close();
// Make a new policy and initialize with snapshots.
sdp = getDeletionPolicy(sdp.getSnapshots());
writer = new IndexWriter(dir, getConfig(sdp));
writer = new IndexWriter(dir, getConfig(random, sdp));
// attempt to delete unused files - the snapshotted files should not be deleted
writer.deleteUnusedFiles();
writer.close();
@ -371,9 +378,10 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testSnapshotLastCommitTwice() throws Exception {
Random random = newRandom();
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy sdp = getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
writer.addDocument(new Document());
writer.commit();
@ -397,11 +405,12 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
@Test
public void testMissingCommits() throws Exception {
Random random = newRandom();
// Tests the behavior of SDP when commits that are given at ctor are missing
// on onInit().
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy sdp = getDeletionPolicy();
IndexWriter writer = new IndexWriter(dir, getConfig(sdp));
IndexWriter writer = new IndexWriter(dir, getConfig(random, sdp));
writer.addDocument(new Document());
writer.commit();
IndexCommit ic = sdp.snapshot("s1");
@ -412,14 +421,14 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCaseJ4 {
// open a new writer w/ KeepOnlyLastCommit policy, so it will delete "s1"
// commit.
new IndexWriter(dir, getConfig(null)).close();
new IndexWriter(dir, getConfig(random, null)).close();
assertFalse("snapshotted commit should not exist", dir.fileExists(ic.getSegmentsFileName()));
// Now reinit SDP from the commits in the index - the snapshot id should not
// exist anymore.
sdp = getDeletionPolicy(sdp.getSnapshots());
new IndexWriter(dir, getConfig(sdp)).close();
new IndexWriter(dir, getConfig(random, sdp)).close();
try {
sdp.getSnapshot("s1");


@ -118,7 +118,7 @@ public class TestStressIndexing extends MultiCodecTestCase {
stress test.
*/
public void runStressTest(Directory directory, MergeScheduler mergeScheduler) throws Exception {
IndexWriter modifier = new IndexWriter(directory, new IndexWriterConfig(
IndexWriter modifier = new IndexWriter(directory, newIndexWriterConfig(RANDOM,
TEST_VERSION_CURRENT, new MockAnalyzer())
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergeScheduler(
mergeScheduler));


@ -85,7 +85,7 @@ public class TestStressIndexing2 extends MultiCodecTestCase {
int maxThreadStates = 1+r.nextInt(10);
boolean doReaderPooling = r.nextBoolean();
Map<String,Document> docs = indexRandom(5, 3, 100, dir1, maxThreadStates, doReaderPooling);
indexSerial(docs, dir2);
indexSerial(r, docs, dir2);
// sanity check: verify the index against itself (normally disabled)
// verifyEquals(dir1, dir1, "id");
@ -115,7 +115,7 @@ public class TestStressIndexing2 extends MultiCodecTestCase {
Directory dir2 = new MockRAMDirectory();
Map<String,Document> docs = indexRandom(nThreads, iter, range, dir1, maxThreadStates, doReaderPooling);
//System.out.println("TEST: index serial");
indexSerial(docs, dir2);
indexSerial(r, docs, dir2);
//System.out.println("TEST: verify");
verifyEquals(dir1, dir2, "id");
}
@ -141,7 +141,7 @@ public class TestStressIndexing2 extends MultiCodecTestCase {
public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<String,Document>();
IndexWriter w = new MockIndexWriter(dir, new IndexWriterConfig(
IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(r,
TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
0.1).setMaxBufferedDocs(maxBufferedDocs));
w.commit();
@ -194,7 +194,7 @@ public class TestStressIndexing2 extends MultiCodecTestCase {
boolean doReaderPooling) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<String,Document>();
for(int iter=0;iter<3;iter++) {
IndexWriter w = new MockIndexWriter(dir, new IndexWriterConfig(
IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(r,
TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)
.setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setMaxThreadStates(maxThreadStates)
.setReaderPooling(doReaderPooling));
@ -238,8 +238,8 @@ public class TestStressIndexing2 extends MultiCodecTestCase {
}
public static void indexSerial(Map<String,Document> docs, Directory dir) throws IOException {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
public static void indexSerial(Random random, Map<String,Document> docs, Directory dir) throws IOException {
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
// index all docs in a single thread
Iterator<Document> iter = docs.values().iterator();
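
The helper body is truncated here; below is a plausible completion under assumptions: every doc added on a single thread, then the writer closed. The version constant and analyzer are illustrative, and in the patch newIndexWriterConfig(random, ...) replaces the plain config shown.

import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;

public class IndexSerialSketch {
  public static void indexSerial(Map<String,Document> docs, Directory dir)
      throws IOException {
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_CURRENT, new MockAnalyzer()));
    // index all docs in a single thread
    Iterator<Document> iter = docs.values().iterator();
    while (iter.hasNext()) {
      w.addDocument(iter.next());
    }
    w.close();
  }
}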


@ -93,9 +93,11 @@ public class TestTermVectorsReader extends LuceneTestCase {
}
Arrays.sort(tokens);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MyAnalyzer()).setMaxBufferedDocs(-1));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10);
Document doc = new Document();
for(int i=0;i<testFields.length;i++) {
final Field.TermVector tv;


@ -72,7 +72,7 @@ public class TestTermdocPerf extends LuceneTestCase {
Document doc = new Document();
doc.add(new Field(field,val, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, analyzer)
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(100);
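
Several hunks keep this post-construction pattern: build the writer, then cast the config's merge policy to tune it. A self-contained sketch; the directory, version constant, and tuned values are illustrative.

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class MergePolicyTuningSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_CURRENT, new MockAnalyzer()));
    // The config owns the merge policy; the Log* family exposes the knobs.
    LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
    lmp.setUseCompoundFile(false);     // keep separate index files
    lmp.setUseCompoundDocStore(false); // and separate doc stores
    lmp.setMergeFactor(100);           // wide merges, fewer merge passes
    writer.close();
  }
}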


@ -33,6 +33,7 @@ import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
import java.io.File;
import java.util.Random;
public class TestThreadedOptimize extends LuceneTestCase {
@ -53,9 +54,9 @@ public class TestThreadedOptimize extends LuceneTestCase {
failed = true;
}
public void runTest(Directory directory, MergeScheduler merger) throws Exception {
public void runTest(Random random, Directory directory, MergeScheduler merger) throws Exception {
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, ANALYZER)
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergeScheduler(
merger));
@ -120,7 +121,7 @@ public class TestThreadedOptimize extends LuceneTestCase {
assertEquals(expectedDocCount, writer.maxDoc());
writer.close();
writer = new IndexWriter(directory, new IndexWriterConfig(
writer = new IndexWriter(directory, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, ANALYZER).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(2));
@ -137,15 +138,16 @@ public class TestThreadedOptimize extends LuceneTestCase {
FSDirectory.
*/
public void testThreadedOptimize() throws Exception {
Random random = newRandom();
Directory directory = new MockRAMDirectory();
runTest(directory, new SerialMergeScheduler());
runTest(directory, new ConcurrentMergeScheduler());
runTest(random, directory, new SerialMergeScheduler());
runTest(random, directory, new ConcurrentMergeScheduler());
directory.close();
File dirName = new File(TEMP_DIR, "luceneTestThreadedOptimize");
directory = FSDirectory.open(dirName);
runTest(directory, new SerialMergeScheduler());
runTest(directory, new ConcurrentMergeScheduler());
runTest(random, directory, new SerialMergeScheduler());
runTest(random, directory, new ConcurrentMergeScheduler());
directory.close();
_TestUtil.rmDir(dirName);
}
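
The method above threads one Random through all four runs, so a single seed reproduces the whole matrix. A sketch of that shape, with a toy runTest standing in for the real indexing/optimize body; the seed and doc counts are illustrative assumptions.

import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.Version;

public class SchedulerMatrixSketch {
  static void runTest(Random random, Directory dir, MergeScheduler ms)
      throws Exception {
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_CURRENT, new MockAnalyzer())
            .setOpenMode(OpenMode.CREATE)
            .setMaxBufferedDocs(2)
            .setMergeScheduler(ms));
    for (int i = 0; i < 10 + random.nextInt(10); i++) {
      w.addDocument(new Document());
    }
    w.optimize(); // exercises the scheduler under test
    w.close();
  }

  public static void main(String[] args) throws Exception {
    Random random = new Random(42); // one seed drives every combination
    Directory dir = new MockRAMDirectory();
    runTest(random, dir, new SerialMergeScheduler());
    runTest(random, dir, new ConcurrentMergeScheduler());
    dir.close();
  }
}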


@ -25,6 +25,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.MockAnalyzer;
@ -44,7 +45,7 @@ public class TestTransactionRollback extends LuceneTestCase {
private static final String FIELD_RECORD_ID = "record_id";
private Directory dir;
private Random random;
//Rolls back index to a chosen ID
private void rollBackLast(int id) throws Exception {
@ -64,7 +65,7 @@ public class TestTransactionRollback extends LuceneTestCase {
if (last==null)
throw new RuntimeException("Couldn't find commit point "+id);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(
new RollbackDeletionPolicy(id)).setIndexCommit(last));
Map<String,String> data = new HashMap<String,String>();
@ -124,10 +125,10 @@ public class TestTransactionRollback extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
dir = new MockRAMDirectory();
random = newRandom();
//Build index of records 1 to 100, committing after each batch of 10
IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(sdp));
IndexWriter w=new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(sdp));
for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
Document doc=new Document();
doc.add(new Field(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED));
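
This setUp hunk is also truncated; presumably the loop commits after every tenth document so ten commit points exist to roll back to, with the KeepAllDeletionPolicy above preventing their deletion. A standalone sketch of that batching — note the default deletion policy here would keep only the last commit, unlike the test.

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.Version;

public class BatchCommitSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_CURRENT, new MockAnalyzer()));
    for (int recordId = 1; recordId <= 100; recordId++) {
      Document doc = new Document();
      doc.add(new Field("record_id", "" + recordId,
                        Field.Store.YES, Field.Index.ANALYZED));
      w.addDocument(doc);
      if (recordId % 10 == 0) {
        w.commit(); // one commit point per batch of ten
      }
    }
    w.close();
    dir.close();
  }
}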
@ -195,7 +196,7 @@ public class TestTransactionRollback extends LuceneTestCase {
for(int i=0;i<2;i++) {
// Unless you specify a prior commit point, rollback
// should not work:
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())
.setIndexDeletionPolicy(new DeleteLastCommitPolicy())).close();
IndexReader r = IndexReader.open(dir, true);
assertEquals(100, r.numDocs());


@ -93,13 +93,15 @@ public class TestTransactions extends LuceneTestCase {
@Override
public void doWork() throws Throwable {
IndexWriter writer1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(3));
IndexWriter writer1 = new IndexWriter(dir1, newIndexWriterConfig(RANDOM, TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(3).setMergeScheduler(new ConcurrentMergeScheduler()));
((LogMergePolicy) writer1.getConfig().getMergePolicy()).setMergeFactor(2);
((ConcurrentMergeScheduler) writer1.getConfig().getMergeScheduler()).setSuppressExceptions();
// Intentionally use different params so flush/merge
// happen @ different times
IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(RANDOM, TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
((LogMergePolicy) writer2.getConfig().getMergePolicy()).setMergeFactor(3);
((ConcurrentMergeScheduler) writer2.getConfig().getMergeScheduler()).setSuppressExceptions();
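
A self-contained version of the two-writer setup above: deliberately different maxBufferedDocs and merge factors so flushes and merges interleave differently across the two indexes. The numeric values are the test's own; the directory, version constant, and analyzer are illustrative.

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.Version;

public class TwoWriterSketch {
  public static void main(String[] args) throws Exception {
    Directory dir1 = new MockRAMDirectory();
    Directory dir2 = new MockRAMDirectory();

    IndexWriter writer1 = new IndexWriter(dir1,
        new IndexWriterConfig(Version.LUCENE_CURRENT, new MockAnalyzer())
            .setMaxBufferedDocs(3)
            .setMergeScheduler(new ConcurrentMergeScheduler()));
    ((LogMergePolicy) writer1.getConfig().getMergePolicy()).setMergeFactor(2);
    // Keep background merge exceptions from failing the whole test run.
    ((ConcurrentMergeScheduler) writer1.getConfig().getMergeScheduler())
        .setSuppressExceptions();

    // Intentionally different params so flush/merge happen at different times.
    IndexWriter writer2 = new IndexWriter(dir2,
        new IndexWriterConfig(Version.LUCENE_CURRENT, new MockAnalyzer())
            .setMaxBufferedDocs(2)
            .setMergeScheduler(new ConcurrentMergeScheduler()));
    ((LogMergePolicy) writer2.getConfig().getMergePolicy()).setMergeFactor(3);
    ((ConcurrentMergeScheduler) writer2.getConfig().getMergeScheduler())
        .setSuppressExceptions();

    writer1.close();
    writer2.close();
    dir1.close();
    dir2.close();
  }
}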
@ -181,7 +183,7 @@ public class TestTransactions extends LuceneTestCase {
}
public void initIndex(Directory dir) throws Throwable {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(RANDOM, TEST_VERSION_CURRENT, new MockAnalyzer()));
for(int j=0; j<7; j++) {
Document d = new Document();
int n = RANDOM.nextInt();


@ -563,6 +563,7 @@ public class LuceneTestCaseJ4 {
}
c.setReaderPooling(r.nextBoolean());
c.setReaderTermsIndexDivisor(_TestUtil.nextInt(r, 1, 4));
return c;
}
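
Only the tail of the new LuceneTestCaseJ4 factory is visible above. Below is a hedged reconstruction of what a newIndexWriterConfig(Random, Version, Analyzer) helper of this shape might look like: the reader-pooling and terms-index-divisor lines come from the hunk, while everything else — the buffered-docs randomization and the _TestUtil.nextInt stand-in — is an assumption for illustration.

import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.Version;

public class RandomConfigSketch {
  public static IndexWriterConfig newIndexWriterConfig(
      Random r, Version v, Analyzer a) {
    IndexWriterConfig c = new IndexWriterConfig(v, a);
    if (r.nextBoolean()) {
      // small buffers force frequent flushes (assumed randomization)
      c.setMaxBufferedDocs(nextInt(r, 2, 20));
    }
    // the two settings below mirror the hunk shown above
    c.setReaderPooling(r.nextBoolean());
    c.setReaderTermsIndexDivisor(nextInt(r, 1, 4));
    return c;
  }

  // stand-in for _TestUtil.nextInt(r, min, max); inclusive bounds assumed
  private static int nextInt(Random r, int min, int max) {
    return min + r.nextInt(max - min + 1);
  }
}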