mirror of https://github.com/apache/lucene.git
LUCENE-9268: Add some random tests to IndexWriter
Add some tests that perform a set of operations randomly and concurrently on IndexWriter.
parent 01688cd8f1
commit 7b9f212907
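Both new tests share the same concurrency skeleton: a CountDownLatch lines the worker threads up so they all start together, and a shared Semaphore acts as a budget of operations that the threads drain until it runs dry. The following is a minimal, self-contained sketch of that pattern only; the class name RandomOpsPattern, the use of ThreadLocalRandom in place of the test framework's random(), and the empty loop body are placeholders, not code from the commit.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadLocalRandom;

public class RandomOpsPattern {
  public static void main(String[] args) throws Exception {
    // a shared budget of 10..1009 operations, drained concurrently by all workers
    Semaphore numOperations = new Semaphore(10 + ThreadLocalRandom.current().nextInt(1000));
    Thread[] threads = new Thread[1 + ThreadLocalRandom.current().nextInt(4)];
    CountDownLatch latch = new CountDownLatch(threads.length);
    for (int i = 0; i < threads.length; i++) {
      threads[i] = new Thread(() -> {
        latch.countDown();               // announce that this worker is ready
        try {
          latch.await();                 // wait until every worker is ready, then start together
          while (numOperations.tryAcquire()) {
            // one permit == one randomly chosen operation
            // (the Lucene tests update, delete, or add a document here)
          }
        } catch (InterruptedException e) {
          throw new AssertionError(e);
        }
      });
      threads[i].start();
    }
    for (Thread thread : threads) {
      thread.join();                     // finish only once the operation budget is drained
    }
  }
}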
@@ -41,6 +41,7 @@ import java.util.Set;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Semaphore;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
@@ -61,6 +62,7 @@ import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -3775,4 +3777,120 @@ public class TestIndexWriter extends LuceneTestCase {
       IOUtils.close(sm, dir);
     }
   }
+
+  public void testRandomOperations() throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.setMergePolicy(new FilterMergePolicy(newMergePolicy()) {
+      boolean keepFullyDeletedSegment = random().nextBoolean();
+
+      @Override
+      public boolean keepFullyDeletedSegment(IOSupplier<CodecReader> readerIOSupplier) {
+        return keepFullyDeletedSegment;
+      }
+    });
+    try (Directory dir = newDirectory();
+         IndexWriter writer = new IndexWriter(dir, iwc);
+         SearcherManager sm = new SearcherManager(writer, new SearcherFactory())) {
+      Semaphore numOperations = new Semaphore(10 + random().nextInt(1000));
+      boolean singleDoc = random().nextBoolean();
+      Thread[] threads = new Thread[1 + random().nextInt(4)];
+      CountDownLatch latch = new CountDownLatch(threads.length);
+      for (int i = 0; i < threads.length; i++) {
+        threads[i] = new Thread(() -> {
+          latch.countDown();
+          try {
+            latch.await();
+            while (numOperations.tryAcquire()) {
+              String id = singleDoc ? "1" : Integer.toString(random().nextInt(10));
+              Document doc = new Document();
+              doc.add(new StringField("id", id, Field.Store.YES));
+              if (random().nextInt(10) <= 2) {
+                writer.updateDocument(new Term("id", id), doc);
+              } else if (random().nextInt(10) <= 2) {
+                writer.deleteDocuments(new Term("id", id));
+              } else {
+                writer.addDocument(doc);
+              }
+              if (random().nextInt(100) < 10) {
+                sm.maybeRefreshBlocking();
+              }
+              if (random().nextInt(100) < 5) {
+                writer.commit();
+              }
+              if (random().nextInt(100) < 1) {
+                writer.forceMerge(1 + random().nextInt(10), random().nextBoolean());
+              }
+            }
+          } catch (Exception e) {
+            throw new AssertionError(e);
+          }
+        });
+        threads[i].start();
+      }
+      for (Thread thread : threads) {
+        thread.join();
+      }
+    }
+  }
+
+  public void testRandomOperationsWithSoftDeletes() throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    AtomicInteger seqNo = new AtomicInteger(-1);
+    AtomicInteger retainingSeqNo = new AtomicInteger();
+    iwc.setSoftDeletesField("soft_deletes");
+    iwc.setMergePolicy(new SoftDeletesRetentionMergePolicy("soft_deletes",
+        () -> LongPoint.newRangeQuery("seq_no", retainingSeqNo.longValue(), Long.MAX_VALUE), newMergePolicy()));
+    try (Directory dir = newDirectory();
+         IndexWriter writer = new IndexWriter(dir, iwc);
+         SearcherManager sm = new SearcherManager(writer, new SearcherFactory())) {
+      Semaphore numOperations = new Semaphore(10 + random().nextInt(1000));
+      boolean singleDoc = random().nextBoolean();
+      Thread[] threads = new Thread[1 + random().nextInt(4)];
+      CountDownLatch latch = new CountDownLatch(threads.length);
+      for (int i = 0; i < threads.length; i++) {
+        threads[i] = new Thread(() -> {
+          latch.countDown();
+          try {
+            latch.await();
+            while (numOperations.tryAcquire()) {
+              String id = singleDoc ? "1" : Integer.toString(random().nextInt(10));
+              Document doc = new Document();
+              doc.add(new StringField("id", id, Field.Store.YES));
+              doc.add(new LongPoint("seq_no", seqNo.getAndIncrement()));
+              if (random().nextInt(10) <= 2) {
+                if (random().nextBoolean()) {
+                  doc.add(new NumericDocValuesField(iwc.softDeletesField, 1));
+                }
+                writer.softUpdateDocument(new Term("id", id), doc, new NumericDocValuesField(iwc.softDeletesField, 1));
+              } else {
+                writer.addDocument(doc);
+              }
+              if (random().nextInt(100) < 10) {
+                int min = retainingSeqNo.get();
+                int max = seqNo.get();
+                if (min < max && random().nextBoolean()) {
+                  retainingSeqNo.compareAndSet(min, min - random().nextInt(max - min));
+                }
+              }
+              if (random().nextInt(100) < 10) {
+                sm.maybeRefreshBlocking();
+              }
+              if (random().nextInt(100) < 5) {
+                writer.commit();
+              }
+              if (random().nextInt(100) < 1) {
+                writer.forceMerge(1 + random().nextInt(10), random().nextBoolean());
+              }
+            }
+          } catch (Exception e) {
+            throw new AssertionError(e);
+          }
+        });
+        threads[i].start();
+      }
+      for (Thread thread : threads) {
+        thread.join();
+      }
+    }
+  }
 }
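The second test layers soft deletes on top of the same skeleton: documents are replaced via softUpdateDocument, and the SoftDeletesRetentionMergePolicy keeps soft-deleted documents whose seq_no point falls in [retainingSeqNo, Long.MAX_VALUE] even after merges. The tests only refresh the SearcherManager; a consumer of those refreshed searchers would follow the usual acquire/release pattern shown in this illustrative sketch (the helper class and method are placeholders, not part of the commit; the field name mirrors the tests):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

final class SearcherManagerUsageSketch {
  static long countForId(SearcherManager sm, String id) throws Exception {
    IndexSearcher searcher = sm.acquire();   // pin the current point-in-time view
    try {
      TopDocs hits = searcher.search(new TermQuery(new Term("id", id)), 10);
      return hits.totalHits.value;
    } finally {
      sm.release(searcher);                  // let IndexWriter reclaim superseded segment files
    }
  }
}

Readers obtained from the writer this way already treat soft-deleted documents as deleted, so the retention policy only controls what merges keep on disk, not what searches return.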