LUCENE-5039: Refactor IndexWriter#testPoint() to use InfoStream instead of subclass overrides

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1490533 13f79535-47bb-0310-9956-ffa450edef68
Simon Willnauer 2013-06-07 08:13:23 +00:00
parent 3ff9c73184
commit 1f2c01754f
8 changed files with 110 additions and 103 deletions
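
In short: tests no longer subclass IndexWriter to override testPoint(). Production code now reports each test point as an InfoStream message under the "TP" component, and the test framework installs an InfoStream wrapper that turns those messages into TestPoint callbacks. A minimal sketch of the new round trip, written as it would appear inside a LuceneTestCase test method (dir, random(), and TEST_VERSION_CURRENT come from the test context; the AtomicInteger counter is our own illustration, not part of the patch):

  // Sketch only: TestPoint and mockIndexWriter are introduced by this commit;
  // the counter is a hypothetical illustration of a custom callback.
  final AtomicInteger testPointsSeen = new AtomicInteger();

  RandomIndexWriter.TestPoint counter = new RandomIndexWriter.TestPoint() {
    @Override
    public void apply(String message) {
      // invoked once per InfoStream message on the "TP" component,
      // e.g. "DocumentsWriterPerThread addDocument start"
      testPointsSeen.incrementAndGet();
    }
  };

  IndexWriter w = RandomIndexWriter.mockIndexWriter(dir,
      newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())), counter);
  w.addDocument(new Document()); // fires several test points along the way
  w.close();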

lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java

@@ -112,7 +112,7 @@ class DocumentsWriterPerThread {
     // Only called by asserts
     public boolean testPoint(String name) {
-      return docWriter.writer.testPoint(name);
+      return docWriter.testPoint(name);
     }
 
     public void clear() {
@@ -232,6 +232,13 @@ class DocumentsWriterPerThread {
     aborting = true;
   }
 
+  final boolean testPoint(String message) {
+    if (infoStream.isEnabled("TP")) {
+      infoStream.message("TP", message);
+    }
+    return true;
+  }
+
   boolean checkAndResetHasAborted() {
     final boolean retval = hasAborted;
     hasAborted = false;
@@ -239,7 +246,7 @@ class DocumentsWriterPerThread {
   }
 
   public void updateDocument(IndexDocument doc, Analyzer analyzer, Term delTerm) throws IOException {
-    assert writer.testPoint("DocumentsWriterPerThread addDocument start");
+    assert testPoint("DocumentsWriterPerThread addDocument start");
     assert deleteQueue != null;
     docState.doc = doc;
     docState.analyzer = analyzer;
@@ -292,7 +299,7 @@ class DocumentsWriterPerThread {
   }
 
   public int updateDocuments(Iterable<? extends IndexDocument> docs, Analyzer analyzer, Term delTerm) throws IOException {
-    assert writer.testPoint("DocumentsWriterPerThread addDocuments start");
+    assert testPoint("DocumentsWriterPerThread addDocuments start");
     assert deleteQueue != null;
     docState.analyzer = analyzer;
     if (segmentInfo == null) {

lucene/core/src/java/org/apache/lucene/index/IndexWriter.java

@@ -4162,7 +4162,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
   //   startCommitMergeDeletes
   //   startMergeInit
   //   DocumentsWriter.ThreadState.init start
-  boolean testPoint(String name) {
+  private final boolean testPoint(String message) {
+    if (infoStream.isEnabled("TP")) {
+      infoStream.message("TP", message);
+    }
     return true;
   }
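
Note the shape of the new method: it still returns true and is only ever invoked behind `assert`, so with assertions disabled (the production default) the call is elided entirely, and under -ea it costs a single isEnabled("TP") check unless a test has enabled the "TP" component. A standalone illustration of this assert-only hook idiom (generic Java, not Lucene code):

  // Generic sketch of the assert-only hook idiom used by testPoint():
  // the hook exists purely for its side effect and returns true so the
  // surrounding assert can never fail.
  class Worker {
    private boolean testPoint(String name) {
      System.out.println("TP: " + name); // stand-in for infoStream.message("TP", name)
      return true;
    }

    void doWork() {
      assert testPoint("doWork start"); // skipped entirely without -ea
      // ... actual work ...
    }
  }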

lucene/core/src/java/org/apache/lucene/index/StoredFieldsProcessor.java

@@ -114,7 +114,7 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   @Override
   void finishDocument() throws IOException {
-    assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument start");
+    assert docWriter.testPoint("StoredFieldsWriter.finishDocument start");
     initFieldsWriter(IOContext.DEFAULT);
     fill(docState.docID);
@@ -129,7 +129,7 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
     }
     reset();
-    assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument end");
+    assert docWriter.testPoint("StoredFieldsWriter.finishDocument end");
   }
 
   @Override

lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java

@@ -94,7 +94,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   @Override
   void finishDocument(TermsHash termsHash) throws IOException {
-    assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument start");
+    assert docWriter.testPoint("TermVectorsTermsWriter.finishDocument start");
     if (!hasVectors) {
       return;
@@ -117,7 +117,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
     termsHash.reset();
     reset();
-    assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument end");
+    assert docWriter.testPoint("TermVectorsTermsWriter.finishDocument end");
   }
 
   @Override

lucene/core/src/test/org/apache/lucene/index/TestAtomicUpdate.java

@@ -25,18 +25,7 @@ import org.apache.lucene.store.*;
 import org.apache.lucene.util.*;
 
 public class TestAtomicUpdate extends LuceneTestCase {
-  private static final class MockIndexWriter extends IndexWriter {
-    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
-    @Override
-    boolean testPoint(String name) {
-      if (LuceneTestCase.random().nextInt(4) == 2)
-        Thread.yield();
-      return true;
-    }
-  }
 
   private static abstract class TimedThread extends Thread {
     volatile boolean failed;
@@ -124,7 +113,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
         TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(7);
     ((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3);
-    IndexWriter writer = new MockIndexWriter(directory, conf);
+    IndexWriter writer = RandomIndexWriter.mockIndexWriter(directory, conf, random());
 
     // Establish a base index of 100 docs:
     for(int i=0;i<100;i++) {

lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java

@@ -210,15 +210,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   ThreadLocal<Thread> doFail = new ThreadLocal<Thread>();
 
-  private class MockIndexWriter extends IndexWriter {
+  private class TestPoint1 implements RandomIndexWriter.TestPoint {
     Random r = new Random(random().nextLong());
-    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
     @Override
-    boolean testPoint(String name) {
+    public void apply(String name) {
       if (doFail.get() != null && !name.equals("startDoFlush") && r.nextInt(40) == 17) {
         if (VERBOSE) {
           System.out.println(Thread.currentThread().getName() + ": NOW FAIL: " + name);
@@ -226,7 +221,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
         }
         throw new RuntimeException(Thread.currentThread().getName() + ": intentionally failing at " + name);
       }
-      return true;
     }
   }
 
@@ -238,8 +232,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     MockAnalyzer analyzer = new MockAnalyzer(random());
     analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
-    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
-        .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
+
+    IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
+        .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);
     if (VERBOSE) {
@@ -281,8 +276,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     Directory dir = newDirectory();
     MockAnalyzer analyzer = new MockAnalyzer(random());
     analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
-    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
-        .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
+    IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
+        .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);
     writer.commit();
@@ -324,19 +319,13 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   }
 
   // LUCENE-1198
-  private static final class MockIndexWriter2 extends IndexWriter {
-    public MockIndexWriter2(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
+  private static final class TestPoint2 implements RandomIndexWriter.TestPoint {
     boolean doFail;
 
     @Override
-    boolean testPoint(String name) {
+    public void apply(String name) {
       if (doFail && name.equals("DocumentsWriterPerThread addDocument start"))
         throw new RuntimeException("intentionally failing");
-      return true;
     }
   }
 
@@ -367,11 +356,12 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   public void testExceptionDocumentsWriterInit() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    TestPoint2 testPoint = new TestPoint2();
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint);
     Document doc = new Document();
     doc.add(newTextField("field", "a field", Field.Store.YES));
     w.addDocument(doc);
-    w.doFail = true;
+    testPoint.doFail = true;
     try {
       w.addDocument(doc);
       fail("did not hit exception");
@@ -385,7 +375,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // LUCENE-1208
   public void testExceptionJustBeforeFlush() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2), new TestPoint1());
     Document doc = new Document();
     doc.add(newTextField("field", "a field", Field.Store.YES));
     w.addDocument(doc);
@@ -412,22 +402,15 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     dir.close();
   }
 
-  private static final class MockIndexWriter3 extends IndexWriter {
-    public MockIndexWriter3(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
+  private static final class TestPoint3 implements RandomIndexWriter.TestPoint {
     boolean doFail;
     boolean failed;
 
     @Override
-    boolean testPoint(String name) {
+    public void apply(String name) {
       if (doFail && name.equals("startMergeInit")) {
        failed = true;
        throw new RuntimeException("intentionally failing");
      }
-      return true;
    }
  }
 
@@ -441,8 +424,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     cms.setSuppressExceptions();
     conf.setMergeScheduler(cms);
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
-    MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
-    w.doFail = true;
+    TestPoint3 testPoint = new TestPoint3();
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, conf, testPoint);
+    testPoint.doFail = true;
 
     Document doc = new Document();
     doc.add(newTextField("field", "a field", Field.Store.YES));
     for(int i=0;i<10;i++)
@@ -453,7 +437,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     }
 
     ((ConcurrentMergeScheduler) w.getConfig().getMergeScheduler()).sync();
-    assertTrue(w.failed);
+    assertTrue(testPoint.failed);
     w.close();
     dir.close();
   }
@@ -1014,29 +998,26 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   }
 
   // LUCENE-1347
-  private static final class MockIndexWriter4 extends IndexWriter {
-    public MockIndexWriter4(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
+  private static final class TestPoint4 implements RandomIndexWriter.TestPoint {
    boolean doFail;
 
    @Override
-    boolean testPoint(String name) {
+    public void apply(String name) {
      if (doFail && name.equals("rollback before checkpoint"))
        throw new RuntimeException("intentionally failing");
-      return true;
    }
  }
 
   // LUCENE-1347
   public void testRollbackExceptionHang() throws Throwable {
     Directory dir = newDirectory();
-    MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    TestPoint4 testPoint = new TestPoint4();
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint);
 
     addDoc(w);
-    w.doFail = true;
+    testPoint.doFail = true;
     try {
       w.rollback();
       fail("did not hit intentional RuntimeException");
@@ -1044,7 +1025,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
       // expected
     }
 
-    w.doFail = false;
+    testPoint.doFail = false;
     w.rollback();
     dir.close();
   }

lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java

@@ -47,21 +47,16 @@ public class TestStressIndexing2 extends LuceneTestCase {
   static int maxBufferedDocs=3;
   static int seed=0;
 
-  public class MockIndexWriter extends IndexWriter {
-    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-    }
+  public final class YieldTestPoint implements RandomIndexWriter.TestPoint {
+
     @Override
-    boolean testPoint(String name) {
+    public void apply(String name) {
 //      if (name.equals("startCommit")) {
       if (random().nextInt(4) == 2)
         Thread.yield();
-      return true;
     }
   }
 //
 
   public void testRandomIWReader() throws Throwable {
     Directory dir = newDirectory();
@@ -151,9 +146,9 @@ public class TestStressIndexing2 extends LuceneTestCase {
   public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
-        0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()));
+        0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
     w.commit();
     LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
     lmp.setUseCompoundFile(false);
@@ -202,10 +197,10 @@ public class TestStressIndexing2 extends LuceneTestCase {
   public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates,
                                           boolean doReaderPooling) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
+    IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)
         .setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(maxThreadStates))
-        .setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()));
+        .setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
     LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(mergeFactor);

lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java

@@ -25,17 +25,12 @@ import java.util.Random;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.document.BinaryDocValuesField;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.document.SortedDocValuesField;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.IndexWriter; // javadoc
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NullInfoStream;
 import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
@@ -55,23 +50,22 @@ public class RandomIndexWriter implements Closeable {
   private boolean getReaderCalled;
   private final Codec codec; // sugar
 
-  // Randomly calls Thread.yield so we mixup thread scheduling
-  private static final class MockIndexWriter extends IndexWriter {
-    private final Random r;
-
-    public MockIndexWriter(Random r, Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
-      // TODO: this should be solved in a different way; Random should not be shared (!).
-      this.r = new Random(r.nextLong());
-    }
-
-    @Override
-    boolean testPoint(String name) {
-      if (r.nextInt(4) == 2)
-        Thread.yield();
-      return true;
-    }
-  }
+  public static IndexWriter mockIndexWriter(Directory dir, IndexWriterConfig conf, Random r) throws IOException {
+    // Randomly calls Thread.yield so we mixup thread scheduling
+    final Random random = new Random(r.nextLong());
+    return mockIndexWriter(dir, conf, new TestPoint() {
+      @Override
+      public void apply(String message) {
+        if (random.nextInt(4) == 2)
+          Thread.yield();
+      }
+    });
+  }
+
+  public static IndexWriter mockIndexWriter(Directory dir, IndexWriterConfig conf, TestPoint testPoint) throws IOException {
+    conf.setInfoStream(new TestPointInfoStream(conf.getInfoStream(), testPoint));
+    return new IndexWriter(dir, conf);
+  }
 
   /** create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT and MockAnalyzer */
@@ -93,7 +87,7 @@ public class RandomIndexWriter implements Closeable {
   public RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c) throws IOException {
     // TODO: this should be solved in a different way; Random should not be shared (!).
     this.r = new Random(r.nextLong());
-    w = new MockIndexWriter(r, dir, c);
+    w = mockIndexWriter(dir, c, r);
     flushAt = _TestUtil.nextInt(r, 10, 1000);
     codec = w.getConfig().getCodec();
     if (LuceneTestCase.VERBOSE) {
@@ -345,4 +339,42 @@ public class RandomIndexWriter implements Closeable {
   public void forceMerge(int maxSegmentCount) throws IOException {
     w.forceMerge(maxSegmentCount);
   }
+
+  private static final class TestPointInfoStream extends InfoStream {
+    private final InfoStream delegate;
+    private final TestPoint testPoint;
+
+    public TestPointInfoStream(InfoStream delegate, TestPoint testPoint) {
+      this.delegate = delegate == null ? new NullInfoStream() : delegate;
+      this.testPoint = testPoint;
+    }
+
+    @Override
+    public void close() throws IOException {
+      delegate.close();
+    }
+
+    @Override
+    public void message(String component, String message) {
+      if ("TP".equals(component)) {
+        testPoint.apply(message);
+      }
+      if (delegate.isEnabled(component)) {
+        delegate.message(component, message);
+      }
+    }
+
+    @Override
+    public boolean isEnabled(String component) {
+      return "TP".equals(component) || delegate.isEnabled(component);
+    }
+  }
+
+  /**
+   * Simple interface that is executed for each <tt>TP</tt> {@link InfoStream} component
+   * message. See also {@link RandomIndexWriter#mockIndexWriter(Directory, IndexWriterConfig, TestPoint)}
+   */
+  public static interface TestPoint {
+    public abstract void apply(String message);
+  }
 }
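
With the framework pieces above in place, a test that used to subclass IndexWriter just implements TestPoint and hands it to mockIndexWriter. For example, a sketch of failure injection at the "startMergeInit" test point, mirroring TestPoint3 in TestIndexWriterExceptions above (dir and conf are assumed to come from the surrounding test):

  // Sketch: fail merges at the "startMergeInit" test point, no IndexWriter subclass needed.
  final class FailMergeInit implements RandomIndexWriter.TestPoint {
    volatile boolean doFail;

    @Override
    public void apply(String message) {
      if (doFail && "startMergeInit".equals(message)) {
        throw new RuntimeException("intentionally failing");
      }
    }
  }

  FailMergeInit tp = new FailMergeInit();
  IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, conf, tp); // wraps conf's InfoStream
  tp.doFail = true; // from now on, any merge hitting startMergeInit throws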