LUCENE-5039: Refactor IndexWriter#testPoint() to use InfoStream instead

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1490533 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Simon Willnauer 2013-06-07 08:13:23 +00:00
parent 3ff9c73184
commit 1f2c01754f
8 changed files with 110 additions and 103 deletions

View File

@@ -112,7 +112,7 @@ class DocumentsWriterPerThread {
// Only called by asserts
public boolean testPoint(String name) {
return docWriter.writer.testPoint(name);
return docWriter.testPoint(name);
}
public void clear() {
@@ -232,6 +232,13 @@ class DocumentsWriterPerThread {
aborting = true;
}
final boolean testPoint(String message) {
if (infoStream.isEnabled("TP")) {
infoStream.message("TP", message);
}
return true;
}
boolean checkAndResetHasAborted() {
final boolean retval = hasAborted;
hasAborted = false;
@@ -239,7 +246,7 @@
}
public void updateDocument(IndexDocument doc, Analyzer analyzer, Term delTerm) throws IOException {
assert writer.testPoint("DocumentsWriterPerThread addDocument start");
assert testPoint("DocumentsWriterPerThread addDocument start");
assert deleteQueue != null;
docState.doc = doc;
docState.analyzer = analyzer;
@@ -292,7 +299,7 @@
}
public int updateDocuments(Iterable<? extends IndexDocument> docs, Analyzer analyzer, Term delTerm) throws IOException {
assert writer.testPoint("DocumentsWriterPerThread addDocuments start");
assert testPoint("DocumentsWriterPerThread addDocuments start");
assert deleteQueue != null;
docState.analyzer = analyzer;
if (segmentInfo == null) {

View File

@@ -4162,7 +4162,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
// startCommitMergeDeletes
// startMergeInit
// DocumentsWriter.ThreadState.init start
boolean testPoint(String name) {
private final boolean testPoint(String message) {
if (infoStream.isEnabled("TP")) {
infoStream.message("TP", message);
}
return true;
}

View File

@@ -114,7 +114,7 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
@Override
void finishDocument() throws IOException {
assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument start");
assert docWriter.testPoint("StoredFieldsWriter.finishDocument start");
initFieldsWriter(IOContext.DEFAULT);
fill(docState.docID);
@@ -129,7 +129,7 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
}
reset();
assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument end");
assert docWriter.testPoint("StoredFieldsWriter.finishDocument end");
}
@Override

View File

@@ -94,7 +94,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
@Override
void finishDocument(TermsHash termsHash) throws IOException {
assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument start");
assert docWriter.testPoint("TermVectorsTermsWriter.finishDocument start");
if (!hasVectors) {
return;
@@ -117,7 +117,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
termsHash.reset();
reset();
assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument end");
assert docWriter.testPoint("TermVectorsTermsWriter.finishDocument end");
}
@Override

View File

@@ -25,18 +25,7 @@ import org.apache.lucene.store.*;
import org.apache.lucene.util.*;
public class TestAtomicUpdate extends LuceneTestCase {
private static final class MockIndexWriter extends IndexWriter {
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
@Override
boolean testPoint(String name) {
if (LuceneTestCase.random().nextInt(4) == 2)
Thread.yield();
return true;
}
}
private static abstract class TimedThread extends Thread {
volatile boolean failed;
@@ -124,7 +113,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(7);
((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3);
IndexWriter writer = new MockIndexWriter(directory, conf);
IndexWriter writer = RandomIndexWriter.mockIndexWriter(directory, conf, random());
// Establish a base index of 100 docs:
for(int i=0;i<100;i++) {

View File

@@ -210,15 +210,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
ThreadLocal<Thread> doFail = new ThreadLocal<Thread>();
private class MockIndexWriter extends IndexWriter {
private class TestPoint1 implements RandomIndexWriter.TestPoint {
Random r = new Random(random().nextLong());
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
@Override
boolean testPoint(String name) {
public void apply(String name) {
if (doFail.get() != null && !name.equals("startDoFlush") && r.nextInt(40) == 17) {
if (VERBOSE) {
System.out.println(Thread.currentThread().getName() + ": NOW FAIL: " + name);
@@ -226,7 +221,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
throw new RuntimeException(Thread.currentThread().getName() + ": intentionally failing at " + name);
}
return true;
}
}
@@ -238,8 +232,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
if (VERBOSE) {
@@ -281,8 +276,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
writer.commit();
@@ -324,19 +319,13 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// LUCENE-1198
private static final class MockIndexWriter2 extends IndexWriter {
public MockIndexWriter2(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
private static final class TestPoint2 implements RandomIndexWriter.TestPoint {
boolean doFail;
@Override
boolean testPoint(String name) {
public void apply(String name) {
if (doFail && name.equals("DocumentsWriterPerThread addDocument start"))
throw new RuntimeException("intentionally failing");
return true;
}
}
@@ -367,11 +356,12 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testExceptionDocumentsWriterInit() throws IOException {
Directory dir = newDirectory();
MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
TestPoint2 testPoint = new TestPoint2();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint);
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
w.addDocument(doc);
w.doFail = true;
testPoint.doFail = true;
try {
w.addDocument(doc);
fail("did not hit exception");
@@ -385,7 +375,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1208
public void testExceptionJustBeforeFlush() throws IOException {
Directory dir = newDirectory();
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2), new TestPoint1());
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
w.addDocument(doc);
@@ -412,22 +402,15 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
dir.close();
}
private static final class MockIndexWriter3 extends IndexWriter {
public MockIndexWriter3(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
private static final class TestPoint3 implements RandomIndexWriter.TestPoint {
boolean doFail;
boolean failed;
@Override
boolean testPoint(String name) {
public void apply(String name) {
if (doFail && name.equals("startMergeInit")) {
failed = true;
throw new RuntimeException("intentionally failing");
}
return true;
}
}
@@ -441,8 +424,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
cms.setSuppressExceptions();
conf.setMergeScheduler(cms);
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
w.doFail = true;
TestPoint3 testPoint = new TestPoint3();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, conf, testPoint);
testPoint.doFail = true;
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
for(int i=0;i<10;i++)
@@ -453,7 +437,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
((ConcurrentMergeScheduler) w.getConfig().getMergeScheduler()).sync();
assertTrue(w.failed);
assertTrue(testPoint.failed);
w.close();
dir.close();
}
@@ -1014,29 +998,26 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// LUCENE-1347
private static final class MockIndexWriter4 extends IndexWriter {
public MockIndexWriter4(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
private static final class TestPoint4 implements RandomIndexWriter.TestPoint {
boolean doFail;
@Override
boolean testPoint(String name) {
public void apply(String name) {
if (doFail && name.equals("rollback before checkpoint"))
throw new RuntimeException("intentionally failing");
return true;
}
}
// LUCENE-1347
public void testRollbackExceptionHang() throws Throwable {
Directory dir = newDirectory();
MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
TestPoint4 testPoint = new TestPoint4();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint);
addDoc(w);
w.doFail = true;
testPoint.doFail = true;
try {
w.rollback();
fail("did not hit intentional RuntimeException");
@@ -1044,7 +1025,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// expected
}
w.doFail = false;
testPoint.doFail = false;
w.rollback();
dir.close();
}

View File

@@ -47,21 +47,16 @@ public class TestStressIndexing2 extends LuceneTestCase {
static int maxBufferedDocs=3;
static int seed=0;
public class MockIndexWriter extends IndexWriter {
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
public final class YieldTestPoint implements RandomIndexWriter.TestPoint {
@Override
boolean testPoint(String name) {
public void apply(String name) {
// if (name.equals("startCommit")) {
if (random().nextInt(4) == 2)
Thread.yield();
return true;
}
}
//
public void testRandomIWReader() throws Throwable {
Directory dir = newDirectory();
@@ -151,9 +146,9 @@ public class TestStressIndexing2 extends LuceneTestCase {
public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<String,Document>();
IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()));
0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
w.commit();
LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
@@ -202,10 +197,10 @@ public class TestStressIndexing2 extends LuceneTestCase {
public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates,
boolean doReaderPooling) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<String,Document>();
IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)
.setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setIndexerThreadPool(new ThreadAffinityDocumentsWriterThreadPool(maxThreadStates))
.setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()));
.setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
lmp.setUseCompoundFile(false);
lmp.setMergeFactor(mergeFactor);

View File

@@ -25,17 +25,12 @@ import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.IndexWriter; // javadoc
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NullInfoStream;
import org.apache.lucene.util.Version;
import org.apache.lucene.util._TestUtil;
@@ -55,23 +50,22 @@ public class RandomIndexWriter implements Closeable {
private boolean getReaderCalled;
private final Codec codec; // sugar
// Randomly calls Thread.yield so we mixup thread scheduling
private static final class MockIndexWriter extends IndexWriter {
private final Random r;
public MockIndexWriter(Random r, Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
// TODO: this should be solved in a different way; Random should not be shared (!).
this.r = new Random(r.nextLong());
}
@Override
boolean testPoint(String name) {
if (r.nextInt(4) == 2)
Thread.yield();
return true;
}
public static IndexWriter mockIndexWriter(Directory dir, IndexWriterConfig conf, Random r) throws IOException {
// Randomly calls Thread.yield so we mixup thread scheduling
final Random random = new Random(r.nextLong());
return mockIndexWriter(dir, conf, new TestPoint() {
@Override
public void apply(String message) {
if (random.nextInt(4) == 2)
Thread.yield();
}
});
}
public static IndexWriter mockIndexWriter(Directory dir, IndexWriterConfig conf, TestPoint testPoint) throws IOException {
conf.setInfoStream(new TestPointInfoStream(conf.getInfoStream(), testPoint));
return new IndexWriter(dir, conf);
}
/** create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT and MockAnalyzer */
@@ -93,7 +87,7 @@ public class RandomIndexWriter implements Closeable {
public RandomIndexWriter(Random r, Directory dir, IndexWriterConfig c) throws IOException {
// TODO: this should be solved in a different way; Random should not be shared (!).
this.r = new Random(r.nextLong());
w = new MockIndexWriter(r, dir, c);
w = mockIndexWriter(dir, c, r);
flushAt = _TestUtil.nextInt(r, 10, 1000);
codec = w.getConfig().getCodec();
if (LuceneTestCase.VERBOSE) {
@@ -345,4 +339,42 @@ public class RandomIndexWriter implements Closeable {
public void forceMerge(int maxSegmentCount) throws IOException {
w.forceMerge(maxSegmentCount);
}
private static final class TestPointInfoStream extends InfoStream {
private final InfoStream delegate;
private final TestPoint testPoint;
public TestPointInfoStream(InfoStream delegate, TestPoint testPoint) {
this.delegate = delegate == null ? new NullInfoStream(): delegate;
this.testPoint = testPoint;
}
@Override
public void close() throws IOException {
delegate.close();
}
@Override
public void message(String component, String message) {
if ("TP".equals(component)) {
testPoint.apply(message);
}
if (delegate.isEnabled(component)) {
delegate.message(component, message);
}
}
@Override
public boolean isEnabled(String component) {
return "TP".equals(component) || delegate.isEnabled(component);
}
}
/**
* Simple interface that is executed for each <tt>TP</tt> {@link InfoStream} component
* message. See also {@link RandomIndexWriter#mockIndexWriter(Directory, IndexWriterConfig, TestPoint)}
*/
public static interface TestPoint {
public abstract void apply(String message);
}
}