LUCENE-6019: detect when DocValuesType illegally changes; add -Dtests.asserts=true|false

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1633724 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2014-10-22 22:33:40 +00:00
parent 31523073ad
commit 0b39dd9e62
16 changed files with 142 additions and 72 deletions


@@ -192,6 +192,11 @@ Bug Fixes
behave properly when wrapping other ValueSources which do not exist for the specified document
(hossman)
* LUCENE-6019: Detect when DocValuesType illegally changes for the
same field name. Also added -Dtests.asserts=true|false so we can
run tests with and without assertions. (Simon Willnauer, Robert
Muir, Mike McCandless).
Documentation
* LUCENE-5392: Add/improve analysis package documentation to reflect

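To make the new check concrete, here is a minimal sketch (not part of this commit; it mirrors the TestDocValuesIndexing cases further down and assumes the trunk API at this commit, where IndexWriterConfig takes just an Analyzer). The second document tries to switch field "foo" from NUMERIC to SORTED doc values, and addDocument now fails fast:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;

public class DocValuesTypeChangeDemo {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new RAMDirectory()) {
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
      Document doc = new Document();
      doc.add(new NumericDocValuesField("foo", 0));
      w.addDocument(doc);

      Document doc2 = new Document();
      doc2.add(new SortedDocValuesField("foo", new BytesRef("hello")));
      try {
        w.addDocument(doc2);
      } catch (IllegalArgumentException expected) {
        // "cannot change DocValues type from NUMERIC to SORTED for field \"foo\""
        System.out.println(expected.getMessage());
      }
      w.close();
    }
  }
}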

@@ -120,11 +120,15 @@
<property name="tests.monster" value="false" />
<property name="tests.slow" value="true" />
<property name="tests.cleanthreads.sysprop" value="perMethod"/>
<property name="tests.asserts.gracious" value="false"/>
<property name="tests.verbose" value="false"/>
<property name="tests.infostream" value="${tests.verbose}"/>
<property name="tests.filterstacks" value="true"/>
<property name="tests.luceneMatchVersion" value="${version.base}"/>
<property name="tests.asserts" value="true" />
<condition property="tests.asserts.args" value="-ea -esa" else="">
<istrue value="${tests.asserts}"/>
</condition>
<condition property="tests.heapsize" value="768M" else="512M">
<isset property="run.clover"/>
@@ -963,16 +967,11 @@
<classpath refid="@{junit.classpath}"/>
<classpath refid="clover.classpath" />
<!-- Assertions. -->
<assertions>
<enable package="org.apache.lucene"/>
<enable package="org.apache.solr"/>
</assertions>
<!-- JVM arguments and system properties. -->
<jvmarg line="${args}"/>
<jvmarg line="${tests.heapdump.args}"/>
<jvmarg line="${tests.clover.args}"/>
<jvmarg line="${tests.asserts.args}"/>
<!-- set the number of times tests should run -->
<sysproperty key="tests.iters" value="${tests.iters}"/>
@@ -1010,7 +1009,7 @@
<sysproperty key="tests.slow" value="@{tests.slow}"/>
<!-- set whether tests framework should not require java assertions enabled -->
<sysproperty key="tests.asserts.gracious" value="${tests.asserts.gracious}"/>
<sysproperty key="tests.asserts" value="${tests.asserts}"/>
<!-- TODO: create propertyset for test properties, so each project can have its own set -->
<sysproperty key="tests.multiplier" value="@{tests.multiplier}"/>

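Taken together, the build changes replace the always-on <assertions> block (and the old tests.asserts.gracious property) with a single switch: tests.asserts defaults to true and expands to the JVM flags -ea -esa, or to nothing. A hedged usage example, assuming the usual Lucene ant targets:

    ant test -Dtests.asserts=false

runs the suite with assertions disabled, which previously required editing the hard-coded <assertions> element that this commit removes.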

@@ -78,9 +78,8 @@ class DocumentsWriterPerThread {
this.infoStream = infoStream;
}
// Only called by asserts
public boolean testPoint(String name) {
return docWriter.testPoint(name);
public void testPoint(String name) {
docWriter.testPoint(name);
}
public void clear() {
@@ -203,11 +202,10 @@ class DocumentsWriterPerThread {
return retval;
}
final boolean testPoint(String message) {
final void testPoint(String message) {
if (infoStream.isEnabled("TP")) {
infoStream.message("TP", message);
}
return true;
}
/** Anything that will add N docs to the index should reserve first to
@@ -221,7 +219,7 @@ class DocumentsWriterPerThread {
}
public void updateDocument(IndexDocument doc, Analyzer analyzer, Term delTerm) throws IOException {
assert testPoint("DocumentsWriterPerThread addDocument start");
testPoint("DocumentsWriterPerThread addDocument start");
assert deleteQueue != null;
docState.doc = doc;
docState.analyzer = analyzer;
@@ -259,7 +257,7 @@ class DocumentsWriterPerThread {
}
public int updateDocuments(Iterable<? extends IndexDocument> docs, Analyzer analyzer, Term delTerm) throws IOException {
assert testPoint("DocumentsWriterPerThread addDocuments start");
testPoint("DocumentsWriterPerThread addDocuments start");
assert deleteQueue != null;
docState.analyzer = analyzer;
if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {


@@ -70,9 +70,9 @@ final class DocumentsWriterStallControl {
if (stalled) { // react on the first wakeup call!
// don't loop here, higher level logic will re-stall!
try {
assert incWaiters();
incWaiters();
wait();
assert decrWaiters();
decrWaiters();
} catch (InterruptedException e) {
throw new ThreadInterruptedException(e);
}
@@ -86,17 +86,16 @@
}
private boolean incWaiters() {
private void incWaiters() {
numWaiting++;
assert waiting.put(Thread.currentThread(), Boolean.TRUE) == null;
return numWaiting > 0;
assert numWaiting > 0;
}
private boolean decrWaiters() {
private void decrWaiters() {
numWaiting--;
assert waiting.remove(Thread.currentThread()) != null;
return numWaiting >= 0;
assert numWaiting >= 0;
}
synchronized boolean hasBlocked() { // for tests

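Why this change matters is easy to miss: a side-effecting call wrapped in assert simply never executes when assertions are disabled, so the waiter bookkeeping above was silently skipped in -da runs. A self-contained sketch of the before/after pattern (hypothetical Counter class, not from this commit):

class Counter {
  private int waiters;

  // Old pattern: the increment lives in a helper that was ONLY invoked
  // via "assert incWaitersOld();" -- without -ea the increment never runs.
  private boolean incWaitersOld() {
    waiters++;
    return waiters > 0;
  }

  void stallOld() {
    assert incWaitersOld(); // no-op under -da: waiters stays 0
  }

  // New pattern: the side effect always runs; only the sanity check
  // remains assertion-guarded.
  private void incWaitersNew() {
    waiters++;
    assert waiters > 0;
  }

  void stallNew() {
    incWaitersNew(); // correct with or without -ea
  }
}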

@@ -220,11 +220,17 @@ public class FieldInfos implements Iterable<FieldInfo> {
return fieldNumber.intValue();
}
// used by assert
synchronized boolean containsConsistent(Integer number, String name, DocValuesType dvType) {
return name.equals(numberToName.get(number))
&& number.equals(nameToNumber.get(name)) &&
(dvType == null || docValuesType.get(name) == null || dvType == docValuesType.get(name));
synchronized void verifyConsistent(Integer number, String name, DocValuesType dvType) {
if (name.equals(numberToName.get(number)) == false) {
throw new IllegalArgumentException("field number " + number + " is already mapped to field name \"" + numberToName.get(number) + "\", not \"" + name + "\"");
}
if (number.equals(nameToNumber.get(name)) == false) {
throw new IllegalArgumentException("field name \"" + name + "\" is already mapped to field number \"" + nameToNumber.get(name) + "\", not \"" + number + "\"");
}
DocValuesType currentDVType = docValuesType.get(name);
if (dvType != null && currentDVType != null && dvType != currentDVType) {
throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + name + "\"");
}
}
/**
@@ -248,7 +254,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
}
synchronized void setDocValuesType(int number, String name, DocValuesType dvType) {
assert containsConsistent(number, name, dvType);
verifyConsistent(number, name, dvType);
docValuesType.put(name, dvType);
}
}
@@ -302,7 +308,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
final int fieldNumber = globalFieldNumbers.addOrGet(name, preferredFieldNumber, docValues);
fi = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, -1, null);
assert !byName.containsKey(fi.name);
assert globalFieldNumbers.containsConsistent(Integer.valueOf(fi.number), fi.name, fi.getDocValuesType());
globalFieldNumbers.verifyConsistent(Integer.valueOf(fi.number), fi.name, fi.getDocValuesType());
byName.put(fi.name, fi);
} else {
fi.update(storeTermVector, omitNorms, storePayloads, indexOptions);


@@ -110,8 +110,6 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField {
void newTerm(final int termID) {
// First time we're seeing this term since the last
// flush
assert docState.testPoint("FreqProxTermsWriterPerField.newTerm start");
final FreqProxPostingsArray postings = freqProxPostingsArray;
postings.lastDocIDs[termID] = docState.docID;
@@ -136,9 +134,6 @@ final class FreqProxTermsWriterPerField extends TermsHashPerField {
@Override
void addTerm(final int termID) {
assert docState.testPoint("FreqProxTermsWriterPerField.addTerm start");
final FreqProxPostingsArray postings = freqProxPostingsArray;
assert !hasFreq || postings.termFreqs[termID] > 0;


@@ -2013,7 +2013,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
infoStream.message("IW", "rollback: infos=" + segString(segmentInfos));
}
assert testPoint("rollback before checkpoint");
testPoint("rollback before checkpoint");
// Ask deleter to locate unreferenced files & remove
// them:
@@ -2711,7 +2711,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
}
doBeforeFlush();
assert testPoint("startDoFlush");
testPoint("startDoFlush");
SegmentInfos toCommit = null;
boolean anySegmentsFlushed = false;
@@ -2997,7 +2997,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
}
doBeforeFlush();
assert testPoint("startDoFlush");
testPoint("startDoFlush");
boolean success = false;
try {
@@ -3080,9 +3080,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// for testing only
DocumentsWriter getDocsWriter() {
boolean test = false;
assert test = true;
return test ? docWriter : null;
return docWriter;
}
/** Expert: Return the number of documents currently
@@ -3167,7 +3165,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
*/
synchronized private ReadersAndUpdates commitMergedDeletesAndUpdates(MergePolicy.OneMerge merge, MergeState mergeState) throws IOException {
assert testPoint("startCommitMergeDeletes");
testPoint("startCommitMergeDeletes");
final List<SegmentCommitInfo> sourceSegments = merge.segments;
@@ -3352,7 +3350,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
synchronized private boolean commitMerge(MergePolicy.OneMerge merge, MergeState mergeState) throws IOException {
assert testPoint("startCommitMerge");
testPoint("startCommitMerge");
if (tragedy != null) {
throw new IllegalStateException("this writer hit an unrecoverable error; cannot complete merge", tragedy);
@@ -3696,7 +3694,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
synchronized private void _mergeInit(MergePolicy.OneMerge merge) throws IOException {
assert testPoint("startMergeInit");
testPoint("startMergeInit");
assert merge.registerDone;
assert merge.maxNumSegments == -1 || merge.maxNumSegments > 0;
@@ -4234,7 +4232,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
* it. */
private void startCommit(final SegmentInfos toSync) throws IOException {
assert testPoint("startStartCommit");
testPoint("startStartCommit");
assert pendingCommit == null;
if (tragedy != null) {
@@ -4270,13 +4268,13 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
assert filesExist(toSync);
}
assert testPoint("midStartCommit");
testPoint("midStartCommit");
boolean pendingCommitSet = false;
try {
assert testPoint("midStartCommit2");
testPoint("midStartCommit2");
synchronized(this) {
@@ -4314,7 +4312,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
infoStream.message("IW", "done all syncs: " + filesToSync);
}
assert testPoint("midStartCommitSuccess");
testPoint("midStartCommitSuccess");
} finally {
synchronized(this) {
@@ -4338,7 +4336,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
} catch (OutOfMemoryError oom) {
tragicEvent(oom, "startCommit");
}
assert testPoint("finishStartCommit");
testPoint("finishStartCommit");
}
/**
@@ -4415,7 +4413,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
IOUtils.reThrowUnchecked(tragedy);
}
// Used only by assert for testing. Current points:
// Used for testing. Current points:
// startDoFlush
// startCommitMerge
// startStartCommit
@@ -4426,11 +4424,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// startCommitMergeDeletes
// startMergeInit
// DocumentsWriter.ThreadState.init start
private final boolean testPoint(String message) {
private final void testPoint(String message) {
if (infoStream.isEnabled("TP")) {
infoStream.message("TP", message);
}
return true;
}
synchronized boolean nrtIsCurrent(SegmentInfos infos) {

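The getDocsWriter change above retires a related trick: the getter used an assert with an intentional assignment side effect so that it returned the DocumentsWriter only when -ea was on, and null otherwise. Isolated for clarity (a hedged reconstruction, not tied to IndexWriter's real types):

class GuardedGetterDemo {
  static Object guardedGetter(Object docWriter) {
    boolean test = false;
    assert test = true; // the assignment executes only under -ea
    return test ? docWriter : null;
  }
}

After this commit the getter returns docWriter unconditionally, so test-only callers no longer get null in -da runs.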

@@ -93,8 +93,6 @@ final class TermVectorsConsumer extends TermsHash {
@Override
void finishDocument() throws IOException {
assert docWriter.testPoint("TermVectorsTermsWriter.finishDocument start");
if (!hasVectors) {
return;
}
@@ -119,7 +117,6 @@
super.reset();
resetFields();
assert docWriter.testPoint("TermVectorsTermsWriter.finishDocument end");
}
@Override


@@ -63,8 +63,6 @@ final class TermVectorsConsumerPerField extends TermsHashPerField {
doVectors = false;
assert docState.testPoint("TermVectorsTermsWriterPerField.finish start");
final int numPostings = bytesHash.size();
final BytesRef flushTerm = termsWriter.flushTerm;
@@ -226,7 +224,6 @@ final class TermVectorsConsumerPerField extends TermsHashPerField {
@Override
void newTerm(final int termID) {
assert docState.testPoint("TermVectorsTermsWriterPerField.newTerm start");
TermVectorsPostingsArray postings = termVectorsPostingsArray;
postings.freqs[termID] = 1;
@@ -238,7 +235,6 @@
@Override
void addTerm(final int termID) {
assert docState.testPoint("TermVectorsTermsWriterPerField.addTerm start");
TermVectorsPostingsArray postings = termVectorsPostingsArray;
postings.freqs[termID]++;


@@ -17,8 +17,8 @@ package org.apache.lucene;
* limitations under the License.
*/
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.util.LuceneTestCase;
/**
* validate that assertions are enabled during tests
@@ -43,13 +43,14 @@ public class TestAssertions extends LuceneTestCase {
public void testTokenStreams() {
new TestTokenStream1();
new TestTokenStream2();
boolean doFail = false;
try {
new TestTokenStream3();
doFail = true;
if (assertsAreEnabled) {
fail("TestTokenStream3 should fail assertion");
}
} catch (AssertionError e) {
// expected
e.printStackTrace(System.out);
}
assertFalse("TestTokenStream3 should fail assertion", doFail);
}
}


@@ -231,6 +231,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
try {
w.addDocument(doc);
fail("didn't hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
@@ -253,6 +254,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
try {
w.addDocument(doc);
fail("didn't hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
@@ -420,6 +422,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
try {
w.addDocument(doc);
fail("did not get expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
@@ -456,12 +459,58 @@ public class TestDocValuesIndexing extends LuceneTestCase {
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
w = new IndexWriter(dir, iwc);
doc = new Document();
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
w.addDocument(doc);
w.close();
dir.close();
}
public void testMixedTypesAfterReopenAppend1() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
w.close();
w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
doc = new Document();
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
try {
w.addDocument(doc);
fail("did not get expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
w.close();
dir.close();
}
public void testMixedTypesAfterReopenAppend2() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))) ;
Document doc = new Document();
doc.add(new SortedSetDocValuesField("foo", new BytesRef("foo")));
w.addDocument(doc);
w.close();
doc = new Document();
w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
doc.add(new StringField("foo", "bar", Field.Store.NO));
doc.add(new BinaryDocValuesField("foo", new BytesRef("foo")));
try {
// NOTE: this case follows a different code path inside
// DefaultIndexingChain/FieldInfos, because the field (foo)
// is first added without DocValues:
w.addDocument(doc);
fail("did not get expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
w.forceMerge(1);
w.close();
dir.close();
}
// Two documents with same field as different types, added
// from separate threads:
public void testMixedTypesDifferentThreads() throws Exception {
@@ -528,6 +577,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
try {
w.addIndexes(new Directory[] {dir2});
fail("didn't hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
@@ -535,6 +585,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir2);
try {
w.addIndexes(new IndexReader[] {r});
fail("didn't hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
@@ -819,5 +870,4 @@ public class TestDocValuesIndexing extends LuceneTestCase {
dir.close();
}
}


@@ -54,7 +54,7 @@ public class RandomIndexWriter implements Closeable {
public static IndexWriter mockIndexWriter(Directory dir, IndexWriterConfig conf, Random r) throws IOException {
// Randomly calls Thread.yield so we mixup thread scheduling
final Random random = new Random(r.nextLong());
return mockIndexWriter(dir, conf, new TestPoint() {
return mockIndexWriter(dir, conf, new TestPoint() {
@Override
public void apply(String message) {
if (random.nextInt(4) == 2)


@@ -859,7 +859,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions;
}
assert false : "unreferenced files: before delete:\n " + Arrays.toString(startFiles) + "\n after delete:\n " + Arrays.toString(endFiles) + extras;
throw new RuntimeException("unreferenced files: before delete:\n " + Arrays.toString(startFiles) + "\n after delete:\n " + Arrays.toString(endFiles) + extras);
}
DirectoryReader ir1 = DirectoryReader.open(this);


@@ -78,7 +78,6 @@ import org.apache.lucene.index.IndexReader.ReaderClosedListener;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LiveIndexWriterConfig;
@@ -387,6 +386,8 @@ public abstract class LuceneTestCase extends Assert {
*/
public static final int RANDOM_MULTIPLIER = systemPropertyAsInt("tests.multiplier", 1);
public static final boolean TEST_ASSERTS_ENABLED = systemPropertyAsBoolean("tests.asserts", true);
/** TODO: javadoc? */
public static final String DEFAULT_LINE_DOCS_FILE = "europarl.lines.txt.gz";
@@ -2461,4 +2462,13 @@ public abstract class LuceneTestCase extends Assert {
public static Path createTempFile() throws IOException {
return createTempFile("tempFile", ".tmp");
}
/** True if assertions (-ea) are enabled (at least for this class). */
public static final boolean assertsAreEnabled;
static {
boolean enabled = false;
assert enabled = true; // Intentional side-effect!!!
assertsAreEnabled = enabled;
}
}

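The static block above is the standard probe for -ea: the assert's condition is an assignment, so the flag flips only if the assert statement actually executes. A standalone version (hypothetical class name) behaves like this:

public class AssertProbe {
  public static void main(String[] args) {
    boolean enabled = false;
    assert enabled = true; // assignment runs only when assertions are enabled
    System.out.println("assertions enabled: " + enabled);
  }
}

java -ea AssertProbe prints "assertions enabled: true"; plain java AssertProbe prints "assertions enabled: false".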

@@ -177,6 +177,12 @@ public final class RunListenerPrintReproduceInfo extends RunListener {
}
}
if (LuceneTestCase.assertsAreEnabled) {
addVmOpt(b, "tests.asserts", "true");
} else {
addVmOpt(b, "tests.asserts", "false");
}
addVmOpt(b, "tests.file.encoding", System.getProperty("file.encoding"));
System.err.println(b.toString());


@@ -31,11 +31,22 @@ public class TestRuleAssertionsRequired implements TestRule {
@Override
public void evaluate() throws Throwable {
try {
assert false;
String msg = "Test class requires enabled assertions, enable globally (-ea)" +
" or for Solr/Lucene subpackages only: " + description.getClassName();
System.err.println(msg);
throw new Exception(msg);
// Make sure -ea matches -Dtests.asserts, to catch accidental mis-use:
if (LuceneTestCase.assertsAreEnabled != LuceneTestCase.TEST_ASSERTS_ENABLED) {
String msg = "Assertions mismatch: ";
if (LuceneTestCase.assertsAreEnabled) {
msg += "-ea was specified";
} else {
msg += "-ea was not specified";
}
if (LuceneTestCase.TEST_ASSERTS_ENABLED) {
msg += " but -Dtests.asserts=true";
} else {
msg += " but -Dtests.asserts=false";
}
System.err.println(msg);
throw new Exception(msg);
}
} catch (AssertionError e) {
// Ok, enabled.
}
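With both signals available, the rule now also catches a mismatched run instead of only failing when assertions are off. For example (hypothetical invocation; common-build.xml passes ${args} straight to the forked test JVM):

    ant test -Dtests.asserts=false -Dargs="-ea"

would abort with "Assertions mismatch: -ea was specified but -Dtests.asserts=false" instead of quietly running under a configuration the framework was not told about.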