mirror of https://github.com/apache/lucene.git
LUCENE-6019: add another [failing and then fixed] test; do not set FieldInfo.docValueType when it disagrees with low-schema
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1633770 13f79535-47bb-0310-9956-ffa450edef68
commit ca5063df99
parent 0b39dd9e62
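The change guards against an illegal DocValues type change on an existing field. For context, a standalone sketch of the scenario the new test below exercises (hypothetical class name MixedDocValuesTypes; assumes the version-less IndexWriterConfig and analyzer constructors of the Lucene 5.x trunk this commit targets, and uses RAMDirectory and WhitespaceAnalyzer in place of the test framework's newDirectory()/MockAnalyzer helpers):

import java.io.IOException;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;

public class MixedDocValuesTypes {
  public static void main(String[] args) throws IOException {
    Directory dir = new RAMDirectory();

    // First session: field "foo" is indexed with SORTED_SET doc values.
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()));
    Document doc = new Document();
    doc.add(new SortedSetDocValuesField("foo", new BytesRef("foo")));
    w.addDocument(doc);
    w.close();

    // Second session: the same field is added again, first as a plain indexed
    // field and then with a different (BINARY) doc values type.
    w = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()));
    doc = new Document();
    doc.add(new StringField("foo", "bar", Field.Store.NO));
    doc.add(new BinaryDocValuesField("foo", new BytesRef("foo")));
    try {
      w.addDocument(doc);  // must be rejected: "foo" already has SORTED_SET doc values
    } catch (IllegalArgumentException expected) {
      System.out.println("rejected as expected: " + expected.getMessage());
    }
    w.close();
    dir.close();
  }
}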
@@ -387,12 +387,12 @@ final class DefaultIndexingChain extends DocConsumer {
     boolean hasDocValues = fp.fieldInfo.hasDocValues();
 
-    // This will throw an exc if the caller tried to
-    // change the DV type for the field:
-    fp.fieldInfo.setDocValuesType(dvType);
     if (hasDocValues == false) {
+      // This will throw an exc if the caller tried to
+      // change the DV type for the field:
       fieldInfos.globalFieldNumbers.setDocValuesType(fp.fieldInfo.number, fp.fieldInfo.name, dvType);
     }
+    fp.fieldInfo.setDocValuesType(dvType);
 
     int docID = docState.docID;
@@ -314,14 +314,16 @@ public class FieldInfos implements Iterable<FieldInfo> {
       fi.update(storeTermVector, omitNorms, storePayloads, indexOptions);
 
       if (docValues != null) {
-        // only pay the synchronization cost if fi does not already have a DVType
+        // Only pay the synchronization cost if fi does not already have a DVType
         boolean updateGlobal = !fi.hasDocValues();
-        fi.setDocValuesType(docValues); // this will also perform the consistency check.
         if (updateGlobal) {
-          // must also update docValuesType map so it's
-          // aware of this field's DocValueType
+          // Must also update docValuesType map so it's
+          // aware of this field's DocValueType.  This will throw IllegalArgumentException if
+          // an illegal type change was attempted.
           globalFieldNumbers.setDocValuesType(fi.number, name, docValues);
         }
+
+        fi.setDocValuesType(docValues); // this will also perform the consistency check.
       }
     }
     return fi;
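Both the DefaultIndexingChain and FieldInfos hunks apply the same ordering: consult the global field-numbers map, which performs the consistency check and may throw IllegalArgumentException, before recording the type on the per-field FieldInfo, so a rejected document cannot leave the FieldInfo disagreeing with the global schema. A minimal standalone sketch of that check-then-set pattern, using hypothetical stand-in classes rather than Lucene's internals:

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for the global FieldNumbers map ("low schema").
class GlobalSchema {
  private final Map<String, String> dvTypeByField = new HashMap<>();

  // Mirrors the consistency check: reject an attempt to change an existing type.
  synchronized void setDocValuesType(String field, String dvType) {
    String current = dvTypeByField.get(field);
    if (current != null && !current.equals(dvType)) {
      throw new IllegalArgumentException(
          "cannot change DocValues type from " + current + " to " + dvType + " for field \"" + field + "\"");
    }
    dvTypeByField.put(field, dvType);
  }
}

class CheckThenSetSketch {
  public static void main(String[] args) {
    GlobalSchema global = new GlobalSchema();
    String perFieldDvType = null;  // plays the role of FieldInfo.docValueType

    global.setDocValuesType("foo", "SORTED_SET");

    try {
      global.setDocValuesType("foo", "BINARY");  // global consistency check runs first ...
      perFieldDvType = "BINARY";                 // ... per-field state is set only on success
    } catch (IllegalArgumentException expected) {
      System.out.println("rejected: " + expected.getMessage());
    }

    // Because the check came first, the per-field type was never set and cannot
    // disagree with the global schema after the failed add.
    System.out.println("per-field type: " + perFieldDvType);  // still null
  }
}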
@@ -3931,8 +3931,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     // we pass merge.getMergeReaders() instead of merge.readers to allow the
     // OneMerge to return a view over the actual segments to merge
     final SegmentMerger merger = new SegmentMerger(merge.getMergeReaders(),
-                                                   merge.info.info, infoStream, dirWrapper,
-                                                   checkAbort, globalFieldNumberMap,
+                                                   merge.info.info, infoStream, dirWrapper,
+                                                   checkAbort, globalFieldNumberMap,
                                                    context);
 
     merge.checkAborted(directory);
@@ -1,17 +1,5 @@
 package org.apache.lucene.index;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.lucene.codecs.DocValuesFormat;
-import org.apache.lucene.codecs.DocValuesProducer;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.RefCount;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -29,6 +17,18 @@ import org.apache.lucene.util.RefCount;
  * limitations under the License.
  */
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.RefCount;
+
 /**
  * Manages the {@link DocValuesProducer} held by {@link SegmentReader} and
  * keeps track of their reference counting.
@@ -511,6 +511,34 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     dir.close();
   }
 
+  public void testMixedTypesAfterReopenAppend3() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
+    Document doc = new Document();
+    doc.add(new SortedSetDocValuesField("foo", new BytesRef("foo")));
+    w.addDocument(doc);
+    w.close();
+
+    doc = new Document();
+    w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
+    doc.add(new StringField("foo", "bar", Field.Store.NO));
+    doc.add(new BinaryDocValuesField("foo", new BytesRef("foo")));
+    try {
+      // NOTE: this case follows a different code path inside
+      // DefaultIndexingChain/FieldInfos, because the field (foo)
+      // is first added without DocValues:
+      w.addDocument(doc);
+      fail("did not get expected exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    // Also add another document so there is a segment to write here:
+    w.addDocument(new Document());
+    w.forceMerge(1);
+    w.close();
+    dir.close();
+  }
+
   // Two documents with same field as different types, added
   // from separate threads:
   public void testMixedTypesDifferentThreads() throws Exception {