mirror of https://github.com/apache/lucene.git
LUCENE-5611: don't abort segment if term vector settings are wrong
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1591700 13f79535-47bb-0310-9956-ffa450edef68
commit 30006cff7e (parent fff01837d7)
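The change in short: verifyFieldType rejects a field type that enables term vectors on a field that is not indexed by throwing IllegalArgumentException, but before this commit that exception path also marked the DocumentsWriter as aborting, so every document already buffered in the in-memory segment was discarded along with the bad one. After this commit only the offending document is rejected. Below is a minimal caller-side sketch of the resulting behavior (not part of the commit; the class, method, and field names are illustrative and an already-open IndexWriter is assumed), mirroring the test added at the end of this diff:

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;

class BadTVSettingsDemo {
  // Adds one valid document, then one with illegal term-vector settings.
  static void addDocs(IndexWriter writer) throws IOException {
    Document good = new Document();
    good.add(new StoredField("id", "1"));
    writer.addDocument(good);                 // buffered into the current in-memory segment

    FieldType bad = new FieldType(StoredField.TYPE);
    bad.setStoreTermVectors(true);            // illegal: term vectors on a non-indexed field
    bad.freeze();
    Document broken = new Document();
    broken.add(new Field("body", "text", bad));

    try {
      writer.addDocument(broken);             // throws IllegalArgumentException from verifyFieldType
    } catch (IllegalArgumentException expected) {
      // Before LUCENE-5611 this also flagged the writer as aborting, discarding "good";
      // with this change only "broken" is rejected.
    }

    IndexReader reader = DirectoryReader.open(writer, true);
    try {
      assert reader.numDocs() == 1;           // the first document survived the exception
    } finally {
      reader.close();
    }
  }
}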
DefaultIndexingChain.java
@@ -321,51 +321,39 @@ final class DefaultIndexingChain extends DocConsumer {
     }
 
     // Add stored fields:
-    // TODO: if these hit exc today ->>> corrumption!
     fillStoredFields(docState.docID);
     startStoredFields();
 
-    // TODO: clean up this loop, it's complicated because dv exceptions are non-aborting,
-    // but storedfields are. Its also bogus that docvalues are treated as stored fields...
-    for (StorableField field : docState.doc.storableFields()) {
-      final String fieldName = field.name();
-      IndexableFieldType fieldType = field.fieldType();
-      PerField fp = null;
-
-      success = false;
-      try {
-        // TODO: make this non-aborting and change the test to confirm that!!!
-        verifyFieldType(fieldName, fieldType);
-
-        fp = getOrAddField(fieldName, fieldType, false);
-        if (fieldType.stored()) {
-          storedFieldsWriter.writeField(fp.fieldInfo, field);
-        }
-        success = true;
-      } finally {
-        if (!success) {
-          docWriter.setAborting();
-        }
-      }
-
-      success = false;
-      try {
-        DocValuesType dvType = fieldType.docValueType();
-        if (dvType != null) {
-          indexDocValue(fp, dvType, field);
-        }
-        success = true;
-      } finally {
-        if (!success) {
-          // dv failed: so just try to bail on the current doc by calling finishDocument()...
-          finishStoredFields();
-        }
-      }
-    }
-
-    finishStoredFields();
+    // TODO: clean up this loop, it's bogus that docvalues are treated as stored fields...
+    boolean abort = false;
+    try {
+      for (StorableField field : docState.doc.storableFields()) {
+        String fieldName = field.name();
+        IndexableFieldType fieldType = field.fieldType();
+
+        verifyFieldType(fieldName, fieldType);
+
+        PerField fp = getOrAddField(fieldName, fieldType, false);
+        if (fieldType.stored()) {
+          abort = true;
+          storedFieldsWriter.writeField(fp.fieldInfo, field);
+          abort = false;
+        }
+
+        DocValuesType dvType = fieldType.docValueType();
+        if (dvType != null) {
+          indexDocValue(fp, dvType, field);
+        }
+      }
+    } finally {
+      if (abort) {
+        docWriter.setAborting();
+      } else {
+        finishStoredFields();
+      }
+    }
   }
 
   private static void verifyFieldType(String name, IndexableFieldType ft) {
     if (ft.indexed() == false) {
       if (ft.storeTermVectors()) {
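The key point in the hunk above: the two per-field try/finally pairs (one aborting for stored fields, one non-aborting for docvalues) collapse into a single try around the whole loop, and the abort flag is raised only for the duration of storedFieldsWriter.writeField. An exception from verifyFieldType (such as bad term-vector settings) or from docvalues indexing now rejects just the current document, with finishStoredFields() in the finally finishing the current document so the stored-fields data stays consistent; only a failure while actually writing a stored field still aborts the segment. A reduced, self-contained sketch of that control flow (all names below are illustrative, not Lucene API):

// Illustrative only: none of these names are Lucene's. The point is the shape of
// the abort window, which covers just the stored-fields write.
class AbortWindowSketch {
  boolean writerAborting = false;     // stands in for docWriter.setAborting()

  void processDocument(String[] fields) {
    boolean abort = false;
    try {
      for (String field : fields) {
        verifyFieldType(field);       // may throw: rejects only this document
        abort = true;
        writeStoredField(field);      // may throw: stored-fields state is now suspect
        abort = false;
        indexDocValue(field);         // may throw: rejects only this document
      }
    } finally {
      if (abort) {
        writerAborting = true;        // discard the whole in-memory segment
      } else {
        finishStoredFields();         // finish this doc so the stored fields stay consistent
      }
    }
  }

  void verifyFieldType(String field) {
    if (field.isEmpty()) {
      throw new IllegalArgumentException("illegal field settings: " + field);
    }
  }

  void writeStoredField(String field) { /* append the value to the stored-fields data */ }
  void indexDocValue(String field)    { /* record the doc value; failures are non-aborting */ }
  void finishStoredFields()           { /* finishDocument() on the stored-fields writer */ }
}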
TestTermVectorsWriter.java
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CachingTokenFilter;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -27,6 +28,7 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -660,4 +662,32 @@ public class TestTermVectorsWriter extends LuceneTestCase {
     iw.close();
     dir.close();
   }
+
+  // LUCENE-5611: don't abort segment when term vector settings are wrong
+  public void testNoAbortOnBadTVSettings() throws Exception {
+    Directory dir = newDirectory();
+    // Don't use RandomIndexWriter because we want to be sure both docs go to 1 seg:
+    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriter iw = new IndexWriter(dir, iwc);
+
+    Document doc = new Document();
+    iw.addDocument(doc);
+    FieldType ft = new FieldType(StoredField.TYPE);
+    ft.setStoreTermVectors(true);
+    ft.freeze();
+    doc.add(new Field("field", "value", ft));
+    try {
+      iw.addDocument(doc);
+      fail("should have hit exc");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+    IndexReader r = DirectoryReader.open(iw, true);
+
+    // Make sure the exc didn't lose our first document:
+    assertEquals(1, r.numDocs());
+    iw.close();
+    r.close();
+    dir.close();
+  }
 }