fix nocommit

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1419840 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2012-12-10 22:32:11 +00:00
parent 7af602f969
commit b9daf3aede
6 changed files with 21 additions and 21 deletions

lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSimpleDocValuesFormat.java

@@ -168,7 +168,7 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
   public NumericDocValuesConsumer addNumericField(FieldInfo field, final long minValue, long maxValue) throws IOException {
     assert fieldSeen(field.name);
     assert (field.getDocValuesType() != null && (DocValues.isNumber(field.getDocValuesType()) || DocValues.isFloat(field.getDocValuesType()))) ||
-           (field.getNormType() != null && (DocValues.isNumber(field.getNormType()) || DocValues.isFloat(field.getNormType())));
+           (field.getNormType() != null && (DocValues.isNumber(field.getNormType()) || DocValues.isFloat(field.getNormType()))): "field=" + field.name;
     writeFieldEntry(field);
     // write our minimum value to the .dat, all entries are deltas from that
@@ -505,16 +505,7 @@ public class SimpleTextSimpleDocValuesFormat extends SimpleDocValuesFormat {
   @Override
   public NumericDocValues getNumeric(FieldInfo fieldInfo) throws IOException {
     final OneField field = fields.get(fieldInfo.name);
-    // This can happen, in exceptional cases, where the
-    // only doc containing a field hit a non-aborting
-    // exception. The field then appears in FieldInfos,
-    // marked as indexed and !omitNorms, and then merging
-    // will try to retrieve it:
-    // nocommit can we somehow avoid this ...?
-    if (field == null) {
-      return null;
-    }
+    assert field != null;
     // SegmentCoreReaders already verifies this field is
     // valid:

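The only functional change in the first hunk above is the assertion message: a bare assert failure reports just a class and line number, while the ": \"field=\" + field.name" suffix names the offending field. A minimal standalone sketch of the idiom (AssertDemo and the field name are hypothetical, not part of this commit):

    // Run with: java -ea AssertDemo
    public class AssertDemo {
      public static void main(String[] args) {
        String fieldName = "price";   // hypothetical field name
        boolean typeIsValid = false;  // force the assertion to fail
        // Without a message the JVM prints only "java.lang.AssertionError";
        // with one it prints "java.lang.AssertionError: field=price".
        assert typeIsValid : "field=" + fieldName;
      }
    }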
lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java

@@ -28,8 +28,6 @@ import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.codecs.FieldInfosWriter;
import org.apache.lucene.codecs.PerDocConsumer;
import org.apache.lucene.codecs.SimpleDVConsumer;
import org.apache.lucene.codecs.SimpleDocValuesFormat;
import org.apache.lucene.index.DocumentsWriterPerThread.DocState;
import org.apache.lucene.index.TypePromoter.TypeCompatibility;
import org.apache.lucene.store.IOContext;
@@ -381,9 +379,6 @@ final class DocFieldProcessor extends DocConsumer {
}
}
DocValuesConsumer docValuesConsumer = perDocConsumer.addValuesField(valueType, fieldInfo);
assert fieldInfo.getDocValuesType() == null || fieldInfo.getDocValuesType() == valueType;
fieldInfo.setDocValuesType(valueType);
docValuesConsumerAndDocID = new DocValuesConsumerHolder(docValuesConsumer);
docValuesConsumerAndDocID.docID = docState.docID;
docValues.put(fieldInfo.name, docValuesConsumerAndDocID);

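Read together with the next two files, this hunk drops DocFieldProcessor's own docvalues-type bookkeeping (the assert and the setDocValuesType call above); the check-and-set now happens in DocValuesProcessor.addField and in FieldInfos, where a conflicting type is reported to the caller as an IllegalArgumentException instead of tripping an assert.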
lucene/core/src/java/org/apache/lucene/index/DocValuesProcessor.java

@@ -49,6 +49,12 @@ final class DocValuesProcessor extends StoredFieldsConsumer {
   public void addField(int docID, StorableField field, FieldInfo fieldInfo) {
     final DocValues.Type dvType = field.fieldType().docValueType();
     if (dvType != null) {
+      DocValues.Type currentDVType = fieldInfo.getDocValuesType();
+      if (currentDVType == null) {
+        fieldInfo.setDocValuesType(dvType);
+      } else if (currentDVType != dvType) {
+        throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + dvType + " for field \"" + fieldInfo.name + "\"");
+      }
       if (DocValues.isBytes(dvType)) {
         addBinaryField(fieldInfo, docID, field.binaryValue());
       } else if (DocValues.isSortedBytes(dvType)) {

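The six added lines implement a set-once policy: the first document that uses a field fixes its DocValues type, and any later document asking for a different type is a caller error surfaced as IllegalArgumentException. A self-contained sketch of the same pattern outside Lucene (TypeRegistry and its string-valued types are hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: mimics the check-and-set added above.
    public class TypeRegistry {
      private final Map<String, String> types = new HashMap<>();

      // First write wins; a later conflicting type is rejected.
      public void setType(String field, String type) {
        String current = types.get(field);
        if (current == null) {
          types.put(field, type);
        } else if (!current.equals(type)) {
          throw new IllegalArgumentException("cannot change DocValues type from "
              + current + " to " + type + " for field \"" + field + "\"");
        }
      }

      public static void main(String[] args) {
        TypeRegistry registry = new TypeRegistry();
        registry.setType("price", "NUMERIC");  // first use: recorded
        registry.setType("price", "BINARY");   // throws IllegalArgumentException
      }
    }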
lucene/core/src/java/org/apache/lucene/index/FieldInfos.java

@@ -313,6 +313,8 @@ public class FieldInfos implements Iterable<FieldInfo> {
final int fieldNumber = globalFieldNumbers.addOrGet(name, preferredFieldNumber, docValues);
fi = addInternal(name, fieldNumber, isIndexed, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType);
} else {
fi.update(isIndexed, storeTermVector, omitNorms, storePayloads, indexOptions);
if (docValues != null) {
DocValues.Type currentDVType = fi.getDocValuesType();
if (currentDVType == null) {
@@ -320,12 +322,16 @@ public class FieldInfos implements Iterable<FieldInfo> {
} else if (currentDVType != docValues) {
throw new IllegalArgumentException("cannot change DocValues type from " + currentDVType + " to " + docValues + " for field \"" + name + "\"");
}
}
fi.update(isIndexed, storeTermVector, omitNorms, storePayloads, indexOptions);
if (docValues != null) {
fi.setDocValuesType(docValues);
}
if (!fi.omitsNorms() && normType != null) {
DocValues.Type currentDVType = fi.getNormType();
if (currentDVType == null) {
fi.setNormValueType(normType);
} else if (currentDVType != normType) {
throw new IllegalArgumentException("cannot change Norm type from " + currentDVType + " to " + normType + " for field \"" + name + "\"");
}
fi.setNormValueType(normType);
}
}

lucene/core/src/java/org/apache/lucene/index/NormsConsumerPerField.java

@@ -95,7 +95,9 @@ final class NormsConsumerPerField extends InvertedDocEndConsumerPerField impleme
   private DocValuesConsumer getConsumer(Type type) throws IOException {
     if (consumer == null) {
-      assert fieldInfo.getNormType() == null || fieldInfo.getNormType() == type;
+      if (fieldInfo.getNormType() != null && fieldInfo.getNormType() != type) {
+        throw new IllegalArgumentException("cannot change Norm type from " + fieldInfo.getNormType() + " to " + type + " for field \"" + fieldInfo.name + "\"");
+      }
       fieldInfo.setNormValueType(type);
       consumer = parent.newConsumer(docState.docWriter.newPerDocWriteState(""), fieldInfo, type);
       this.initType = type;

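This hunk turns an assert into an explicit check. JVM assertions are disabled unless the process runs with -ea, so an illegal norm-type change in production would previously pass silently; the explicit throw fires regardless of JVM flags. A hedged standalone sketch of the difference (validate, the type names, and the field are hypothetical):

    // Illustrative only: the same validation as above, outside Lucene.
    public class NormTypeCheck {
      static void validate(String current, String requested, String field) {
        // An assert here would vanish without -ea:
        //   assert current == null || current.equals(requested);
        // The explicit check fails fast in any JVM configuration:
        if (current != null && !current.equals(requested)) {
          throw new IllegalArgumentException("cannot change Norm type from "
              + current + " to " + requested + " for field \"" + field + "\"");
        }
      }

      public static void main(String[] args) {
        validate(null, "FIXED_INTS_8", "title");       // first use: accepted
        validate("FIXED_INTS_8", "FLOAT_32", "title"); // throws
      }
    }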
lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java

@@ -190,7 +190,7 @@ final class SegmentMerger {
     boolean success = false;
     try {
       for (FieldInfo field : mergeState.fieldInfos) {
-        if (field.isIndexed() && !field.omitsNorms()) {
+        if (field.isIndexed() && !field.omitsNorms() && field.getNormType() != null) {
           List<NumericDocValues> toMerge = new ArrayList<NumericDocValues>();
           for (AtomicReader reader : mergeState.readers) {
             NumericDocValues norms = reader.simpleNormValues(field.name);
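This guard ties the commit together: merging now skips any field whose norm type was never set, such as the exceptional case the deleted comment in SimpleTextSimpleDocValuesFormat described (the only document containing the field hit a non-aborting exception), so the docvalues producer is never asked for norms that do not exist and getNumeric can assert field != null instead of returning null.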