mirror of https://github.com/apache/lucene.git
add file names to corruption messages
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1470185 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
5fb07458d4
commit
834f7da9d9
|
@ -92,47 +92,47 @@ class DiskDocValuesProducer extends DocValuesProducer {
|
|||
} else if (type == DiskDocValuesFormat.SORTED) {
|
||||
// sorted = binary + numeric
|
||||
if (meta.readVInt() != fieldNumber) {
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
if (meta.readByte() != DiskDocValuesFormat.BINARY) {
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
BinaryEntry b = readBinaryEntry(meta);
|
||||
binaries.put(fieldNumber, b);
|
||||
|
||||
if (meta.readVInt() != fieldNumber) {
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
if (meta.readByte() != DiskDocValuesFormat.NUMERIC) {
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sorted entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
NumericEntry n = readNumericEntry(meta);
|
||||
ords.put(fieldNumber, n);
|
||||
} else if (type == DiskDocValuesFormat.SORTED_SET) {
|
||||
// sortedset = binary + numeric + ordIndex
|
||||
if (meta.readVInt() != fieldNumber) {
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
if (meta.readByte() != DiskDocValuesFormat.BINARY) {
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
BinaryEntry b = readBinaryEntry(meta);
|
||||
binaries.put(fieldNumber, b);
|
||||
|
||||
if (meta.readVInt() != fieldNumber) {
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
if (meta.readByte() != DiskDocValuesFormat.NUMERIC) {
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
NumericEntry n1 = readNumericEntry(meta);
|
||||
ords.put(fieldNumber, n1);
|
||||
|
||||
if (meta.readVInt() != fieldNumber) {
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
if (meta.readByte() != DiskDocValuesFormat.NUMERIC) {
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt");
|
||||
throw new CorruptIndexException("sortedset entry for field: " + fieldNumber + " is corrupt (resource=" + meta + ")");
|
||||
}
|
||||
NumericEntry n2 = readNumericEntry(meta);
|
||||
ordIndexes.put(fieldNumber, n2);
|
||||
|
|
|
@ -165,7 +165,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
|
|||
try {
|
||||
bd = (BigDecimal) decoder.parse(scratch.utf8ToString());
|
||||
} catch (ParseException pe) {
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse BigDecimal value");
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse BigDecimal value (resource=" + in + ")");
|
||||
e.initCause(pe);
|
||||
throw e;
|
||||
}
|
||||
|
@ -203,7 +203,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
|
|||
try {
|
||||
len = decoder.parse(new String(scratch.bytes, scratch.offset + LENGTH.length, scratch.length - LENGTH.length, "UTF-8")).intValue();
|
||||
} catch (ParseException pe) {
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse int length");
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse int length (resource=" + in + ")");
|
||||
e.initCause(pe);
|
||||
throw e;
|
||||
}
|
||||
|
@ -243,7 +243,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
|
|||
try {
|
||||
return ordDecoder.parse(scratch.utf8ToString()).intValue();
|
||||
} catch (ParseException pe) {
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse ord");
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse ord (resource=" + in + ")");
|
||||
e.initCause(pe);
|
||||
throw e;
|
||||
}
|
||||
|
@ -265,7 +265,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
|
|||
try {
|
||||
len = decoder.parse(new String(scratch.bytes, scratch.offset + LENGTH.length, scratch.length - LENGTH.length, "UTF-8")).intValue();
|
||||
} catch (ParseException pe) {
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse int length");
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse int length (resource=" + in + ")");
|
||||
e.initCause(pe);
|
||||
throw e;
|
||||
}
|
||||
|
@ -343,7 +343,7 @@ class SimpleTextDocValuesReader extends DocValuesProducer {
|
|||
try {
|
||||
len = decoder.parse(new String(scratch.bytes, scratch.offset + LENGTH.length, scratch.length - LENGTH.length, "UTF-8")).intValue();
|
||||
} catch (ParseException pe) {
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse int length");
|
||||
CorruptIndexException e = new CorruptIndexException("failed to parse int length (resource=" + in + ")");
|
||||
e.initCause(pe);
|
||||
throw e;
|
||||
}
|
||||
|
|
|
@ -82,7 +82,7 @@ public final class CompressingStoredFieldsIndexReader implements Closeable, Clon
|
|||
avgChunkDocs[blockCount] = fieldsIndexIn.readVInt();
|
||||
final int bitsPerDocBase = fieldsIndexIn.readVInt();
|
||||
if (bitsPerDocBase > 32) {
|
||||
throw new CorruptIndexException("Corrupted");
|
||||
throw new CorruptIndexException("Corrupted bitsPerDocBase (resource=" + fieldsIndexIn + ")");
|
||||
}
|
||||
docBasesDeltas[blockCount] = PackedInts.getReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerDocBase);
|
||||
|
||||
|
@ -91,7 +91,7 @@ public final class CompressingStoredFieldsIndexReader implements Closeable, Clon
|
|||
avgChunkSizes[blockCount] = fieldsIndexIn.readVLong();
|
||||
final int bitsPerStartPointer = fieldsIndexIn.readVInt();
|
||||
if (bitsPerStartPointer > 64) {
|
||||
throw new CorruptIndexException("Corrupted");
|
||||
throw new CorruptIndexException("Corrupted bitsPerStartPointer (resource=" + fieldsIndexIn + ")");
|
||||
}
|
||||
startPointersDeltas[blockCount] = PackedInts.getReaderNoHeader(fieldsIndexIn, PackedInts.Format.PACKED, packedIntsVersion, numChunks, bitsPerStartPointer);
|
||||
|
||||
|
|
|
@ -202,7 +202,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
|| docBase + chunkDocs > numDocs) {
|
||||
throw new CorruptIndexException("Corrupted: docID=" + docID
|
||||
+ ", docBase=" + docBase + ", chunkDocs=" + chunkDocs
|
||||
+ ", numDocs=" + numDocs);
|
||||
+ ", numDocs=" + numDocs + " (resource=" + fieldsStream + ")");
|
||||
}
|
||||
|
||||
final int numStoredFields, offset, length, totalLength;
|
||||
|
@ -216,7 +216,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
if (bitsPerStoredFields == 0) {
|
||||
numStoredFields = fieldsStream.readVInt();
|
||||
} else if (bitsPerStoredFields > 31) {
|
||||
throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields);
|
||||
throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
|
||||
} else {
|
||||
final long filePointer = fieldsStream.getFilePointer();
|
||||
final PackedInts.Reader reader = PackedInts.getDirectReaderNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
|
||||
|
@ -230,7 +230,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
offset = (docID - docBase) * length;
|
||||
totalLength = chunkDocs * length;
|
||||
} else if (bitsPerStoredFields > 31) {
|
||||
throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
|
||||
throw new CorruptIndexException("bitsPerLength=" + bitsPerLength + " (resource=" + fieldsStream + ")");
|
||||
} else {
|
||||
final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
|
||||
int off = 0;
|
||||
|
@ -248,7 +248,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
}
|
||||
|
||||
if ((length == 0) != (numStoredFields == 0)) {
|
||||
throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields);
|
||||
throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields + " (resource=" + fieldsStream + ")");
|
||||
}
|
||||
if (numStoredFields == 0) {
|
||||
// nothing to do
|
||||
|
@ -338,7 +338,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
|| docBase + chunkDocs > numDocs) {
|
||||
throw new CorruptIndexException("Corrupted: current docBase=" + this.docBase
|
||||
+ ", current numDocs=" + this.chunkDocs + ", new docBase=" + docBase
|
||||
+ ", new numDocs=" + chunkDocs);
|
||||
+ ", new numDocs=" + chunkDocs + " (resource=" + fieldsStream + ")");
|
||||
}
|
||||
this.docBase = docBase;
|
||||
this.chunkDocs = chunkDocs;
|
||||
|
@ -357,7 +357,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
if (bitsPerStoredFields == 0) {
|
||||
Arrays.fill(numStoredFields, 0, chunkDocs, fieldsStream.readVInt());
|
||||
} else if (bitsPerStoredFields > 31) {
|
||||
throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields);
|
||||
throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
|
||||
} else {
|
||||
final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(fieldsStream, PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
|
||||
for (int i = 0; i < chunkDocs; ++i) {
|
||||
|
@ -387,7 +387,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
|
|||
final int chunkSize = chunkSize();
|
||||
decompressor.decompress(fieldsStream, chunkSize, 0, chunkSize, bytes);
|
||||
if (bytes.length != chunkSize) {
|
||||
throw new CorruptIndexException("Corrupted: expected chunk size = " + chunkSize() + ", got " + bytes.length);
|
||||
throw new CorruptIndexException("Corrupted: expected chunk size = " + chunkSize() + ", got " + bytes.length + " (resource=" + fieldsStream + ")");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -187,7 +187,7 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implem
|
|||
final int docBase = vectorsStream.readVInt();
|
||||
final int chunkDocs = vectorsStream.readVInt();
|
||||
if (doc < docBase || doc >= docBase + chunkDocs || docBase + chunkDocs > numDocs) {
|
||||
throw new CorruptIndexException("docBase=" + docBase + ",chunkDocs=" + chunkDocs + ",doc=" + doc);
|
||||
throw new CorruptIndexException("docBase=" + docBase + ",chunkDocs=" + chunkDocs + ",doc=" + doc + " (resource=" + vectorsStream + ")");
|
||||
}
|
||||
|
||||
final int skip; // number of fields to skip
|
||||
|
|
|
@ -134,7 +134,7 @@ public abstract class CompressionMode {
|
|||
}
|
||||
final int decompressedLength = LZ4.decompress(in, offset + length, bytes.bytes, 0);
|
||||
if (decompressedLength > originalLength) {
|
||||
throw new CorruptIndexException("Corrupted: lengths mismatch: " + decompressedLength + " > " + originalLength);
|
||||
throw new CorruptIndexException("Corrupted: lengths mismatch: " + decompressedLength + " > " + originalLength + " (resource=" + in + ")");
|
||||
}
|
||||
bytes.offset = offset;
|
||||
bytes.length = length;
|
||||
|
@ -222,7 +222,7 @@ public abstract class CompressionMode {
|
|||
}
|
||||
}
|
||||
if (bytes.length != originalLength) {
|
||||
throw new CorruptIndexException("Lengths mismatch: " + bytes.length + " != " + originalLength);
|
||||
throw new CorruptIndexException("Lengths mismatch: " + bytes.length + " != " + originalLength + " (resource=" + in + ")");
|
||||
}
|
||||
bytes.offset = offset;
|
||||
bytes.length = length;
|
||||
|
|
|
@ -99,7 +99,7 @@ class Lucene40FieldInfosReader extends FieldInfosReader {
|
|||
}
|
||||
if (oldNormsType.mapping != null) {
|
||||
if (oldNormsType.mapping != DocValuesType.NUMERIC) {
|
||||
throw new CorruptIndexException("invalid norm type: " + oldNormsType);
|
||||
throw new CorruptIndexException("invalid norm type: " + oldNormsType + " (resource=" + input + ")");
|
||||
}
|
||||
attributes.put(LEGACY_NORM_TYPE_KEY, oldNormsType.name());
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue