HBASE-22445 Add file info when throw exceptions in HFileReaderImpl
parent e3c27d33a7
commit df6df68900
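The pattern applied throughout the hunks below is uniform: every exception and log message raised while reading an HFile now carries the file's path, so the offending file can be identified from the stack trace alone, and the nested BlockIndexNotLoadedException and NotSeekedException classes gain a Path constructor argument so callers can pass reader.getPath(). A minimal, self-contained sketch of the idea follows; the class and method names (PathAwareReader, checkBlockOffset) are hypothetical and are not the HBase API, and org.apache.hadoop.fs.Path is assumed to be available via hadoop-common, as it is for HFileReaderImpl itself.

// Hypothetical sketch of the pattern in this commit, not HBase code.
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class PathAwareReader {
  private final Path path;

  public PathAwareReader(Path path) {
    this.path = path;
  }

  /** The path of the underlying file, mirroring reader.getPath() in the diff below. */
  public Path getPath() {
    return path;
  }

  // Before this commit the message ended at the offset; after it, ", path=" + getPath()
  // is appended so the corrupt or truncated file is identifiable from the exception alone.
  void checkBlockOffset(long offset) throws IOException {
    if (offset < 0) {
      throw new IOException("Invalid block file offset: " + offset + ", path=" + getPath());
    }
  }
}

The hunks below apply this same message suffix (or a Path constructor parameter) at each throw and log site.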
@@ -331,14 +331,14 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       // We can read v3 or v2 versions of hfile.
       throw new IllegalArgumentException("Invalid HFile version: major=" +
           trailer.getMajorVersion() + ", minor=" + trailer.getMinorVersion() + ": expected at least " +
-          "major=2 and minor=" + MAX_MINOR_VERSION);
+          "major=2 and minor=" + MAX_MINOR_VERSION + ", path=" + path);
     }
 
   @SuppressWarnings("serial")
   public static class BlockIndexNotLoadedException extends IllegalStateException {
-    public BlockIndexNotLoadedException() {
+    public BlockIndexNotLoadedException(Path path) {
       // Add a message in case anyone relies on it as opposed to class name.
-      super("Block index not loaded");
+      super(path + " block index not loaded");
     }
   }
 
@@ -389,7 +389,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
   @Override
   public Optional<Cell> getFirstKey() {
     if (dataBlockIndexReader == null) {
-      throw new BlockIndexNotLoadedException();
+      throw new BlockIndexNotLoadedException(path);
     }
     return dataBlockIndexReader.isEmpty() ? Optional.empty()
         : Optional.of(dataBlockIndexReader.getRootBlockKey(0));
@@ -479,8 +479,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
    */
  @SuppressWarnings("serial")
  public static class NotSeekedException extends IllegalStateException {
-    public NotSeekedException() {
-      super("Not seeked to a key/value");
+    public NotSeekedException(Path path) {
+      super(path + " not seeked to a key/value");
    }
  }
 
@@ -572,7 +572,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
 
     protected void assertSeeked() {
       if (!isSeeked())
-        throw new NotSeekedException();
+        throw new NotSeekedException(reader.getPath());
     }
 
     @Override
@@ -642,7 +642,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         throw new IllegalStateException("Invalid currTagsLen " + this.currTagsLen +
             ". Block offset: " + curBlock.getOffset() + ", block length: " +
             this.blockBuffer.limit() +
-            ", position: " + this.blockBuffer.position() + " (without header).");
+            ", position: " + this.blockBuffer.position() + " (without header)." +
+            " path=" + reader.getPath());
       }
     }
 
@@ -729,7 +730,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         throw new IllegalStateException("Invalid klen " + klen + " or vlen "
             + vlen + ". Block offset: "
             + curBlock.getOffset() + ", block length: " + blockBuffer.limit() + ", position: "
-            + blockBuffer.position() + " (without header).");
+            + blockBuffer.position() + " (without header)."
+            + " path=" + reader.getPath());
       }
       offsetFromPos += Bytes.SIZEOF_LONG;
       blockBuffer.asSubByteBuffer(blockBuffer.position() + offsetFromPos, klen, pair);
@@ -744,7 +746,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         if (checkLen(tlen)) {
           throw new IllegalStateException("Invalid tlen " + tlen + ". Block offset: "
               + curBlock.getOffset() + ", block length: " + blockBuffer.limit() + ", position: "
-              + blockBuffer.position() + " (without header).");
+              + blockBuffer.position() + " (without header)."
+              + " path=" + reader.getPath());
         }
         // add the two bytes read for the tags.
         offsetFromPos += tlen + (Bytes.SIZEOF_SHORT);
@@ -759,7 +762,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
             throw new IllegalStateException("blockSeek with seekBefore "
                 + "at the first key of the block: key=" + CellUtil.getCellKeyAsString(key)
                 + ", blockOffset=" + curBlock.getOffset() + ", onDiskSize="
-                + curBlock.getOnDiskSizeWithHeader());
+                + curBlock.getOnDiskSizeWithHeader()
+                + ", path=" + reader.getPath());
           }
           blockBuffer.moveBack(lastKeyValueSize);
           readKeyValueLen();
@@ -922,7 +926,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       }
 
       if (block.getOffset() < 0) {
-        throw new IOException("Invalid block file offset: " + block);
+        throw new IOException(
+            "Invalid block file offset: " + block + ", path=" + reader.getPath());
       }
 
       // We are reading the next block without block type validation, because
@@ -1037,8 +1042,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         LOG.error("Current pos = " + blockBuffer.position()
             + "; currKeyLen = " + currKeyLen + "; currValLen = "
             + currValueLen + "; block limit = " + blockBuffer.limit()
-            + "; HFile name = " + reader.getName()
-            + "; currBlock currBlockOffset = " + this.curBlock.getOffset());
+            + "; currBlock currBlockOffset = " + this.curBlock.getOffset()
+            + "; path=" + reader.getPath());
         throw e;
       }
     }
@@ -1136,7 +1141,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       HFileBlock newBlock = reader.readBlock(firstDataBlockOffset, -1, cacheBlocks, pread,
          isCompaction, true, BlockType.DATA, getEffectiveDataBlockEncoding());
       if (newBlock.getOffset() < 0) {
-        throw new IOException("Invalid block offset: " + newBlock.getOffset());
+        throw new IOException(
+            "Invalid block offset: " + newBlock.getOffset() + ", path=" + reader.getPath());
       }
       updateCurrentBlock(newBlock);
     }
@@ -1180,7 +1186,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
             + " or currValueLen " + this.currValueLen + ". Block offset: "
             + this.curBlock.getOffset() + ", block length: "
             + this.blockBuffer.limit() + ", position: " + this.blockBuffer.position()
-            + " (without header).");
+            + " (without header)." + ", path=" + reader.getPath());
       }
     }
 
@@ -1195,7 +1201,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       // sanity check
       if (newBlock.getBlockType() != BlockType.DATA) {
         throw new IllegalStateException("ScannerV2 works only on data " + "blocks, got "
-            + newBlock.getBlockType() + "; " + "fileName=" + reader.getName()
+            + newBlock.getBlockType() + "; " + "HFileName=" + reader.getPath()
             + ", " + "dataBlockEncoder=" + reader.getDataBlockEncoding() + ", " + "isCompaction="
             + isCompaction);
       }
@@ -1344,9 +1350,10 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
           // so blocks with the old encoding still linger in cache for some
           // period of time. This event should be rare as it only happens on
           // schema definition change.
-          LOG.info("Evicting cached block with key " + cacheKey +
-              " because of a data block encoding mismatch" + "; expected: " +
-              expectedDataBlockEncoding + ", actual: " + actualDataBlockEncoding);
+          LOG.info("Evicting cached block with key " + cacheKey
+              + " because of a data block encoding mismatch" + "; expected: "
+              + expectedDataBlockEncoding + ", actual: " + actualDataBlockEncoding + ", path="
+              + path);
           // This is an error scenario. so here we need to decrement the
           // count.
           cache.returnBlock(cacheKey, cachedBlock);
@@ -1373,7 +1380,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       return null; // there are no meta blocks
     }
     if (metaBlockIndexReader == null) {
-      throw new IOException("Meta index not loaded");
+      throw new IOException(path + " meta index not loaded");
     }
 
     byte[] mbname = Bytes.toBytes(metaBlockName);
@@ -1423,13 +1430,14 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       DataBlockEncoding expectedDataBlockEncoding)
       throws IOException {
     if (dataBlockIndexReader == null) {
-      throw new IOException("Block index not loaded");
+      throw new IOException(path + " block index not loaded");
     }
     long trailerOffset = trailer.getLoadOnOpenDataOffset();
     if (dataBlockOffset < 0 || dataBlockOffset >= trailerOffset) {
       throw new IOException("Requested block is out of range: " + dataBlockOffset +
         ", lastDataBlockOffset: " + trailer.getLastDataBlockOffset() +
-        ", trailer.getLoadOnOpenDataOffset: " + trailerOffset);
+        ", trailer.getLoadOnOpenDataOffset: " + trailerOffset +
+        ", path=" + path);
     }
     // For any given block from any given file, synchronize reads for said
     // block.
@@ -1468,7 +1476,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
             if (cachedBlock.getDataBlockEncoding() != dataBlockEncoder.getDataBlockEncoding()) {
               throw new IOException("Cached block under key " + cacheKey + " "
                   + "has wrong encoding: " + cachedBlock.getDataBlockEncoding() + " (expected: "
-                  + dataBlockEncoder.getDataBlockEncoding() + ")");
+                  + dataBlockEncoder.getDataBlockEncoding() + ")"
+                  + ", path=" + path);
             }
           }
           // Cache-hit. Return!
@@ -1540,7 +1549,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     }
     if (actualBlockType != expectedBlockType) {
       throw new IOException("Expected block type " + expectedBlockType + ", " +
-          "but got " + actualBlockType + ": " + block);
+          "but got " + actualBlockType + ": " + block + ", path=" + path);
     }
   }
 
@@ -1641,7 +1650,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         String encoderCls = dataBlockEncoder.getClass().getName();
         throw new CorruptHFileException("Encoder " + encoderCls
             + " doesn't support data block encoding "
-            + DataBlockEncoding.getNameFromId(dataBlockEncoderId));
+            + DataBlockEncoding.getNameFromId(dataBlockEncoderId)
+            + ", path=" + reader.getPath());
       }
       updateCurrBlockRef(newBlock);
       ByteBuff encodedBuffer = getEncodedBuffer(newBlock);
@@ -1715,7 +1725,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
 
     private void assertValidSeek() {
       if (this.curBlock == null) {
-        throw new NotSeekedException();
+        throw new NotSeekedException(reader.getPath());
       }
     }
 
@@ -1762,7 +1772,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     if (blockType != BlockType.GENERAL_BLOOM_META &&
         blockType != BlockType.DELETE_FAMILY_BLOOM_META) {
       throw new RuntimeException("Block Type: " + blockType.toString() +
-          " is not supported") ;
+          " is not supported, path=" + path) ;
     }
 
     for (HFileBlock b : loadOnOpenBlocks)
@@ -1807,7 +1817,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         // Use the algorithm the key wants
         Cipher cipher = Encryption.getCipher(conf, key.getAlgorithm());
         if (cipher == null) {
-          throw new IOException("Cipher '" + key.getAlgorithm() + "' is not available");
+          throw new IOException("Cipher '" + key.getAlgorithm() + "' is not available"
+              + ", path=" + path);
         }
         cryptoContext.setCipher(cipher);
         cryptoContext.setKey(key);