HBASE-21917 Make the HFileBlock#validateChecksum can accept ByteBuff as an input. (addendum)

This commit is contained in:
huzheng 2019-03-07 10:19:32 +08:00
parent 259f8610ca
commit 287e014b44
1 changed file with 2 additions and 3 deletions

View File

@ -91,7 +91,7 @@ public class ChecksumUtil {
*/
private static boolean verifyChunkedSums(DataChecksum dataChecksum, ByteBuff data,
ByteBuff checksums, String pathName) {
// Almost all of the HFile Block are about 64KB, so it would be a SingleByteBuff, use the
// Almost all of the HFile Block are about 64KB, and it would be a SingleByteBuff, use the
// Hadoop's verify checksum directly, because it'll use the native checksum, which has no extra
// byte[] allocation or copying. (HBASE-21917)
if (data instanceof SingleByteBuff && checksums instanceof SingleByteBuff) {
@ -108,8 +108,7 @@ public class ChecksumUtil {
}
}
// Only when the dataBlock is larger than 4MB (default buffer size in BucketCache), the block
// will be an MultiByteBuff. we use a small byte[] to update the checksum many times for
// If the block is a MultiByteBuff. we use a small byte[] to update the checksum many times for
// reducing GC pressure. it's a rare case.
int checksumTypeSize = dataChecksum.getChecksumType().size;
if (checksumTypeSize == 0) {