diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index f57e27b3a4c..a067e50f30a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -933,7 +933,7 @@ public class HFileBlock implements Cacheable {
       // We need to cache the unencoded/uncompressed size before changing the block state
       int rawBlockSize = 0;
       if (this.getEncodingState() != null) {
-        rawBlockSize = blockSizeWritten();
+        rawBlockSize = encodedBlockSizeWritten();
       }
       // We need to set state before we can package the block up for cache-on-write. In a way, the
       // block is ready, but not yet encoded or compressed.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index d617492e86d..fa41768084f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -1216,8 +1216,8 @@ public class TestHStoreFile {
   public void testDataBlockSizeCompressed() throws Exception {
     conf.set(BLOCK_COMPRESSED_SIZE_PREDICATOR,
       PreviousBlockCompressionRatePredicator.class.getName());
-    testDataBlockSizeWithCompressionRatePredicator(11,
-      (s, c) -> (c > 1 && c < 11) ? s >= BLOCKSIZE_SMALL * 10 : true);
+    testDataBlockSizeWithCompressionRatePredicator(12,
+      (s, c) -> (c > 2 && c < 11) ? s >= BLOCKSIZE_SMALL * 10 : true);
   }
 
   @Test