diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 0b131f5937e..436e967bf5a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -324,10 +324,11 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
       confPrintThreshold = 10;
     }
     this.parallelPutCountPrintThreshold = confPrintThreshold;
-    LOG.info("Store={}, memstore type={}, storagePolicy={}, verifyBulkLoads={}, " +
-        "parallelPutCountPrintThreshold={}", getColumnFamilyName(),
-        this.memstore.getClass().getSimpleName(), policyName,
-        this.verifyBulkLoads, this.parallelPutCountPrintThreshold);
+    LOG.info("Store={}, memstore type={}, storagePolicy={}, verifyBulkLoads={}, " +
+        "parallelPutCountPrintThreshold={}, encoding={}, compression={}",
+      getColumnFamilyName(), memstore.getClass().getSimpleName(), policyName, verifyBulkLoads,
+      parallelPutCountPrintThreshold, family.getDataBlockEncoding(),
+      family.getCompressionType());
   }
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index d6e55105fcb..83690a95c7e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -199,12 +199,14 @@ public abstract class Compactor {
       }
       tmp = fileInfo.get(TIMERANGE_KEY);
       fd.latestPutTs = tmp == null ? HConstants.LATEST_TIMESTAMP: TimeRangeTracker.parseFrom(tmp).getMax();
-      LOG.debug("Compacting {}, keycount={}, bloomtype={}, size={}, encoding={}, seqNum={}{}",
+      LOG.debug("Compacting {}, keycount={}, bloomtype={}, size={}, " +
+          "encoding={}, compression={}, seqNum={}{}",
           (file.getPath() == null? null: file.getPath().getName()),
           keyCount,
           r.getBloomFilterType().toString(),
           TraditionalBinaryPrefix.long2String(r.length(), "", 1),
           r.getHFileReader().getDataBlockEncoding(),
+          compactionCompression,
           seqNum,
           (allFiles? ", earliestPutTs=" + earliestPutTs: ""));
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
index dd8ebb35677..de28422570f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
@@ -299,11 +299,9 @@ public class TestChecksum {
         long expectedChunks = ChecksumUtil.numChunks(
                                dataSize + HConstants.HFILEBLOCK_HEADER_SIZE,
                                bytesPerChecksum);
-        LOG.info("testChecksumChunks: pread=" + pread +
-                   ", bytesPerChecksum=" + bytesPerChecksum +
-                   ", fileSize=" + totalSize +
-                   ", dataSize=" + dataSize +
-                   ", expectedChunks=" + expectedChunks);
+        LOG.info("testChecksumChunks: pread={}, bytesPerChecksum={}, fileSize={}, " +
+            "dataSize={}, expectedChunks={}, compression={}", pread, bytesPerChecksum,
+            totalSize, dataSize, expectedChunks, algo.toString());
 
         // Verify hbase checksums.
         assertEquals(true, hfs.useHBaseChecksum());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index f4e6696d7d6..a588341478a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -376,6 +376,8 @@ public class TestHFileBlock {
     for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
       for (boolean pread : new boolean[] { false, true }) {
         for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
+          LOG.info("testDataBlockEncoding: Compression algorithm={}, pread={}, dataBlockEncoder={}",
+              algo.toString(), pread, encoding);
           Path path = new Path(TEST_UTIL.getDataTestDir(), "blocks_v2_"
               + algo + "_" + encoding.toString());
           FSDataOutputStream os = fs.create(path);
@@ -534,9 +536,8 @@ public class TestHFileBlock {
       for (boolean pread : BOOLEAN_VALUES) {
         for (boolean cacheOnWrite : BOOLEAN_VALUES) {
           Random rand = defaultRandom();
-          LOG.info("testPreviousOffset:Compression algorithm: " + algo +
-              ", pread=" + pread +
-              ", cacheOnWrite=" + cacheOnWrite);
+          LOG.info("testPreviousOffset: Compression algorithm={}, pread={}, cacheOnWrite={}",
+            algo.toString(), pread, cacheOnWrite);
           Path path = new Path(TEST_UTIL.getDataTestDir(), "prev_offset");
           List<Long> expectedOffsets = new ArrayList<>();
           List<Long> expectedPrevOffsets = new ArrayList<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index be35c74140f..890ea72d2c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -202,7 +202,7 @@ public class TestHFileBlockIndex {
 
   private void readIndex(boolean useTags) throws IOException {
     long fileSize = fs.getFileStatus(path).getLen();
-    LOG.info("Size of " + path + ": " + fileSize);
+    LOG.info("Size of {}: {} compression={}", path, fileSize, compr.toString());
 
     FSDataInputStream istream = fs.open(path);
     HFileContext meta = new HFileContextBuilder()
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
index 21c6d6e2df9..8a512e62dfc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
@@ -232,7 +232,7 @@ public class CreateRandomStoreFile {
 
     Path storeFilePath = sfw.getPath();
     long fileSize = fs.getFileStatus(storeFilePath).getLen();
-    LOG.info("Created " + storeFilePath + ", " + fileSize + " bytes");
+    LOG.info("Created {}, {} bytes, compression={}", storeFilePath, fileSize, compr.toString());
 
     return true;
   }