diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
index a4ddd621424..6771e328516 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java
@@ -45,10 +45,7 @@ public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {

   public void setupSaslHandler(ChannelPipeline p) {
     String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP);
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("SASL client context established. Negotiated QoP: " + qop);
-    }
-
+    LOG.trace("SASL client context established. Negotiated QoP {}", qop);
     if (qop == null || "auth".equalsIgnoreCase(qop)) {
       return;
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
index afc15ab4e3d..8da3fdef52b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java
@@ -71,9 +71,7 @@ public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler<
   }

   private void writeResponse(ChannelHandlerContext ctx, byte[] response) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Will send token of size " + response.length + " from initSASLContext.");
-    }
+    LOG.trace("Sending token size={} from initSASLContext.", response.length);
     ctx.writeAndFlush(
       ctx.alloc().buffer(4 + response.length).writeInt(response.length).writeBytes(response));
   }
@@ -133,9 +131,7 @@ public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler<
        }
        return;
      }
-     if (LOG.isDebugEnabled()) {
-       LOG.debug("Will read input token of size " + len + " for processing by initSASLContext");
-     }
+     LOG.trace("Reading input token size={} for processing by initSASLContext", len);
      final byte[] challenge = new byte[len];
      msg.readBytes(challenge);
      byte[] response = ugi.doAs(new PrivilegedExceptionAction<byte[]>() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
index 768f32bd1cc..86f0a3ff591 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java
@@ -291,8 +291,11 @@ public class AssignProcedure extends RegionTransitionProcedure {
         " transition openSeqNum=" + openSeqNum + ", " + regionNode);
     }
     if (openSeqNum < regionNode.getOpenSeqNum()) {
-      LOG.warn("Skipping update of open seqnum with " + openSeqNum +
-        " because current seqnum=" + regionNode.getOpenSeqNum());
+      // Don't bother logging if openSeqNum == 0
+      if (openSeqNum != 0) {
+        LOG.warn("Skipping update of open seqnum with " + openSeqNum +
+          " because current seqnum=" + regionNode.getOpenSeqNum());
+      }
     } else {
       regionNode.setOpenSeqNum(openSeqNum);
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index 07e9600e5bb..fb7731fa756 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -220,8 +220,7 @@ class RegionLocationFinder {
         tableDescriptor = this.services.getTableDescriptors().get(tableName);
       }
     } catch (FileNotFoundException fnfe) {
-      LOG.debug("FileNotFoundException during getTableDescriptors." + " Current table name = "
-        + tableName, fnfe);
+      LOG.debug("tableName={}", tableName, fnfe);
     }

     return tableDescriptor;
@@ -277,8 +276,7 @@ class RegionLocationFinder {
       blockDistbn = cache.get(hri);
       return blockDistbn;
     } else {
-      LOG.debug("HDFSBlocksDistribution not found in cache for region "
-        + hri.getRegionNameAsString());
+      LOG.trace("HDFSBlocksDistribution not found in cache for {}", hri.getRegionNameAsString());
       blockDistbn = internalGetTopBlockLocation(hri);
       cache.put(hri, blockDistbn);
       return blockDistbn;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index bcb23ac29e6..cb202c82ff4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -272,13 +272,9 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
      try {
        POOL.latchCountUp();
        if (runCleaner()) {
-         if (LOG.isTraceEnabled()) {
-           LOG.trace("Cleaned all WALs under {}", oldFileDir);
-         }
+         LOG.trace("Cleaned all WALs under {}", oldFileDir);
        } else {
-         if (LOG.isTraceEnabled()) {
-           LOG.trace("WALs outstanding under {}", oldFileDir);
-         }
+         LOG.trace("WALs outstanding under {}", oldFileDir);
        }
      } finally {
        POOL.latchCountDown();
@@ -291,9 +287,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
        POOL.updatePool((long) (0.8 * getTimeUnit().toMillis(getPeriod())));
      }
    } else {
-     if (LOG.isTraceEnabled()) {
-       LOG.trace("Cleaner chore disabled! Not cleaning.");
-     }
+     LOG.trace("Cleaner chore disabled! Not cleaning.");
    }
  }

@@ -472,7 +466,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu

     @Override
     protected Boolean compute() {
-      LOG.debug("Cleaning under {}", dir);
+      LOG.trace("Cleaning under {}", dir);
       List<FileStatus> subDirs;
       List<FileStatus> files;
       try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
index b85b56963b6..a5e87ae0eb9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -235,7 +235,7 @@ public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
         break;
       }
       if (task != null) {
-        LOG.debug("Removing {}", task.filePath);
+        LOG.trace("Removing {}", task.filePath);
         boolean succeed;
         try {
           succeed = this.fs.delete(task.filePath, false);
@@ -258,13 +258,13 @@ public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
   private void countDeletedFiles(boolean isLargeFile, boolean fromLargeQueue) {
     if (isLargeFile) {
       if (deletedLargeFiles.get() == Long.MAX_VALUE) {
-        LOG.info("Deleted more than Long.MAX_VALUE large files, reset counter to 0");
+        LOG.debug("Deleted more than Long.MAX_VALUE large files, reset counter to 0");
         deletedLargeFiles.set(0L);
       }
       deletedLargeFiles.incrementAndGet();
     } else {
       if (deletedSmallFiles.get() == Long.MAX_VALUE) {
-        LOG.info("Deleted more than Long.MAX_VALUE small files, reset counter to 0");
+        LOG.debug("Deleted more than Long.MAX_VALUE small files, reset counter to 0");
         deletedSmallFiles.set(0L);
       }
       if (fromLargeQueue) {