diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java index f624608e0d1..cc4253c99ac 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java @@ -44,9 +44,7 @@ public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient { public void setupSaslHandler(ChannelPipeline p) { String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP); - if (LOG.isDebugEnabled()) { - LOG.debug("SASL client context established. Negotiated QoP: " + qop); - } + LOG.trace("SASL client context established. Negotiated QoP " + qop); if (qop == null || "auth".equalsIgnoreCase(qop)) { return; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java index 50609b4b3e1..5abf085e9e6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java @@ -61,9 +61,7 @@ public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler< } private void writeResponse(ChannelHandlerContext ctx, byte[] response) { - if (LOG.isDebugEnabled()) { - LOG.debug("Will send token of size " + response.length + " from initSASLContext."); - } + LOG.trace("Will send token of size " + response.length + " from initSASLContext."); ctx.writeAndFlush( ctx.alloc().buffer(4 + response.length).writeInt(response.length).writeBytes(response)); } @@ -109,9 +107,7 @@ public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler< } return; } - if (LOG.isDebugEnabled()) { - LOG.debug("Will read input token of size " + len 
+ " for processing by initSASLContext"); - } + LOG.trace("Will read input token of size " + len + " for processing by initSASLContext"); final byte[] challenge = new byte[len]; msg.readBytes(challenge); byte[] response = ugi.doAs(new PrivilegedExceptionAction() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java index 06ab3b53928..39da01bec36 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java @@ -285,8 +285,7 @@ class RegionLocationFinder { blockDistbn = cache.get(hri); return blockDistbn; } else { - LOG.debug("HDFSBlocksDistribution not found in cache for region " - + hri.getRegionNameAsString()); + LOG.trace("HDFSBlocksDistribution not found in cache for region " + hri.getRegionNameAsString()); blockDistbn = internalGetTopBlockLocation(hri); cache.put(hri, blockDistbn); return blockDistbn; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java index 8d95cd413ec..6feff90435c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java @@ -292,9 +292,7 @@ public abstract class CleanerChore extends Schedu POOL.updatePool((long) (0.8 * getTimeUnit().toMillis(getPeriod()))); } } else { - if (LOG.isTraceEnabled()) { - LOG.trace("Cleaner chore disabled! Not cleaning."); - } + LOG.trace("Cleaner chore disabled! Not cleaning."); } } @@ -432,7 +430,7 @@ public abstract class CleanerChore extends Schedu @Override protected Boolean compute() { - LOG.debug("Cleaning under " + dir); + LOG.trace("Cleaning under " + dir); List subDirs; final List files; try {