diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
index d8a516e13ea..945e92f68c0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
@@ -31,14 +31,14 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.StandbyException;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.io.retry.MultiException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A FailoverProxyProvider implementation that technically does not "failover"
@@ -51,8 +51,8 @@ import org.apache.hadoop.io.retry.MultiException;
 public class RequestHedgingProxyProvider<T> extends
     ConfiguredFailoverProxyProvider<T> {
 
-  private static final Log LOG =
-      LogFactory.getLog(RequestHedgingProxyProvider.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(RequestHedgingProxyProvider.class);
 
   class RequestHedgingInvocationHandler implements InvocationHandler {
 
@@ -100,6 +100,8 @@ public class RequestHedgingProxyProvider<T> extends
           Callable<Object> c = new Callable<Object>() {
             @Override
             public Object call() throws Exception {
+              LOG.trace("Invoking method {} on proxy {}", method,
+                  pEntry.getValue().proxyInfo);
               return method.invoke(pEntry.getValue().proxy, args);
             }
           };
@@ -114,15 +116,14 @@ public class RequestHedgingProxyProvider<T> extends
           try {
             retVal = callResultFuture.get();
             successfulProxy = proxyMap.get(callResultFuture);
-            if (LOG.isDebugEnabled()) {
-              LOG.debug("Invocation successful on ["
-                  + successfulProxy.proxyInfo + "]");
-            }
+            LOG.debug("Invocation successful on [{}]",
+                successfulProxy.proxyInfo);
             return retVal;
           } catch (Exception ex) {
             ProxyInfo<T> tProxyInfo = proxyMap.get(callResultFuture);
             logProxyException(ex, tProxyInfo.proxyInfo);
             badResults.put(tProxyInfo.proxyInfo, ex);
+            LOG.trace("Unsuccessful invocation on [{}]", tProxyInfo.proxyInfo);
             numAttempts--;
           }
         }
@@ -136,6 +137,7 @@ public class RequestHedgingProxyProvider<T> extends
         }
       } finally {
         if (executor != null) {
+          LOG.trace("Shutting down threadpool executor");
          executor.shutdownNow();
         }
       }
@@ -193,12 +195,9 @@ public class RequestHedgingProxyProvider<T> extends
    */
   private void logProxyException(Exception ex, String proxyInfo) {
     if (isStandbyException(ex)) {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Invocation returned standby exception on [" +
-            proxyInfo + "]");
-      }
+      LOG.debug("Invocation returned standby exception on [{}]", proxyInfo);
     } else {
-      LOG.warn("Invocation returned exception on [" + proxyInfo + "]");
+      LOG.warn("Invocation returned exception on [{}]", proxyInfo);
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
index 32f807a1e43..2ccfd86dcbc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
@@ -31,9 +31,12 @@ import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.retry.MultiException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
+import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
@@ -47,6 +50,11 @@ public class TestRequestHedgingProxyProvider {
   private URI nnUri;
   private String ns;
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    GenericTestUtils.setLogLevel(RequestHedgingProxyProvider.LOG, Level.TRACE);
+  }
+
   @Before
   public void setup() throws URISyntaxException {
     ns = "mycluster-" + Time.monotonicNow();
@@ -66,13 +74,19 @@ public class TestRequestHedgingProxyProvider {
   @Test
   public void testHedgingWhenOneFails() throws Exception {
     final NamenodeProtocols goodMock = Mockito.mock(NamenodeProtocols.class);
-    Mockito.when(goodMock.getStats()).thenReturn(new long[] {1});
+    Mockito.when(goodMock.getStats()).thenAnswer(new Answer<long[]>() {
+      @Override
+      public long[] answer(InvocationOnMock invocation) throws Throwable {
+        Thread.sleep(1000);
+        return new long[]{1};
+      }
+    });
     final NamenodeProtocols badMock = Mockito.mock(NamenodeProtocols.class);
     Mockito.when(badMock.getStats()).thenThrow(new IOException("Bad mock !!"));
 
     RequestHedgingProxyProvider<NamenodeProtocols> provider =
         new RequestHedgingProxyProvider<>(conf, nnUri, NamenodeProtocols.class,
-            createFactory(goodMock, badMock));
+            createFactory(badMock, goodMock));
     long[] stats = provider.getProxy().proxy.getStats();
     Assert.assertTrue(stats.length == 1);
     Mockito.verify(badMock).getStats();