From cdc8146bd46ae8bf3395498c529513730d446824 Mon Sep 17 00:00:00 2001
From: Todd Lipcon
Date: Wed, 15 Feb 2012 18:20:11 +0000
Subject: [PATCH] HADOOP-8068. void methods can swallow exceptions when going
 through failover path. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1244628 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.HDFS-1623.txt        |  2 ++
 .../io/retry/RetryInvocationHandler.java       | 10 ++++----
 .../apache/hadoop/io/retry/RetryPolicies.java  | 24 +++++--------------
 .../apache/hadoop/io/retry/RetryPolicy.java    |  8 ++++++-
 .../hadoop/io/retry/TestFailoverProxy.java     | 23 +++++++++++++++++-
 .../hadoop/io/retry/TestRetryProxy.java        | 14 -----------
 .../io/retry/UnreliableImplementation.java     | 12 ++++++++++
 .../hadoop/io/retry/UnreliableInterface.java   |  3 +++
 8 files changed, 57 insertions(+), 39 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt b/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt
index 1d8ce4dd255..c9dd46062f4 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt
@@ -47,3 +47,5 @@ HADOOP-8038. Add 'ipc.client.connect.max.retries.on.timeouts' entry in
 core-default.xml file. (Uma Maheswara Rao G via atm)
 
 HADOOP-8041. Log a warning when a failover is first attempted (todd)
+
+HADOOP-8068. void methods can swallow exceptions when going through failover path (todd)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
index 3d702d9879d..323542cbd39 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
@@ -90,12 +90,12 @@ public Object invoke(Object proxy, Method method, Object[] args)
         RetryAction action = policy.shouldRetry(e, retries++, invocationFailoverCount,
             isMethodIdempotent);
         if (action.action == RetryAction.RetryDecision.FAIL) {
-          LOG.warn("Exception while invoking " + method.getName()
-              + " of " + currentProxy.getClass() + ". Not retrying.", e);
-          if (!method.getReturnType().equals(Void.TYPE)) {
-            throw e; // non-void methods can't fail without an exception
+          if (action.reason != null) {
+            LOG.warn("Exception while invoking " +
+                currentProxy.getClass() + "." + method.getName() +
+                ". Not retrying because " + action.reason, e);
           }
-          return null;
+          throw e;
         } else { // retry or failover
           // avoid logging the failover if this is the first call on this
           // proxy object, and we successfully achieve the failover without
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
index a96dc9ee0bc..2be8b759998 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
@@ -54,14 +54,6 @@ public class RetryPolicies {
    */
   public static final RetryPolicy TRY_ONCE_THEN_FAIL = new TryOnceThenFail();
   
-  /**
-   * <p>
-   * Try once, and fail silently for void methods, or by
-   * re-throwing the exception for non-void methods.
-   * </p>
-   */
-  public static final RetryPolicy TRY_ONCE_DONT_FAIL = new TryOnceDontFail();
-  
   /**
    * <p>
    * Keep trying forever.
@@ -152,12 +144,6 @@ public static final RetryPolicy failoverOnNetworkException(
   }
   
   static class TryOnceThenFail implements RetryPolicy {
-    public RetryAction shouldRetry(Exception e, int retries, int failovers,
-        boolean isMethodIdempotent) throws Exception {
-      throw e;
-    }
-  }
-  static class TryOnceDontFail implements RetryPolicy {
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
         boolean isMethodIdempotent) throws Exception {
       return RetryAction.FAIL;
@@ -185,7 +171,7 @@ public RetryLimited(int maxRetries, long sleepTime, TimeUnit timeUnit) {
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
         boolean isMethodIdempotent) throws Exception {
       if (retries >= maxRetries) {
-        throw e;
+        return RetryAction.FAIL;
       }
       return new RetryAction(RetryAction.RetryDecision.RETRY,
           timeUnit.toMillis(calculateSleepTime(retries)));
@@ -325,9 +311,9 @@ public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
     public RetryAction shouldRetry(Exception e, int retries,
         int failovers, boolean isMethodIdempotent) throws Exception {
       if (failovers >= maxFailovers) {
-        LOG.info("Failovers (" + failovers + ") exceeded maximum allowed ("
+        return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
+            "failovers (" + failovers + ") exceeded maximum allowed ("
             + maxFailovers + ")");
-        return RetryAction.FAIL;
       }
       
       if (e instanceof ConnectException ||
@@ -345,7 +331,9 @@ public RetryAction shouldRetry(Exception e, int retries,
         if (isMethodIdempotent) {
           return RetryAction.FAILOVER_AND_RETRY;
         } else {
-          return RetryAction.FAIL;
+          return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
+              "the invoked method is not idempotent, and unable to determine "
+              + "whether it was invoked");
         }
       } else {
         return fallbackPolicy.shouldRetry(e, retries, failovers,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
index 90e5eaea671..ed673e950f8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
@@ -44,14 +44,20 @@ public static class RetryAction {
     
     public final RetryDecision action;
     public final long delayMillis;
+    public final String reason;
     
     public RetryAction(RetryDecision action) {
-      this(action, 0);
+      this(action, 0, null);
     }
     
     public RetryAction(RetryDecision action, long delayTime) {
+      this(action, delayTime, null);
+    }
+    
+    public RetryAction(RetryDecision action, long delayTime, String reason) {
       this.action = action;
       this.delayMillis = delayTime;
+      this.reason = reason;
     }
     
     public enum RetryDecision {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
index 2a6dc2622fd..4949ef31406 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
@@ -128,7 +128,7 @@ public void testNeverFailOver() throws UnreliableException,
         new FlipFlopProxyProvider(UnreliableInterface.class,
           new UnreliableImplementation("impl1"),
           new UnreliableImplementation("impl2")),
-        RetryPolicies.TRY_ONCE_DONT_FAIL);
+        RetryPolicies.TRY_ONCE_THEN_FAIL);
     
     unreliable.succeedsOnceThenFailsReturningString();
     try {
@@ -196,6 +196,27 @@ public void testFailoverOnNetworkExceptionIdempotentOperation()
     assertEquals("impl2", unreliable.succeedsOnceThenFailsReturningStringIdempotent());
   }
   
+  /**
+   * Test that if a non-idempotent void function is called, and there is an exception,
+   * the exception is properly propagated
+   */
+  @Test
+  public void testExceptionPropagatedForNonIdempotentVoid() throws Exception {
+    UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
+      .create(UnreliableInterface.class,
+          new FlipFlopProxyProvider(UnreliableInterface.class,
+            new UnreliableImplementation("impl1", TypeOfExceptionToFailWith.IO_EXCEPTION),
+            new UnreliableImplementation("impl2", TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION)),
+          RetryPolicies.failoverOnNetworkException(1));
+
+    try {
+      unreliable.nonIdempotentVoidFailsIfIdentifierDoesntMatch("impl2");
+      fail("did not throw an exception");
+    } catch (Exception e) {
+    }
+
+  }
+
   private static class SynchronizedUnreliableImplementation extends UnreliableImplementation {
     
     private CountDownLatch methodLatch;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
index c48e87b7dd9..696f40d8376 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.io.retry;
 
 import static org.apache.hadoop.io.retry.RetryPolicies.RETRY_FOREVER;
-import static org.apache.hadoop.io.retry.RetryPolicies.TRY_ONCE_DONT_FAIL;
 import static org.apache.hadoop.io.retry.RetryPolicies.TRY_ONCE_THEN_FAIL;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryByException;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryByRemoteException;
@@ -59,19 +58,6 @@ public void testTryOnceThenFail() throws UnreliableException {
     }
   }
   
-  public void testTryOnceDontFail() throws UnreliableException {
-    UnreliableInterface unreliable = (UnreliableInterface)
-      RetryProxy.create(UnreliableInterface.class, unreliableImpl, TRY_ONCE_DONT_FAIL);
-    unreliable.alwaysSucceeds();
-    unreliable.failsOnceThenSucceeds();
-    try {
-      unreliable.failsOnceThenSucceedsWithReturnValue();
-      fail("Should fail");
-    } catch (UnreliableException e) {
-      // expected
-    }
-  }
-  
   public void testRetryForever() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
index 185ed2a4426..54fe6778440 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
@@ -136,6 +136,18 @@ public String failsIfIdentifierDoesntMatch(String identifier)
       return null;
     }
   }
+  
+  @Override
+  public void nonIdempotentVoidFailsIfIdentifierDoesntMatch(String identifier)
+      throws UnreliableException, StandbyException, IOException {
+    if (this.identifier.equals(identifier)) {
+      return;
+    } else {
+      String message = "expected '" + this.identifier + "' but received '"
+          + identifier + "'";
+      throwAppropriateException(exceptionToFailWith, message);
+    }
+  }
 
   private static void throwAppropriateException(TypeOfExceptionToFailWith eType,
       String message) throws UnreliableException, StandbyException, IOException {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
index e794c1686c2..66a8b853606 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
@@ -67,4 +67,7 @@ public String succeedsTenTimesThenFailsReturningString()
   @Idempotent
   public String failsIfIdentifierDoesntMatch(String identifier)
       throws UnreliableException, StandbyException, IOException;
+
+  void nonIdempotentVoidFailsIfIdentifierDoesntMatch(String identifier)
+      throws UnreliableException, StandbyException, IOException;
 }