diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index c8fbe66b9db..9abf3150dbf 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -44,6 +44,9 @@ Release 2.7.1 - UNRELEASED
     HADOOP-12058. Fix dead links to DistCp and Hadoop Archives pages.
     (Kazuho Fujii via aajisaka)
 
+    HADOOP-12078. The default retry policy does not handle RetriableException
+    correctly. (Arpit Agarwal)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
index 14ded8ea24d..5668ad1ca30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
@@ -626,7 +626,7 @@ public class RetryPolicies {
     return unwrapped instanceof StandbyException;
   }
 
-  private static RetriableException getWrappedRetriableException(Exception e) {
+  static RetriableException getWrappedRetriableException(Exception e) {
     if (!(e instanceof RemoteException)) {
       return null;
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
index e6f4519e78c..fab406d5f9b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RemoteException;
 
 import com.google.protobuf.ServiceException;
+import org.apache.hadoop.ipc.RetriableException;
 
 public class RetryUtils {
   public static final Log LOG = LogFactory.getLog(RetryUtils.class);
@@ -92,7 +93,11 @@ public class RetryUtils {
 
         //see (1) and (2) in the javadoc of this method.
         final RetryPolicy p;
-        if (e instanceof RemoteException) {
+        if (e instanceof RetriableException
+            || RetryPolicies.getWrappedRetriableException(e) != null) {
+          // RetriableException or RetriableException wrapped
+          p = multipleLinearRandomRetry;
+        } else if (e instanceof RemoteException) {
           final RemoteException re = (RemoteException)e;
           p = remoteExceptionToRetry.getName().equals(re.getClassName())?
               multipleLinearRandomRetry: RetryPolicies.TRY_ONCE_THEN_FAIL;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
new file mode 100644
index 00000000000..8a61c04fee5
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io.retry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.RetriableException;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.IOException;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Test the behavior of the default retry policy.
+ */
+public class TestDefaultRetryPolicy {
+  @Rule
+  public Timeout timeout = new Timeout(300000);
+
+  /**
+   * Verify that the default retry policy correctly retries
+   * RetriableException when defaultRetryPolicyEnabled is enabled.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testWithRetriable() throws Exception {
+    Configuration conf = new Configuration();
+    RetryPolicy policy = RetryUtils.getDefaultRetryPolicy(
+        conf, "Test.No.Such.Key",
+        true,                     // defaultRetryPolicyEnabled = true
+        "Test.No.Such.Key", "10000,6",
+        null);
+    RetryPolicy.RetryAction action = policy.shouldRetry(
+        new RetriableException("Dummy exception"), 0, 0, true);
+    assertThat(action.action,
+        is(RetryPolicy.RetryAction.RetryDecision.RETRY));
+  }
+
+  /**
+   * Verify that the default retry policy correctly retries
+   * a RetriableException wrapped in a RemoteException when
+   * defaultRetryPolicyEnabled is enabled.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testWithWrappedRetriable() throws Exception {
+    Configuration conf = new Configuration();
+    RetryPolicy policy = RetryUtils.getDefaultRetryPolicy(
+        conf, "Test.No.Such.Key",
+        true,                     // defaultRetryPolicyEnabled = true
+        "Test.No.Such.Key", "10000,6",
+        null);
+    RetryPolicy.RetryAction action = policy.shouldRetry(
+        new RemoteException(RetriableException.class.getName(),
+            "Dummy exception"), 0, 0, true);
+    assertThat(action.action,
+        is(RetryPolicy.RetryAction.RetryDecision.RETRY));
+  }
+
+  /**
+   * Verify that the default retry policy does *not* retry
+   * RetriableException when defaultRetryPolicyEnabled is disabled.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void testWithRetriableAndRetryDisabled() throws Exception {
+    Configuration conf = new Configuration();
+    RetryPolicy policy = RetryUtils.getDefaultRetryPolicy(
+        conf, "Test.No.Such.Key",
+        false,                    // defaultRetryPolicyEnabled = false
+        "Test.No.Such.Key", "10000,6",
+        null);
+    RetryPolicy.RetryAction action = policy.shouldRetry(
+        new RetriableException("Dummy exception"), 0, 0, true);
+    assertThat(action.action,
+        is(RetryPolicy.RetryAction.RetryDecision.FAIL));
+  }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index 79bf2fdf6e7..c057f3e94c1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -143,6 +143,7 @@ import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.RetryCache;
 import org.apache.hadoop.ipc.RetryCache.CacheEntry;
 import org.apache.hadoop.ipc.RetryCache.CacheEntryWithPayload;
@@ -1882,7 +1883,7 @@ class NameNodeRpcServer implements NamenodeProtocols {
 
   private void checkNNStartup() throws IOException {
     if (!this.nn.isStarted()) {
-      throw new IOException(this.nn.getRole() + " still not started");
+      throw new RetriableException(this.nn.getRole() + " still not started");
     }
   }
 
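
For context, a minimal standalone sketch (not part of the patch) of the client-visible effect of the RetryUtils change: before it, a RetriableException, whether thrown directly or wrapped in a RemoteException, fell through to the generic RemoteException handling and was not recognized as retriable by the default policy. The class name, the "Demo.No.Such.Key" keys, the message string, and the null remoteExceptionToRetry argument below are illustrative placeholders mirroring the new unit test, not real HDFS wiring.

package org.apache.hadoop.io.retry;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.RetriableException;

// Illustrative demo only; not part of the patch above.
public class RetriableRetryDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Build the default retry policy the same way the new test does; the
    // placeholder keys are absent from the Configuration, so the defaults
    // (enabled, spec "10000,6" = up to 6 retries, ~10s apart) are used.
    RetryPolicy policy = RetryUtils.getDefaultRetryPolicy(
        conf, "Demo.No.Such.Key",
        true,                          // defaultRetryPolicyEnabled
        "Demo.No.Such.Key", "10000,6",
        null);
    // This is what a client sees when checkNNStartup() rejects a call:
    // a RemoteException wrapping a RetriableException.
    Exception fromServer = new RemoteException(
        RetriableException.class.getName(), "NameNode still not started");
    RetryPolicy.RetryAction action = policy.shouldRetry(fromServer, 0, 0, true);
    // With the patch applied, this prints RETRY rather than failing the call
    // on the first attempt.
    System.out.println(action.action);
  }
}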