HADOOP-7972. HAServiceProtocol exceptions need to be unwrapped. Contributed by Hari Mankude.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1230861 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2012-01-13 02:30:29 +00:00
parent 8610a9231a
commit 72207596be
4 changed files with 74 additions and 9 deletions
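Background for the change: HAServiceProtocol methods are invoked over Hadoop IPC, so an exception such as HealthCheckFailedException or ServiceFailedException thrown on the server side reaches the client wrapped in an org.apache.hadoop.ipc.RemoteException, and catch clauses written against the specific types never fire until the wrapper is unwrapped. The sketch below is not part of the commit (the class name and the way the proxy is obtained are assumptions); it only illustrates the failure mode and the RemoteException.unwrapRemoteException() call that the new helper centralizes.

import java.io.IOException;

import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.HealthCheckFailedException;
import org.apache.hadoop.ipc.RemoteException;

// Hypothetical illustration only; not part of the commit.
class UnwrapSketch {
  static void check(HAServiceProtocol proto) throws IOException {
    try {
      proto.monitorHealth();
    } catch (HealthCheckFailedException e) {
      // Never reached when proto is an RPC proxy: the wire delivers RemoteException.
      throw e;
    } catch (RemoteException re) {
      // What actually arrives; unwrapping restores the declared exception type.
      throw re.unwrapRemoteException(HealthCheckFailedException.class);
    }
  }
}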

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java View File

@@ -63,7 +63,7 @@ private static void preFailoverChecks(HAServiceProtocol toSvc,
           "Can't failover to an active service");
     }
     try {
-      toSvc.monitorHealth();
+      HAServiceProtocolHelper.monitorHealth(toSvc);
     } catch (HealthCheckFailedException hce) {
       throw new FailoverFailedException(
           "Can't failover to an unhealthy service", hce);
@@ -91,7 +91,7 @@ public static void failover(HAServiceProtocol fromSvc, String fromSvcName,
     // Try to make fromSvc standby
     try {
-      fromSvc.transitionToStandby();
+      HAServiceProtocolHelper.transitionToStandby(fromSvc);
     } catch (ServiceFailedException sfe) {
       LOG.warn("Unable to make " + fromSvcName + " standby (" +
           sfe.getMessage() + ")");
@@ -105,7 +105,7 @@ public static void failover(HAServiceProtocol fromSvc, String fromSvcName,
     boolean failed = false;
     Throwable cause = null;
     try {
-      toSvc.transitionToActive();
+      HAServiceProtocolHelper.transitionToActive(toSvc);
     } catch (ServiceFailedException sfe) {
       LOG.error("Unable to make " + toSvcName + " active (" +
           sfe.getMessage() + "). Failing back");
@@ -122,7 +122,7 @@ public static void failover(HAServiceProtocol fromSvc, String fromSvcName,
     if (failed) {
       String msg = "Unable to failover to " + toSvcName;
       try {
-        fromSvc.transitionToActive();
+        HAServiceProtocolHelper.transitionToActive(fromSvc);
       } catch (ServiceFailedException sfe) {
         msg = "Failback to " + fromSvcName + " failed (" +
             sfe.getMessage() + ")";

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java View File

@@ -92,7 +92,7 @@ private int transitionToActive(final String[] argv)
     }

     HAServiceProtocol proto = getProtocol(argv[1]);
-    proto.transitionToActive();
+    HAServiceProtocolHelper.transitionToActive(proto);
     return 0;
   }
@@ -105,7 +105,7 @@ private int transitionToStandby(final String[] argv)
     }

     HAServiceProtocol proto = getProtocol(argv[1]);
-    proto.transitionToStandby();
+    HAServiceProtocolHelper.transitionToStandby(proto);
     return 0;
   }
@@ -139,7 +139,7 @@ private int checkHealth(final String[] argv)
     HAServiceProtocol proto = getProtocol(argv[1]);
     try {
-      proto.monitorHealth();
+      HAServiceProtocolHelper.monitorHealth(proto);
     } catch (HealthCheckFailedException e) {
       errOut.println("Health check failed: " + e.getLocalizedMessage());
       return 1;

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocolHelper.java View File

@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ha;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
+import org.apache.hadoop.ipc.RemoteException;
+
+/**
+ * Helper for making {@link HAServiceProtocol} RPC calls. This helper
+ * unwraps the {@link RemoteException} to specific exceptions.
+ *
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class HAServiceProtocolHelper {
+  public static void monitorHealth(HAServiceProtocol svc)
+      throws IOException {
+    try {
+      svc.monitorHealth();
+    } catch (RemoteException e) {
+      throw e.unwrapRemoteException(HealthCheckFailedException.class);
+    }
+  }
+
+  public static void transitionToActive(HAServiceProtocol svc)
+      throws IOException {
+    try {
+      svc.transitionToActive();
+    } catch (RemoteException e) {
+      throw e.unwrapRemoteException(ServiceFailedException.class);
+    }
+  }
+
+  public static void transitionToStandby(HAServiceProtocol svc)
+      throws IOException {
+    try {
+      svc.transitionToStandby();
+    } catch (RemoteException e) {
+      throw e.unwrapRemoteException(ServiceFailedException.class);
+    }
+  }
+}
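With the helper in place a caller can catch the service-level exception directly, as the checkHealth change above does. A minimal usage sketch follows (the class name is illustrative; obtaining the HAServiceProtocol proxy is outside this diff):

import java.io.IOException;

import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.HAServiceProtocolHelper;
import org.apache.hadoop.ha.HealthCheckFailedException;

// Hypothetical illustration only; mirrors the checkHealth change in this commit.
class HelperUsageSketch {
  static int checkHealth(HAServiceProtocol proto) throws IOException {
    try {
      HAServiceProtocolHelper.monitorHealth(proto);  // RemoteException unwrapped inside
    } catch (HealthCheckFailedException e) {
      System.err.println("Health check failed: " + e.getLocalizedMessage());
      return 1;
    }
    return 0;
  }
}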

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java View File

@@ -47,6 +47,7 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;

 import org.apache.hadoop.ha.HAServiceProtocol;
+import org.apache.hadoop.ha.HAServiceProtocolHelper;
 import org.apache.hadoop.ha.ServiceFailedException;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology.NNConf;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -1590,12 +1591,12 @@ private HAServiceProtocol getHaServiceClient(int nnIndex) throws IOException {

   public void transitionToActive(int nnIndex) throws IOException,
       ServiceFailedException {
-    getHaServiceClient(nnIndex).transitionToActive();
+    HAServiceProtocolHelper.transitionToActive(getHaServiceClient(nnIndex));
   }

   public void transitionToStandby(int nnIndex) throws IOException,
       ServiceFailedException {
-    getHaServiceClient(nnIndex).transitionToStandby();
+    HAServiceProtocolHelper.transitionToStandby(getHaServiceClient(nnIndex));
   }
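Because the MiniDFSCluster wrappers now route transitions through the helper, an HA test can react to the specific failure rather than to a generic RemoteException. A hedged sketch, with class and method names that are illustrative and not part of this change:

import org.apache.hadoop.ha.ServiceFailedException;
import org.apache.hadoop.hdfs.MiniDFSCluster;

// Hypothetical test fragment; cluster construction is assumed to happen elsewhere.
class TransitionSketch {
  static void makeNameNodeActive(MiniDFSCluster cluster, int nnIndex) throws Exception {
    try {
      cluster.transitionToActive(nnIndex);  // throws the unwrapped ServiceFailedException
    } catch (ServiceFailedException sfe) {
      throw new AssertionError("NameNode " + nnIndex + " could not become active: "
          + sfe.getMessage());
    }
  }
}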