HDFS-2841. HAAdmin does not work if security is enabled. Contributed by Aaron T. Myers.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1237534 13f79535-47bb-0310-9956-ffa450edef68
Aaron Myers 2012-01-30 02:52:27 +00:00
parent 30dd704e6c
commit 6122357da5
5 changed files with 20 additions and 15 deletions


@@ -36,7 +36,6 @@ function print_usage(){
   echo " classpath prints the class path needed to get the"
   echo " Hadoop jar and the required libraries"
   echo " daemonlog get/set the log level for each daemon"
-  echo " haadmin run a HA admin client"
   echo " or"
   echo " CLASSNAME run the class named CLASSNAME"
   echo ""
@@ -112,10 +111,6 @@ case $COMMAND in
   CLASS=org.apache.hadoop.tools.HadoopArchives
   CLASSPATH=${CLASSPATH}:${TOOL_PATH}
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "haadmin" ] ; then
-  CLASS=org.apache.hadoop.ha.HAAdmin
-  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
 elif [[ "$COMMAND" = -* ]] ; then
   # class and package names cannot begin with a -
   echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"


@@ -37,7 +37,7 @@
  * mode, or to trigger a health-check.
  */
 @InterfaceAudience.Private
-public class HAAdmin extends Configured implements Tool {
+public abstract class HAAdmin extends Configured implements Tool {
   private static Map<String, UsageInfo> USAGE =
       ImmutableMap.<String, UsageInfo>builder()
@@ -171,7 +171,6 @@ protected HAServiceProtocol getProtocol(String target)
         addr, getConf());
   }
   @Override
   public int run(String[] argv) throws Exception {
     if (argv.length < 1) {
@@ -227,12 +226,6 @@ private int help(String[] argv) {
     return 1;
   }
-  public static void main(String[] argv) throws Exception {
-    int res = ToolRunner.run(new HAAdmin(), argv);
-    System.exit(res);
-  }
   private static class UsageInfo {
     private final String args;
     private final String help;
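Because main() is removed and the class is now abstract, HAAdmin can no longer be launched directly; a concrete subclass has to supply the entry point, and the hdfs script change below wires the haadmin command to org.apache.hadoop.hdfs.tools.DFSHAAdmin. A minimal sketch of what such a subclass could look like, assuming an HDFS-aware Configuration is all it needs to add (the real DFSHAAdmin body is not part of the hunks shown here):

// Hypothetical sketch only; not the DFSHAAdmin source from this commit.
package org.apache.hadoop.hdfs.tools;

import org.apache.hadoop.ha.HAAdmin;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.ToolRunner;

public class DFSHAAdmin extends HAAdmin {

  public static void main(String[] argv) throws Exception {
    // Mirrors the main() removed from the abstract base class, but builds an
    // HDFS-specific Configuration so hdfs-site.xml (including any security
    // settings) is loaded before talking to the NameNodes.
    DFSHAAdmin tool = new DFSHAAdmin();
    tool.setConf(new HdfsConfiguration());
    System.exit(ToolRunner.run(tool, argv));
  }
}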


@@ -135,3 +135,5 @@ HDFS-2809. Add test to verify that delegation tokens are honored after failover.
 HDFS-2838. NPE in FSNamesystem when in safe mode. (Gregory Chanan via eli)
 HDFS-2805. Add a test for a federated cluster with HA NNs. (Brandon Li via jitendra)
+HDFS-2841. HAAdmin does not work if security is enabled. (atm)


@@ -31,6 +31,7 @@ function print_usage(){
   echo " namenode run the DFS namenode"
   echo " datanode run a DFS datanode"
   echo " dfsadmin run a DFS admin client"
+  echo " haadmin run a DFS HA admin client"
   echo " fsck run a DFS filesystem checking utility"
   echo " balancer run a cluster balancing utility"
   echo " jmxget get JMX exported values from NameNode or DataNode."
@ -85,6 +86,10 @@ elif [ "$COMMAND" = "dfs" ] ; then
elif [ "$COMMAND" = "dfsadmin" ] ; then elif [ "$COMMAND" = "dfsadmin" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS" HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "haadmin" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "fsck" ] ; then elif [ "$COMMAND" = "fsck" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSck CLASS=org.apache.hadoop.hdfs.tools.DFSck
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS" HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"


@@ -26,6 +26,8 @@
 import java.util.Collection;
 import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient.Conf;
@@ -34,11 +36,16 @@
 import org.apache.hadoop.io.retry.FailoverProxyProvider;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryProxy;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 public class HAUtil {
+  private static final Log LOG =
+      LogFactory.getLog(HAUtil.class);
   private HAUtil() { /* Hidden constructor */ }
   /**
@@ -171,11 +178,14 @@ public static <T> FailoverProxyProvider<T> createFailoverProxyProvider(
           xface);
       return (FailoverProxyProvider<T>) provider;
     } catch (Exception e) {
+      String message = "Couldn't create proxy provider " + failoverProxyProviderClass;
+      if (LOG.isDebugEnabled()) {
+        LOG.debug(message, e);
+      }
       if (e.getCause() instanceof IOException) {
         throw (IOException) e.getCause();
       } else {
-        throw new IOException(
-            "Couldn't create proxy provider " + failoverProxyProviderClass, e);
+        throw new IOException(message, e);
       }
     }
   }
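This hunk also sets a pattern for reporting reflective failures: build the message once, log the full stack trace only at debug level, and rethrow as an IOException that keeps both the message and the cause. A small, self-contained sketch of that idiom under the same commons-logging setup as HAUtil (class and method names below are illustrative, not part of this commit):

// Illustrative only: a generic rendition of the log-at-debug-then-rethrow idiom
// added to HAUtil.createFailoverProxyProvider(); the class and method are made up.
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class ReflectiveFactory {
  private static final Log LOG = LogFactory.getLog(ReflectiveFactory.class);

  public static <T> T newInstance(Class<? extends T> clazz) throws IOException {
    try {
      return clazz.newInstance();
    } catch (Exception e) {
      String message = "Couldn't create an instance of " + clazz;
      if (LOG.isDebugEnabled()) {
        LOG.debug(message, e); // full stack trace only when debug logging is enabled
      }
      if (e.getCause() instanceof IOException) {
        // Surface an underlying IOException directly instead of double-wrapping it.
        throw (IOException) e.getCause();
      } else {
        throw new IOException(message, e);
      }
    }
  }
}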