HDFS-2841. HAAdmin does not work if security is enabled. Contributed by Aaron T. Myers.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1237534 13f79535-47bb-0310-9956-ffa450edef68
parent 30dd704e6c
commit 6122357da5
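Context for the diff below: the haadmin command moves from the generic hadoop launcher to the hdfs launcher, and HAAdmin becomes abstract with its main() removed, so that an HDFS-specific subclass (org.apache.hadoop.hdfs.tools.DFSHAAdmin, the class the hdfs script now invokes) can supply the HDFS and security configuration before HA RPCs are made. The following sketch shows roughly what such a subclass could look like; it is illustrative only, and the use of HdfsConfiguration inside setConf() is an assumption for this sketch, not the actual DFSHAAdmin source.

package org.apache.hadoop.hdfs.tools;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAAdmin;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.ToolRunner;

// Illustrative sketch only: a concrete subclass of the now-abstract HAAdmin
// that pulls in HDFS configuration and hosts the main() entry point that
// this commit removes from HAAdmin itself.
public class DFSHAAdmin extends HAAdmin {

  @Override
  public void setConf(Configuration conf) {
    if (conf != null) {
      // Assumption: wrapping the conf in HdfsConfiguration makes the HDFS
      // resources (and therefore the security-related NameNode settings)
      // visible to the HA admin client when security is enabled.
      conf = new HdfsConfiguration(conf);
    }
    super.setConf(conf);
  }

  public static void main(String[] argv) throws Exception {
    int res = ToolRunner.run(new DFSHAAdmin(), argv);
    System.exit(res);
  }
}

With the launcher changes below, the HA admin client is started through the hdfs script, whose haadmin branch sets CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin, instead of through the generic hadoop script.
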
@@ -36,7 +36,6 @@ function print_usage(){
   echo "  classpath            prints the class path needed to get the"
   echo "                       Hadoop jar and the required libraries"
   echo "  daemonlog            get/set the log level for each daemon"
-  echo "  haadmin              run a HA admin client"
   echo " or"
   echo "  CLASSNAME            run the class named CLASSNAME"
   echo ""
@@ -112,10 +111,6 @@ case $COMMAND in
   CLASS=org.apache.hadoop.tools.HadoopArchives
   CLASSPATH=${CLASSPATH}:${TOOL_PATH}
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "haadmin" ] ; then
-  CLASS=org.apache.hadoop.ha.HAAdmin
-  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
 elif [[ "$COMMAND" = -* ]] ; then
   # class and package names cannot begin with a -
   echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"

@@ -37,7 +37,7 @@ import com.google.common.collect.ImmutableMap;
  * mode, or to trigger a health-check.
  */
 @InterfaceAudience.Private
-public class HAAdmin extends Configured implements Tool {
+public abstract class HAAdmin extends Configured implements Tool {
 
   private static Map<String, UsageInfo> USAGE =
     ImmutableMap.<String, UsageInfo>builder()
@@ -171,7 +171,6 @@ public class HAAdmin extends Configured implements Tool {
         addr, getConf());
   }
 
-
   @Override
   public int run(String[] argv) throws Exception {
     if (argv.length < 1) {
@@ -226,12 +225,6 @@ public class HAAdmin extends Configured implements Tool {
     errOut.println(cmd + " [" + usageInfo.args + "]: " + usageInfo.help);
     return 1;
   }
-
-  public static void main(String[] argv) throws Exception {
-    int res = ToolRunner.run(new HAAdmin(), argv);
-    System.exit(res);
-  }
-
 
   private static class UsageInfo {
     private final String args;

@@ -135,3 +135,5 @@ HDFS-2809. Add test to verify that delegation tokens are honored after failover.
 HDFS-2838. NPE in FSNamesystem when in safe mode. (Gregory Chanan via eli)
 
 HDFS-2805. Add a test for a federated cluster with HA NNs. (Brandon Li via jitendra)
+
+HDFS-2841. HAAdmin does not work if security is enabled. (atm)

@@ -31,6 +31,7 @@ function print_usage(){
   echo "  namenode             run the DFS namenode"
   echo "  datanode             run a DFS datanode"
   echo "  dfsadmin             run a DFS admin client"
+  echo "  haadmin              run a DFS HA admin client"
   echo "  fsck                 run a DFS filesystem checking utility"
   echo "  balancer             run a cluster balancing utility"
   echo "  jmxget               get JMX exported values from NameNode or DataNode."
@@ -85,6 +86,10 @@ elif [ "$COMMAND" = "dfs" ] ; then
 elif [ "$COMMAND" = "dfsadmin" ] ; then
   CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "haadmin" ] ; then
+  CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
+  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
 elif [ "$COMMAND" = "fsck" ] ; then
   CLASS=org.apache.hadoop.hdfs.tools.DFSck
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"

@@ -26,6 +26,8 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient.Conf;
@@ -34,11 +36,16 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.retry.FailoverProxyProvider;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryProxy;
 
+import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
 public class HAUtil {
+
+  private static final Log LOG =
+    LogFactory.getLog(HAUtil.class);
 
   private HAUtil() { /* Hidden constructor */ }
 
   /**
@@ -171,11 +178,14 @@ public class HAUtil {
           xface);
       return (FailoverProxyProvider<T>) provider;
     } catch (Exception e) {
+      String message = "Couldn't create proxy provider " + failoverProxyProviderClass;
+      if (LOG.isDebugEnabled()) {
+        LOG.debug(message, e);
+      }
       if (e.getCause() instanceof IOException) {
         throw (IOException) e.getCause();
       } else {
-        throw new IOException(
-            "Couldn't create proxy provider " + failoverProxyProviderClass, e);
+        throw new IOException(message, e);
       }
     }
   }