HDFS-3400. svn merge -c 1337017 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1337018 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2012-05-11 03:16:28 +00:00
parent c68efee78c
commit 6795b5ad8f
3 changed files with 23 additions and 16 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -312,6 +312,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-3401. Cleanup DatanodeDescriptor creation in the tests. (eli)
 
+    HDFS-3400. DNs should be able to start with jsvc even if security is disabled.
+    (atm via eli)
+
   OPTIMIZATIONS
 
   HDFS-2477. Optimize computing the diff between a block report and the

hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs

@@ -56,6 +56,7 @@ shift
 # Determine if we're starting a secure datanode, and if so, redefine appropriate variables
 if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
+  if [ -n "$JSVC_HOME" ]; then
   if [ -n "$HADOOP_SECURE_DN_PID_DIR" ]; then
     HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
   fi
@@ -66,6 +67,10 @@ if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_
   HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
   starting_secure_dn="true"
+  else
+    echo "It looks like you're trying to start a secure DN, but \$JSVC_HOME"\
+    "isn't set. Falling back to starting insecure DN."
+  fi
 fi
 
 if [ "$COMMAND" = "namenode" ] ; then

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.mortbay.jetty.nio.SelectChannelConnector;
 
 /**
@@ -60,10 +61,7 @@ public class SecureDataNodeStarter implements Daemon {
   @Override
   public void init(DaemonContext context) throws Exception {
     System.err.println("Initializing secure datanode resources");
-    // We should only start up a secure datanode in a Kerberos-secured cluster
-    Configuration conf = new Configuration(); // Skip UGI method to not log in
-    if(!conf.get(HADOOP_SECURITY_AUTHENTICATION).equals("kerberos"))
-      throw new RuntimeException("Cannot start secure datanode in unsecure cluster");
+    Configuration conf = new Configuration();
 
     // Stash command-line arguments for regular datanode
     args = context.getArguments();
@@ -98,7 +96,8 @@ public class SecureDataNodeStarter implements Daemon {
     System.err.println("Successfully obtained privileged resources (streaming port = "
         + ss + " ) (http listener port = " + listener.getConnection() +")");
 
-    if (ss.getLocalPort() >= 1023 || listener.getPort() >= 1023) {
+    if ((ss.getLocalPort() >= 1023 || listener.getPort() >= 1023) &&
+        UserGroupInformation.isSecurityEnabled()) {
       throw new RuntimeException("Cannot start secure datanode with unprivileged ports");
     }
 
     System.err.println("Opened streaming server at " + streamingAddr);