HDFS-7386. Replace check "port number < 1024" with shared isPrivilegedPort method. Contributed by Yongjun Zhang.

This commit is contained in:
cnauroth 2014-11-14 16:43:09 -08:00
parent 4fb96dbe3b
commit 1925e2a4ae
5 changed files with 24 additions and 3 deletions

View File

@@ -621,4 +621,19 @@ public class SecurityUtil {
conf.set(HADOOP_SECURITY_AUTHENTICATION, conf.set(HADOOP_SECURITY_AUTHENTICATION,
authenticationMethod.toString().toLowerCase(Locale.ENGLISH)); authenticationMethod.toString().toLowerCase(Locale.ENGLISH));
} }
/**
 * Checks whether the given port is a privileged port.
 * Ports with a number smaller than 1024 are treated as privileged ports on
 * unix/linux systems. For other operating systems, use this method with care.
 * For example, Windows doesn't have the concept of privileged ports.
 * However, it may be used at a Windows client to check the port of a Linux
 * server.
 *
 * @param port the port number
 * @return true for privileged ports, false otherwise
 */
public static boolean isPrivilegedPort(final int port) {
  // Well-known (privileged) ports on unix-like systems are 0-1023.
  return port < 1024;
}
} }

View File

@@ -356,6 +356,9 @@ Release 2.7.0 - UNRELEASED
HDFS-7375. Move FSClusterStats to o.a.h.h.hdfs.server.blockmanagement. HDFS-7375. Move FSClusterStats to o.a.h.h.hdfs.server.blockmanagement.
(wheat9) (wheat9)
HDFS-7386. Replace check "port number < 1024" with shared isPrivilegedPort
method. (Yongjun Zhang via cnauroth)
OPTIMIZATIONS OPTIMIZATIONS
BUG FIXES BUG FIXES

View File

@@ -52,6 +52,7 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.security.SaslPropertiesResolver; import org.apache.hadoop.security.SaslPropertiesResolver;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -245,7 +246,7 @@ public class SaslDataTransferClient {
"SASL client skipping handshake in unsecured configuration for " "SASL client skipping handshake in unsecured configuration for "
+ "addr = {}, datanodeId = {}", addr, datanodeId); + "addr = {}, datanodeId = {}", addr, datanodeId);
return null; return null;
} else if (datanodeId.getXferPort() < 1024) { } else if (SecurityUtil.isPrivilegedPort(datanodeId.getXferPort())) {
LOG.debug( LOG.debug(
"SASL client skipping handshake in secured configuration with " "SASL client skipping handshake in secured configuration with "
+ "privileged port for addr = {}, datanodeId = {}", addr, datanodeId); + "privileged port for addr = {}, datanodeId = {}", addr, datanodeId);

View File

@@ -50,6 +50,7 @@ import org.apache.hadoop.hdfs.security.token.block.BlockPoolTokenSecretManager;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.server.datanode.DNConf; import org.apache.hadoop.hdfs.server.datanode.DNConf;
import org.apache.hadoop.security.SaslPropertiesResolver; import org.apache.hadoop.security.SaslPropertiesResolver;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -112,7 +113,7 @@ public class SaslDataTransferServer {
"SASL server skipping handshake in unsecured configuration for " "SASL server skipping handshake in unsecured configuration for "
+ "peer = {}, datanodeId = {}", peer, datanodeId); + "peer = {}, datanodeId = {}", peer, datanodeId);
return new IOStreamPair(underlyingIn, underlyingOut); return new IOStreamPair(underlyingIn, underlyingOut);
} else if (xferPort < 1024) { } else if (SecurityUtil.isPrivilegedPort(xferPort)) {
LOG.debug( LOG.debug(
"SASL server skipping handshake in secured configuration for " "SASL server skipping handshake in secured configuration for "
+ "peer = {}, datanodeId = {}", peer, datanodeId); + "peer = {}, datanodeId = {}", peer, datanodeId);

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.mortbay.jetty.Connector; import org.mortbay.jetty.Connector;
@@ -110,7 +111,7 @@ public class SecureDataNodeStarter implements Daemon {
+ ss.getLocalPort()); + ss.getLocalPort());
} }
if (ss.getLocalPort() > 1023 && isSecure) { if (!SecurityUtil.isPrivilegedPort(ss.getLocalPort()) && isSecure) {
throw new RuntimeException( throw new RuntimeException(
"Cannot start secure datanode with unprivileged RPC ports"); "Cannot start secure datanode with unprivileged RPC ports");
} }