diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 496c2b614ac..facb8cf43df 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -1267,6 +1267,9 @@ Release 2.6.0 - UNRELEASED HDFS-7226. Fix TestDNFencing.testQueueingWithAppend. (Yongjun Zhang via jing9) + HDFS-7382. DataNode in secure mode may throw NullPointerException if client + connects before DataNode registers itself with NameNode. (cnauroth) + Release 2.5.2 - UNRELEASED INCOMPATIBLE CHANGES diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java index 9f94534db67..3fa7727f00d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java @@ -94,12 +94,14 @@ public SaslDataTransferServer(DNConf dnConf, * @param peer connection peer * @param underlyingOut connection output stream * @param underlyingIn connection input stream + * @param xferPort data transfer port of DataNode accepting connection * @param datanodeId ID of DataNode accepting connection * @return new pair of streams, wrapped after SASL negotiation * @throws IOException for any error */ public IOStreamPair receive(Peer peer, OutputStream underlyingOut, - InputStream underlyingIn, DatanodeID datanodeId) throws IOException { + InputStream underlyingIn, int xferPort, DatanodeID datanodeId) + throws IOException { if (dnConf.getEncryptDataTransfer()) { LOG.debug( "SASL server doing encrypted handshake for peer = {}, datanodeId = {}", @@ -110,16 +112,16 @@ public IOStreamPair receive(Peer peer, OutputStream underlyingOut, "SASL 
server skipping handshake in unsecured configuration for " + "peer = {}, datanodeId = {}", peer, datanodeId); return new IOStreamPair(underlyingIn, underlyingOut); - } else if (datanodeId.getXferPort() < 1024) { + } else if (xferPort < 1024) { LOG.debug( - "SASL server skipping handshake in unsecured configuration for " + "SASL server skipping handshake in secured configuration for " + "peer = {}, datanodeId = {}", peer, datanodeId); return new IOStreamPair(underlyingIn, underlyingOut); } else if (dnConf.getSaslPropsResolver() != null) { LOG.debug( "SASL server doing general handshake for peer = {}, datanodeId = {}", peer, datanodeId); - return getSaslStreams(peer, underlyingOut, underlyingIn, datanodeId); + return getSaslStreams(peer, underlyingOut, underlyingIn); } else if (dnConf.getIgnoreSecurePortsForTesting()) { // It's a secured cluster using non-privileged ports, but no SASL. The // only way this can happen is if the DataNode has @@ -271,12 +273,11 @@ private byte[] getEncryptionKeyFromUserName(String userName) * @param peer connection peer * @param underlyingOut connection output stream * @param underlyingIn connection input stream - * @param datanodeId ID of DataNode accepting connection * @return new pair of streams, wrapped after SASL negotiation * @throws IOException for any error */ private IOStreamPair getSaslStreams(Peer peer, OutputStream underlyingOut, - InputStream underlyingIn, final DatanodeID datanodeId) throws IOException { + InputStream underlyingIn) throws IOException { if (peer.hasSecureChannel() || dnConf.getTrustedChannelResolver().isTrusted(getPeerAddress(peer))) { return new IOStreamPair(underlyingIn, underlyingOut); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java index 2a45a425173..a235c20cc6e 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java @@ -182,7 +182,8 @@ public void run() { InputStream input = socketIn; try { IOStreamPair saslStreams = datanode.saslServer.receive(peer, socketOut, - socketIn, datanode.getDatanodeId()); + socketIn, datanode.getXferAddress().getPort(), + datanode.getDatanodeId()); input = new BufferedInputStream(saslStreams.in, HdfsConstants.SMALL_BUFFER_SIZE); socketOut = saslStreams.out;