diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java index 42d153dc2ab..5365847a889 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java @@ -2926,9 +2926,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory, NetUtils.connect(sock, addr, getRandomLocalInterfaceAddr(), socketTimeout); peer = DFSUtilClient.peerFromSocketAndKey(saslClient, sock, this, - blockToken, datanodeId); - peer.setReadTimeout(socketTimeout); - peer.setWriteTimeout(socketTimeout); + blockToken, datanodeId, socketTimeout); success = true; return peer; } finally { diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java index 897a40ce1d9..af700c157cc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java @@ -579,12 +579,14 @@ public class DFSUtilClient { public static Peer peerFromSocketAndKey( SaslDataTransferClient saslClient, Socket s, DataEncryptionKeyFactory keyFactory, - Token blockToken, DatanodeID datanodeId) - throws IOException { + Token blockToken, DatanodeID datanodeId, + int socketTimeoutMs) throws IOException { Peer peer = null; boolean success = false; try { peer = peerFromSocket(s); + peer.setReadTimeout(socketTimeoutMs); + peer.setWriteTimeout(socketTimeoutMs); peer = saslClient.peerSend(peer, keyFactory, blockToken, datanodeId); success = true; return peer; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java index 66b7dac031c..c2b616e3e0d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java @@ -869,7 +869,7 @@ public class NamenodeFsck implements DataEncryptionKeyFactory { s.setSoTimeout(HdfsConstants.READ_TIMEOUT); peer = DFSUtilClient.peerFromSocketAndKey( dfs.getSaslDataTransferClient(), s, NamenodeFsck.this, - blockToken, datanodeId); + blockToken, datanodeId, HdfsConstants.READ_TIMEOUT); } finally { if (peer == null) { IOUtils.closeQuietly(s); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java index 2d4eb0d2b43..8555e5d0ad9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java @@ -25,6 +25,10 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.io.IOException; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.SocketTimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.BlockLocation; @@ -32,12 +36,18 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystemTestHelper; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSTestUtil; +import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import 
org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; +import org.apache.hadoop.hdfs.net.Peer; +import org.apache.hadoop.hdfs.protocol.DatanodeID; +import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver; +import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.security.token.Token; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils.LogCapturer; import org.junit.After; @@ -197,4 +207,42 @@ public class TestSaslDataTransfer extends SaslDataTransferTestCase { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build(); cluster.waitActive(); } + + /** + * Verifies that peerFromSocketAndKey honors socket read timeouts. + */ + @Test(timeout=60000) + public void testPeerFromSocketAndKeyReadTimeout() throws Exception { + HdfsConfiguration conf = createSecureConfig( + "authentication,integrity,privacy"); + AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false); + SaslDataTransferClient saslClient = new SaslDataTransferClient( + conf, DataTransferSaslUtil.getSaslPropertiesResolver(conf), + TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuth); + DatanodeID fakeDatanodeId = new DatanodeID("127.0.0.1", "localhost", + "beefbeef-beef-beef-beef-beefbeefbeef", 1, 2, 3, 4); + DataEncryptionKeyFactory dataEncKeyFactory = + new DataEncryptionKeyFactory() { + @Override + public DataEncryptionKey newDataEncryptionKey() { + return new DataEncryptionKey(123, "456", new byte[8], + new byte[8], 1234567, "fakeAlgorithm"); + } + }; + ServerSocket serverSocket = null; + Socket socket = null; + try { + serverSocket = new ServerSocket(0, -1); + socket = new Socket(serverSocket.getInetAddress(), + serverSocket.getLocalPort()); + Peer peer = DFSUtilClient.peerFromSocketAndKey(saslClient, socket, + dataEncKeyFactory, new Token(), 
fakeDatanodeId, 1); + peer.close(); + Assert.fail("Expected DFSUtilClient#peerFromSocketAndKey to time out."); + } catch (SocketTimeoutException e) { + GenericTestUtils.assertExceptionContains("Read timed out", e); + } finally { + IOUtils.cleanup(null, socket, serverSocket); + } + } }