diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
index a3e1fffae86..023c1ed52fd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java
@@ -63,4 +63,43 @@ public class ServerSocketUtil {
     }
   }
 
+  /**
+   * Check whether the given port is available.
+   *
+   * @param port given port
+   * @return true if the port is available, false otherwise
+   */
+  private static boolean isPortAvailable(int port) {
+    try (ServerSocket s = new ServerSocket(port)) {
+      return true;
+    } catch (IOException e) {
+      return false;
+    }
+  }
+
+  /**
+   * Wait until the given port is available.
+   *
+   * @param port given port
+   * @param retries number of retries for given port
+   * @return the port once it becomes available
+   * @throws InterruptedException if interrupted while waiting
+   * @throws IOException if the port is still in use after all retries
+   */
+  public static int waitForPort(int port, int retries)
+      throws InterruptedException, IOException {
+    int tries = 0;
+    while (true) {
+      if (isPortAvailable(port)) {
+        return port;
+      } else {
+        tries++;
+        if (tries >= retries) {
+          throw new IOException(
+              "Port is already in use; giving up after " + tries + " times.");
+        }
+        Thread.sleep(1000);
+      }
+    }
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index a9791b360c2..77b04c332e1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -530,7 +531,8 @@ public class TestDFSShell {
     cluster = new MiniDFSCluster.Builder(conf)
         .format(true)
         .numDataNodes(2)
-        .nameNodePort(8020)
+        .nameNodePort(ServerSocketUtil.waitForPort(
+            HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT, 10))
         .waitSafeMode(true)
         .build();
     FileSystem srcFs = cluster.getFileSystem();
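
For reference, a minimal usage sketch of the new helper outside the patch itself. The class name WaitForPortExample is illustrative only; the call mirrors the TestDFSShell change above, waiting on the default NameNode RPC port (8020) with 10 retries.

    // Illustrative usage only, not part of the patch.
    import java.io.IOException;

    import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
    import org.apache.hadoop.net.ServerSocketUtil;

    public class WaitForPortExample {
      public static void main(String[] args)
          throws IOException, InterruptedException {
        // Retry up to 10 times, one second apart, until the default
        // NameNode RPC port is free; throws IOException if it never frees up.
        int port = ServerSocketUtil.waitForPort(
            HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT, 10);
        System.out.println("Port " + port + " is now available");
      }
    }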