svn merge -c 1586079 from trunk for HDFS-6209. TestValidateConfigurationSettings should use random ports.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1586080 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2014-04-09 18:50:39 +00:00
parent 11a6feb8d3
commit 02163d1441
2 changed files with 46 additions and 17 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -105,6 +105,9 @@ Release 2.4.1 - UNRELEASED
     HDFS-6204. Fix TestRBWBlockInvalidation: change the last sleep to a loop.
     (szetszwo)
 
+    HDFS-6209. TestValidateConfigurationSettings should use random ports.
+    (Arpit Agarwal via szetszwo)
+
 Release 2.4.0 - 2014-04-07
 
   INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java

@@ -17,12 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import java.io.File;
 import java.io.IOException;
 import java.net.BindException;
+import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -55,16 +53,26 @@ public void cleanUp() {
   public void testThatMatchingRPCandHttpPortsThrowException()
       throws IOException {
 
-    Configuration conf = new HdfsConfiguration();
-    File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
-    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
-        nameDir.getAbsolutePath());
+    NameNode nameNode = null;
+    try {
+      Configuration conf = new HdfsConfiguration();
+      File nameDir = new File(MiniDFSCluster.getBaseDirectory(), "name");
+      conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+          nameDir.getAbsolutePath());
 
-    // set both of these to port 9000, should fail
-    FileSystem.setDefaultUri(conf, "hdfs://localhost:9000");
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000");
-    DFSTestUtil.formatNameNode(conf);
-    new NameNode(conf);
+      Random rand = new Random();
+      final int port = 30000 + rand.nextInt(30000);
+
+      // set both of these to the same port. It should fail.
+      FileSystem.setDefaultUri(conf, "hdfs://localhost:" + port);
+      conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:" + port);
+      DFSTestUtil.formatNameNode(conf);
+      nameNode = new NameNode(conf);
+    } finally {
+      if (nameNode != null) {
+        nameNode.stop();
+      }
+    }
   }
 
   /**
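The hunk above replaces the hard-coded port 9000 with a random port in the range [30000, 60000) and wraps NameNode startup in try/finally so the node is stopped even on the expected failure. As a standalone illustration (not part of the patch; a plain java.net.ServerSocket stands in for the NameNode listener), the sketch below picks a port the same way and shows that a randomly chosen port can still be taken by another process, which is what the retry loop in the next hunk guards against.

import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.Random;

// Illustration only: probe a randomly chosen port with a plain ServerSocket.
public class RandomPortProbe {
  public static void main(String[] args) throws IOException {
    Random rand = new Random();
    // Same arithmetic as the patch: a port in [30000, 60000), well above the
    // privileged range and away from fixed service ports such as 9000.
    final int port = 30000 + rand.nextInt(30000);
    try (ServerSocket socket =
             new ServerSocket(port, 0, InetAddress.getLoopbackAddress())) {
      System.out.println("Port " + port + " was free; bound " + socket.getLocalPort());
    } catch (BindException be) {
      // Another process already owns the port; a test would have to retry
      // with a different random port (see the loop in the next hunk).
      System.out.println("Port " + port + " is in use: " + be.getMessage());
    }
  }
}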
@@ -80,11 +88,29 @@ public void testThatDifferentRPCandHttpPortsAreOK()
     conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
         nameDir.getAbsolutePath());
 
-    FileSystem.setDefaultUri(conf, "hdfs://localhost:8000");
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000");
-    DFSTestUtil.formatNameNode(conf);
-    NameNode nameNode = new NameNode(conf); // should be OK!
-    nameNode.stop();
+    Random rand = new Random();
+
+    // A few retries in case the ports we choose are in use.
+    for (int i = 0; i < 5; ++i) {
+      final int port1 = 30000 + rand.nextInt(10000);
+      final int port2 = port1 + 1 + rand.nextInt(10000);
+
+      FileSystem.setDefaultUri(conf, "hdfs://localhost:" + port1);
+      conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:" + port2);
+      DFSTestUtil.formatNameNode(conf);
+      NameNode nameNode = null;
+
+      try {
+        nameNode = new NameNode(conf); // should be OK!
+        break;
+      } catch (BindException be) {
+        continue;     // Port in use? Try another.
+      } finally {
+        if (nameNode != null) {
+          nameNode.stop();
+        }
+      }
+    }
   }
 
   /**
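This hunk gives testThatDifferentRPCandHttpPortsAreOK a small retry loop: it picks two distinct random ports, breaks out once the NameNode starts, retries on BindException, and stops any NameNode that did start in the finally block. Below is a minimal standalone sketch of the same retry-on-BindException pattern, with ServerSockets standing in for the RPC and HTTP listeners (illustration only, not code from the patch).

import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.Random;

// Illustration of the retry pattern used in testThatDifferentRPCandHttpPortsAreOK.
public class RetryRandomPorts {
  public static void main(String[] args) throws IOException {
    Random rand = new Random();
    // A few retries in case the randomly chosen ports are already in use.
    for (int i = 0; i < 5; ++i) {
      final int port1 = 30000 + rand.nextInt(10000);
      final int port2 = port1 + 1 + rand.nextInt(10000);  // always != port1
      ServerSocket rpc = null;
      ServerSocket http = null;
      try {
        rpc = new ServerSocket(port1, 0, InetAddress.getLoopbackAddress());
        http = new ServerSocket(port2, 0, InetAddress.getLoopbackAddress());
        System.out.println("Bound distinct ports " + port1 + " and " + port2);
        break;                    // success: stop retrying
      } catch (BindException be) {
        continue;                 // port in use, pick another pair next time
      } finally {
        // Mirrors the test's finally block: release whatever did get bound.
        if (rpc != null) {
          rpc.close();
        }
        if (http != null) {
          http.close();
        }
      }
    }
  }
}

Bounding the retries at five keeps the test from looping indefinitely on a congested port range while making a spurious collision failure very unlikely.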