HDFS-9407. TestFileTruncate should not use fixed NN port. Contributed by Brahma Reddy Battula.

Konstantin V Shvachko 2015-11-25 13:52:09 -08:00
parent e3d673901b
commit fc799ab16c
2 changed files with 5 additions and 8 deletions
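The fix drops the hard-coded NameNode RPC port (and the port-probing fallback) from the test so that MiniDFSCluster can bind an ephemeral port. As background, a small hedged sketch of why probing for a free port is not enough, assuming Hadoop's ServerSocketUtil and HdfsClientConfigKeys are on the classpath (the class name and printouts are illustrative and not part of the patch; the race description is a general property of probe-then-bind, not quoted from the JIRA):

import java.net.ServerSocket;

import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.net.ServerSocketUtil;

/** Illustrative only: contrasts probe-then-bind with binding port 0. */
public class PortPickingSketch {
  public static void main(String[] args) throws Exception {
    // Pattern the patch removes: find a port that is free *now*, starting at
    // the default NameNode RPC port and retrying up to 10 times. Nothing
    // reserves the port, so another process can grab it before the NameNode
    // binds, and the test can still fail with a BindException.
    int probed = ServerSocketUtil.getPort(
        HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT, 10);
    System.out.println("probed (unreserved) port: " + probed);

    // Race-free alternative: bind port 0 and let the kernel assign a port
    // atomically. MiniDFSCluster does the equivalent when no fixed
    // NameNode port is configured.
    try (ServerSocket ss = new ServerSocket(0)) {
      System.out.println("kernel-assigned port: " + ss.getLocalPort());
    }
  }
}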


@@ -2378,8 +2378,8 @@ Release 2.8.0 - UNRELEASED
     HDFS-9435. TestBlockRecovery#testRBWReplicas is failing intermittently.
     (Rakesh R via waltersu4549)
 
-    HDFS-9433. DFS getEZForPath API on a non-existent file should throw FileNotFoundException
-    (Rakesh R via umamahesh)
+    HDFS-9433. DFS getEZForPath API on a non-existent file should throw
+    FileNotFoundException (Rakesh R via umamahesh)
 
     HDFS-6101. TestReplaceDatanodeOnFailure fails occasionally.
     (Wei-Chiu Chuang via cnauroth)
@@ -2397,6 +2397,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9459. hadoop-hdfs-native-client fails test build on Windows after
     transition to ctest. (Chris Nauroth via wheat9)
 
+    HDFS-9407. TestFileTruncate should not use fixed NN port.
+    (Brahma Reddy Battula via shv)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES


@@ -50,7 +50,6 @@ import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -60,7 +59,6 @@ import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.datanode.FsDatasetTestUtils;
-import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
@@ -104,7 +102,6 @@ public class TestFileTruncate {
     cluster = new MiniDFSCluster.Builder(conf)
         .format(true)
         .numDataNodes(DATANODE_NUM)
-        .nameNodePort(HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT)
         .waitSafeMode(true)
         .build();
     fs = cluster.getFileSystem();
@@ -1230,9 +1227,6 @@ public class TestFileTruncate {
     NameNode.doRollback(conf, false);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(DATANODE_NUM)
         .format(false)
-        .nameNodePort(
-            ServerSocketUtil.getPort(
-                HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT, 10))
         .startupOption(o==StartupOption.ROLLBACK ? StartupOption.REGULAR : o)
         .dnStartupOption(o!=StartupOption.ROLLBACK ? StartupOption.REGULAR : o)
         .build();
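Put together, the post-patch setup looks roughly like the following standalone sketch (class name, data-node count, and printouts are illustrative; the builder calls mirror the test's). With no nameNodePort(...) configured, MiniDFSCluster binds an OS-assigned ephemeral port, and callers that need the actual port can read it back with getNameNodePort():

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

/** Illustrative sketch of a MiniDFSCluster that does not pin the NN port. */
public class EphemeralNnPortSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new HdfsConfiguration();
    // Omitting nameNodePort(...) leaves the builder at its default of 0,
    // so the NameNode binds an OS-assigned ephemeral port and cannot
    // collide with another test run holding the default RPC port.
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .format(true)
        .numDataNodes(3)
        .waitSafeMode(true)
        .build();
    try {
      DistributedFileSystem fs = cluster.getFileSystem();
      System.out.println("NameNode RPC port: " + cluster.getNameNodePort());
      System.out.println("Default FS URI: " + fs.getUri());
    } finally {
      cluster.shutdown();
    }
  }
}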