diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e90c4e57590..917e65901a9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -329,6 +329,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-3417. Rename BalancerDatanode#getName to getDisplayName to be
     consistent with Datanode. (eli)
 
+    HDFS-3416. Cleanup DatanodeID and DatanodeRegistration
+    constructors used by testing. (eli)
+
   OPTIMIZATIONS
 
     HDFS-2477. Optimize computing the diff between a block report and the
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
index 464fd614d23..f4c0715f553 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.protocol;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 
 /**
  * This class represents the primary identifier for a Datanode.
@@ -45,23 +44,6 @@ public class DatanodeID implements Comparable<DatanodeID> {
   protected int infoPort;      // info server port
   protected int ipcPort;       // IPC server port
 
-  public DatanodeID(String ipAddr, int xferPort) {
-    this(ipAddr, "", "", xferPort,
-        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
-        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
-  }
-
-  public DatanodeID(String ipAddr, String hostName, int xferPort) {
-    this(ipAddr, hostName, "", xferPort,
-        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
-        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
-  }
-
-  /**
-   * DatanodeID copy constructor
-   *
-   * @param from
-   */
   public DatanodeID(DatanodeID from) {
     this(from.getIpAddr(),
         from.getHostName(),
@@ -72,7 +54,7 @@ public class DatanodeID implements Comparable<DatanodeID> {
   }
 
   /**
-   * Create DatanodeID
+   * Create a DatanodeID
    * @param ipAddr IP
    * @param hostName hostname
   * @param storageID data storage ID
@@ -94,22 +76,6 @@ public class DatanodeID implements Comparable<DatanodeID> {
     this.ipAddr = ipAddr;
   }
 
-  public void setHostName(String hostName) {
-    this.hostName = hostName;
-  }
-
-  public void setXferPort(int xferPort) {
-    this.xferPort = xferPort;
-  }
-
-  public void setInfoPort(int infoPort) {
-    this.infoPort = infoPort;
-  }
-
-  public void setIpcPort(int ipcPort) {
-    this.ipcPort = ipcPort;
-  }
-
   public void setStorageID(String storageID) {
     this.storageID = storageID;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
index 7f795cd4a84..2aee0eb5ee5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
@@ -100,11 +100,7 @@ public class DatanodeManager {
    * with the same storage id; and </li>
   * <li>removed if and only if an existing datanode is restarted to serve a
   *     different storage id.</li>
-  * </ul> <br>
-  * The list of the {@link DatanodeDescriptor}s in the map is checkpointed
-  * in the namespace image file. Only the {@link DatanodeInfo} part is
-  * persistent, the list of blocks is restored from the datanode block
-  * reports.
+  * </ul>
   * <p>
   * Mapping: StorageID -> DatanodeDescriptor
   */
@@ -832,7 +828,9 @@ public class DatanodeManager {
 
     if (InetAddresses.isInetAddress(hostStr)) {
       // The IP:port is sufficient for listing in a report
-      dnId = new DatanodeID(hostStr, "", port);
+      dnId = new DatanodeID(hostStr, "", "", port,
+          DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+          DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
     } else {
       String ipAddr = "";
       try {
@@ -840,7 +838,9 @@ public class DatanodeManager {
       } catch (UnknownHostException e) {
         LOG.warn("Invalid hostname " + hostStr + " in hosts file");
       }
-      dnId = new DatanodeID(ipAddr, hostStr, port);
+      dnId = new DatanodeID(ipAddr, hostStr, "", port,
+          DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+          DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
     }
     return dnId;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 5dbb2ce1eee..67c57f5eb36 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -671,23 +671,16 @@ public class DataNode extends Configured
    * @param nsInfo the namespace info from the first part of the NN handshake
    */
   DatanodeRegistration createBPRegistration(NamespaceInfo nsInfo) {
-    final String xferIp = streamingAddr.getAddress().getHostAddress();
-    DatanodeRegistration bpRegistration = new DatanodeRegistration(xferIp, getXferPort());
-    bpRegistration.setInfoPort(getInfoPort());
-    bpRegistration.setIpcPort(getIpcPort());
-    bpRegistration.setHostName(hostName);
-    bpRegistration.setStorageID(getStorageId());
-    bpRegistration.setSoftwareVersion(VersionInfo.getVersion());
-
     StorageInfo storageInfo = storage.getBPStorage(nsInfo.getBlockPoolID());
     if (storageInfo == null) {
       // it's null in the case of SimulatedDataSet
-      bpRegistration.getStorageInfo().layoutVersion = HdfsConstants.LAYOUT_VERSION;
-      bpRegistration.setStorageInfo(nsInfo);
-    } else {
-      bpRegistration.setStorageInfo(storageInfo);
+      storageInfo = new StorageInfo(nsInfo);
     }
-    return bpRegistration;
+    DatanodeID dnId = new DatanodeID(
+        streamingAddr.getAddress().getHostAddress(), hostName,
+        getStorageId(), getXferPort(), getInfoPort(), getIpcPort());
+    return new DatanodeRegistration(dnId, storageInfo,
+        new ExportedBlockKeys(), VersionInfo.getVersion());
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java
index dda0a6fbee0..b736d1280b0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java
@@ -47,21 +47,6 @@ public class DatanodeRegistration extends DatanodeID
     this.softwareVersion = softwareVersion;
   }
 
-  public DatanodeRegistration(String ipAddr, int xferPort) {
-    this(ipAddr, xferPort, new StorageInfo(), new ExportedBlockKeys());
-  }
-
-  public DatanodeRegistration(String ipAddr, int xferPort, StorageInfo info,
-      ExportedBlockKeys keys) {
-    super(ipAddr, xferPort);
-    this.storageInfo = info;
-    this.exportedKeys = keys;
-  }
-
-  public void setStorageInfo(StorageInfo storage) {
-    this.storageInfo = new StorageInfo(storage);
-  }
-
   public StorageInfo getStorageInfo() {
     return storageInfo;
   }
@@ -74,10 +59,6 @@ public class DatanodeRegistration extends DatanodeID
     return exportedKeys;
   }
 
-  public void setSoftwareVersion(String softwareVersion) {
-    this.softwareVersion = softwareVersion;
-  }
-
   public String getSoftwareVersion() {
     return softwareVersion;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index 5b50cefe102..af8507d5ef3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -67,19 +67,23 @@
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.datatransfer.Sender;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.TestTransferRbw;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.VersionInfo;
 
 import com.google.common.base.Joiner;
@@ -708,13 +712,14 @@ public class DFSTestUtil {
   }
 
   private static DatanodeID getDatanodeID(String ipAddr) {
-    return new DatanodeID(ipAddr, "localhost",
-        DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT);
+    return new DatanodeID(ipAddr, "localhost", "",
+        DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT,
+        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
   }
 
   public static DatanodeID getLocalDatanodeID() {
-    return new DatanodeID("127.0.0.1", "localhost",
-        DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT);
+    return getDatanodeID("127.0.0.1");
   }
 
   public static DatanodeID getLocalDatanodeID(int port) {
@@ -740,12 +745,14 @@ public class DFSTestUtil {
 
   public static DatanodeInfo getDatanodeInfo(String ipAddr,
       String host, int port) {
-    return new DatanodeInfo(new DatanodeID(ipAddr, host, port));
+    return new DatanodeInfo(new DatanodeID(ipAddr, host, "",
+        port, DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT));
   }
 
   public static DatanodeInfo getLocalDatanodeInfo(String ipAddr,
       String hostname, AdminStates adminState) {
-    return new DatanodeInfo(ipAddr, hostname, "storage",
+    return new DatanodeInfo(ipAddr, hostname, "",
         DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT,
         DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
         DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT,
@@ -760,6 +767,14 @@ public class DFSTestUtil {
 
   public static DatanodeDescriptor getDatanodeDescriptor(String ipAddr,
      int port, String rackLocation) {
-    return new DatanodeDescriptor(new DatanodeID(ipAddr, port), rackLocation);
+    DatanodeID dnId = new DatanodeID(ipAddr, "host", "", port,
+        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
+    return new DatanodeDescriptor(dnId, rackLocation);
+  }
+
+  public static DatanodeRegistration getLocalDatanodeRegistration() {
+    return new DatanodeRegistration(getLocalDatanodeID(),
+        new StorageInfo(), new ExportedBlockKeys(), VersionInfo.getVersion());
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
index 78e05412c76..b37e4d71a0d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
@@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -115,7 +116,7 @@ public class TestBPOfferService {
           0, HdfsConstants.LAYOUT_VERSION))
       .when(mock).versionRequest();
 
-    Mockito.doReturn(new DatanodeRegistration("1.2.3.4", 100))
+    Mockito.doReturn(DFSTestUtil.getLocalDatanodeRegistration())
       .when(mock).registerDatanode(Mockito.any(DatanodeRegistration.class));
 
     Mockito.doAnswer(new HeartbeatAnswer(nnIdx))
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
index ec5b8a72e2f..7644e054cbe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
@@ -35,10 +35,12 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataStorage;
@@ -765,6 +767,7 @@ public class NNThroughputBenchmark {
     ArrayList<Block> blocks;
     int nrBlocks; // actual number of blocks
     long[] blockReportList;
+    int dnIdx;
 
     /**
      * Return a a 6 digit integer port.
@@ -780,11 +783,7 @@ public class NNThroughputBenchmark {
     }
 
     TinyDatanode(int dnIdx, int blockCapacity) throws IOException {
-      String ipAddr = DNS.getDefaultIP("default");
-      String hostName = DNS.getDefaultHost("default", "default");
-      dnRegistration = new DatanodeRegistration(ipAddr, getNodePort(dnIdx));
-      dnRegistration.setHostName(hostName);
-      dnRegistration.setSoftwareVersion(VersionInfo.getVersion());
+      this.dnIdx = dnIdx;
       this.blocks = new ArrayList<Block>(blockCapacity);
       this.nrBlocks = 0;
     }
@@ -800,7 +799,14 @@ public class NNThroughputBenchmark {
     void register() throws IOException {
       // get versions from the namenode
       nsInfo = nameNodeProto.versionRequest();
-      dnRegistration.setStorageInfo(new DataStorage(nsInfo, ""));
+      dnRegistration = new DatanodeRegistration(
+          new DatanodeID(DNS.getDefaultIP("default"),
+              DNS.getDefaultHost("default", "default"),
+              "", getNodePort(dnIdx),
+              DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+              DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT),
+          new DataStorage(nsInfo, ""),
+          new ExportedBlockKeys(), VersionInfo.getVersion());
       DataNode.setNewStorageID(dnRegistration);
       // register datanode
       dnRegistration = nameNodeProto.registerDatanode(dnRegistration);
@@ -896,12 +902,9 @@ public class NNThroughputBenchmark {
       for(int t = 0; t < blockTargets.length; t++) {
         DatanodeInfo dnInfo = blockTargets[t];
         DatanodeRegistration receivedDNReg;
-        receivedDNReg =
-          new DatanodeRegistration(dnInfo.getIpAddr(), dnInfo.getXferPort());
-        receivedDNReg.setStorageInfo(
-          new DataStorage(nsInfo, dnInfo.getStorageID()));
-        receivedDNReg.setInfoPort(dnInfo.getInfoPort());
-        receivedDNReg.setIpcPort(dnInfo.getIpcPort());
+        receivedDNReg = new DatanodeRegistration(dnInfo,
+          new DataStorage(nsInfo, dnInfo.getStorageID()),
+          new ExportedBlockKeys(), VersionInfo.getVersion());
         ReceivedDeletedBlockInfo[] rdBlocks = { new ReceivedDeletedBlockInfo(
               blocks[i], ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK,
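
Reviewer note, not part of the patch: a minimal sketch of how the consolidated
constructors are used after this change, derived only from the signatures in
the diff above. The literal values are placeholders, not patch content.

    // Illustrative only. DatanodeID keeps a single canonical constructor;
    // the two- and three-argument convenience forms and the per-field
    // setters (setHostName, setXferPort, setInfoPort, setIpcPort) are gone,
    // so callers now spell out all six fields explicitly:
    DatanodeID dnId = new DatanodeID(
        "127.0.0.1", "localhost", "",                    // IP, hostname, storage ID
        DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT,         // xfer port
        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,    // info (HTTP) port
        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);    // IPC port

    // DatanodeRegistration is likewise built fully formed in one call,
    // rather than mutated through setStorageInfo()/setSoftwareVersion():
    DatanodeRegistration reg = new DatanodeRegistration(dnId,
        new StorageInfo(), new ExportedBlockKeys(), VersionInfo.getVersion());

    // Tests that only need a localhost registration use the new helper:
    DatanodeRegistration localReg = DFSTestUtil.getLocalDatanodeRegistration();

Building these objects fully formed removes the half-initialized intermediate
states the old setter-based construction allowed in tests.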