From ff96fd8f1a2448ff0d2f4309816a2ca5e1dded86 Mon Sep 17 00:00:00 2001
From: Jitendra Nath Pandey
Date: Wed, 8 Feb 2012 04:03:02 +0000
Subject: [PATCH] Merged r1241766 from trunk for HDFS-2786.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1241768 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +
 .../java/org/apache/hadoop/hdfs/DFSUtil.java  | 13 ----
 .../hadoop/hdfs/server/common/JspHelper.java  |  2 +-
 .../server/datanode/DatanodeJspHelper.java    | 59 ++++++++++---------
 .../apache/hadoop/hdfs/tools/DFSAdmin.java    |  2 +-
 5 files changed, 35 insertions(+), 44 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index a34c815d51a..887ed451fad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -112,6 +112,9 @@ Release 0.23.1 - UNRELEASED
 
     HDFS-2868. Expose xceiver counts via the DataNode MXBean. (harsh)
 
+    HDFS-2786. Fix host-based token incompatibilities in DFSUtil. (Kihwal
+    Lee via jitendra)
+
   OPTIMIZATIONS
 
     HDFS-2130. Switch default checksum to CRC32C. (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index cadfcee5fed..f323c9706e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -602,19 +602,6 @@ public class DFSUtil {
     return capacity <= 0 ? 0 : ((float)remaining * 100.0f)/(float)capacity;
   }
 
-  /**
-   * @param address address of format host:port
-   * @return InetSocketAddress for the address
-   */
-  public static InetSocketAddress getSocketAddress(String address) {
-    int colon = address.indexOf(":");
-    if (colon < 0) {
-      return new InetSocketAddress(address, 0);
-    }
-    return new InetSocketAddress(address.substring(0, colon),
-        Integer.parseInt(address.substring(colon + 1)));
-  }
-
   /**
    * Round bytes to GiB (gibibyte)
    * @param bytes number of bytes
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
index bfc7e355cf1..e07aeeb375f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
@@ -498,7 +498,7 @@ public class JspHelper {
     String namenodeAddressInUrl = request.getParameter(NAMENODE_ADDRESS);
     InetSocketAddress namenodeAddress = null;
     if (namenodeAddressInUrl != null) {
-      namenodeAddress = DFSUtil.getSocketAddress(namenodeAddressInUrl);
+      namenodeAddress = NetUtils.createSocketAddr(namenodeAddressInUrl);
     } else if (context != null) {
       namenodeAddress = NameNodeHttpServer.getNameNodeAddressFromContext(
           context);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
index d017679e0a6..3682ccb7ada 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
@@ -53,18 +53,30 @@ import org.apache.hadoop.util.StringUtils;
 
 @InterfaceAudience.Private
 public class DatanodeJspHelper {
   private static DFSClient getDFSClient(final UserGroupInformation user,
-                                        final InetSocketAddress addr,
+                                        final String addr,
                                         final Configuration conf
                                         ) throws IOException,
                                                  InterruptedException {
     return
       user.doAs(new PrivilegedExceptionAction<DFSClient>() {
         public DFSClient run() throws IOException {
-          return new DFSClient(addr, conf);
+          return new DFSClient(NetUtils.createSocketAddr(addr), conf);
         }
       });
   }
 
+  /**
+   * Internal convenience method for canonicalizing host name.
+   * @param addr name:port or name
+   * @return canonicalized host name
+   */
+  private static String canonicalize(String addr) {
+    // default port 1 is supplied to allow addr without port.
+    // the port will be ignored.
+    return NetUtils.createSocketAddr(addr, 1).getAddress()
+        .getCanonicalHostName();
+  }
+
   private static final SimpleDateFormat lsDateFormat =
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
@@ -102,8 +114,7 @@ public class DatanodeJspHelper {
       return;
     }
 
-    InetSocketAddress namenodeAddress = DFSUtil.getSocketAddress(nnAddr);
-    DFSClient dfs = getDFSClient(ugi, namenodeAddress, conf);
+    DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
     String target = dir;
     final HdfsFileStatus targetStatus = dfs.getFileInfo(target);
     if (targetStatus == null) { // not exists
@@ -125,8 +136,7 @@ public class DatanodeJspHelper {
         out.print("Empty file");
       } else {
         DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock, conf);
-        String fqdn = InetAddress.getByName(chosenNode.getHost())
-            .getCanonicalHostName();
+        String fqdn = canonicalize(chosenNode.getHost());
         String datanodeAddr = chosenNode.getName();
         int datanodePort = Integer.parseInt(datanodeAddr.substring(
             datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
@@ -210,9 +220,8 @@ public class DatanodeJspHelper {
         JspHelper.addTableFooter(out);
       }
     }
-    String namenodeHost = namenodeAddress.getHostName();
     out.print("<br><a href=\"http://"
-        + InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":"
+        + canonicalize(nnAddr) + ":"
Go back to DFS home"); dfs.close(); } @@ -282,8 +291,7 @@ public class DatanodeJspHelper { } long blockSize = Long.parseLong(blockSizeStr); - final InetSocketAddress namenodeAddress = DFSUtil.getSocketAddress(nnAddr); - final DFSClient dfs = getDFSClient(ugi, namenodeAddress, conf); + final DFSClient dfs = getDFSClient(ugi, nnAddr, conf); List blocks = dfs.getNamenode().getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks(); // Add the various links for looking at the file contents @@ -305,8 +313,7 @@ public class DatanodeJspHelper { dfs.close(); return; } - String fqdn = InetAddress.getByName(chosenNode.getHost()) - .getCanonicalHostName(); + String fqdn = canonicalize(chosenNode.getHost()); String tailUrl = "http://" + fqdn + ":" + chosenNode.getInfoPort() + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8") + "&namenodeInfoPort=" + namenodeInfoPort @@ -345,9 +352,7 @@ public class DatanodeJspHelper { // generate a table and dump the info out.println("\n"); - String namenodeHost = namenodeAddress.getHostName(); - String namenodeHostName = InetAddress.getByName(namenodeHost).getCanonicalHostName(); - + String nnCanonicalName = canonicalize(nnAddr); for (LocatedBlock cur : blocks) { out.print(""); final String blockidstring = Long.toString(cur.getBlock().getBlockId()); @@ -358,7 +363,7 @@ public class DatanodeJspHelper { String datanodeAddr = locs[j].getName(); datanodePort = Integer.parseInt(datanodeAddr.substring(datanodeAddr .indexOf(':') + 1, datanodeAddr.length())); - fqdn = InetAddress.getByName(locs[j].getHost()).getCanonicalHostName(); + fqdn = canonicalize(locs[j].getHost()); String blockUrl = "http://" + fqdn + ":" + locs[j].getInfoPort() + "/browseBlock.jsp?blockId=" + blockidstring + "&blockSize=" + blockSize @@ -370,7 +375,7 @@ public class DatanodeJspHelper { + JspHelper.getDelegationTokenUrlParam(tokenString) + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr); - String blockInfoUrl = "http://" + namenodeHostName + ":" + String blockInfoUrl = "http://" + nnCanonicalName + ":" + namenodeInfoPort + "/block_info_xml.jsp?blockId=" + blockidstring; out.print("
 " @@ -382,7 +387,7 @@ public class DatanodeJspHelper { out.println("
"); out.print("
"); out.print("
Go back to DFS home"); dfs.close(); } @@ -419,8 +424,7 @@ public class DatanodeJspHelper { return; } - final DFSClient dfs = getDFSClient(ugi, - DFSUtil.getSocketAddress(nnAddr), conf); + final DFSClient dfs = getDFSClient(ugi, nnAddr, conf); String bpid = null; Token blockToken = BlockTokenSecretManager.DUMMY_TOKEN; @@ -518,8 +522,7 @@ public class DatanodeJspHelper { String datanodeAddr = d.getName(); nextDatanodePort = Integer.parseInt(datanodeAddr.substring( datanodeAddr.indexOf(':') + 1, datanodeAddr.length())); - nextHost = InetAddress.getByName(d.getHost()) - .getCanonicalHostName(); + nextHost = d.getHost(); nextPort = d.getInfoPort(); } } @@ -533,7 +536,7 @@ public class DatanodeJspHelper { } String nextUrl = null; if (nextBlockIdStr != null) { - nextUrl = "http://" + nextHost + ":" + nextPort + nextUrl = "http://" + canonicalize(nextHost) + ":" + nextPort + "/browseBlock.jsp?blockId=" + nextBlockIdStr + "&blockSize=" + nextBlockSize + "&startOffset=" + nextStartOffset @@ -573,8 +576,7 @@ public class DatanodeJspHelper { String datanodeAddr = d.getName(); prevDatanodePort = Integer.parseInt(datanodeAddr.substring( datanodeAddr.indexOf(':') + 1, datanodeAddr.length())); - prevHost = InetAddress.getByName(d.getHost()) - .getCanonicalHostName(); + prevHost = d.getHost(); prevPort = d.getInfoPort(); } } @@ -591,7 +593,7 @@ public class DatanodeJspHelper { String prevUrl = null; if (prevBlockIdStr != null) { - prevUrl = "http://" + prevHost + ":" + prevPort + prevUrl = "http://" + canonicalize(prevHost) + ":" + prevPort + "/browseBlock.jsp?blockId=" + prevBlockIdStr + "&blockSize=" + prevBlockSize + "&startOffset=" + prevStartOffset @@ -669,8 +671,7 @@ public class DatanodeJspHelper { + "\">"); // fetch the block from the datanode that has the last block for this file - final DFSClient dfs = getDFSClient(ugi, DFSUtil.getSocketAddress(nnAddr), - conf); + final DFSClient dfs = getDFSClient(ugi, nnAddr, conf); List blocks = dfs.getNamenode().getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks(); if (blocks == null || blocks.size() == 0) { @@ -710,6 +711,6 @@ public class DatanodeJspHelper { final DataNode datanode, final Configuration conf, final UserGroupInformation ugi) throws IOException, InterruptedException { final String nnAddr = request.getParameter(JspHelper.NAMENODE_ADDRESS); - return getDFSClient(ugi, DFSUtil.getSocketAddress(nnAddr), conf); + return getDFSClient(ugi, nnAddr, conf); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java index b651867ccd8..cca82a8f675 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java @@ -1139,7 +1139,7 @@ public class DFSAdmin extends FsShell { private ClientDatanodeProtocol getDataNodeProxy(String datanode) throws IOException { - InetSocketAddress datanodeAddr = DFSUtil.getSocketAddress(datanode); + InetSocketAddress datanodeAddr = NetUtils.createSocketAddr(datanode); // Get the current configuration Configuration conf = getConf();