From 0de66e4a77fa2f9053ffa43c9f43e8a488488a6e Mon Sep 17 00:00:00 2001
From: Jing Zhao <jing9@apache.org>
Date: Sat, 5 Oct 2013 20:46:52 +0000
Subject: [PATCH] HDFS-5308. Merge change r1529512 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1529516 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../server/datanode/DatanodeJspHelper.java    | 27 +++++++------------
 .../server/namenode/ClusterJspHelper.java     |  2 +-
 .../server/namenode/NamenodeJspHelper.java    |  2 +-
 .../hdfs/server/datanode/TestDatanodeJsp.java |  6 ++---
 5 files changed, 17 insertions(+), 23 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index a024183049b..4c9d2fdb8c0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -118,6 +118,9 @@ Release 2.1.2 - UNRELEASED
     HDFS-5256. Use guava LoadingCache to implement DFSClientCache. (Haohui Mai
     via brandonli)
 
+    HDFS-5308. Replace HttpConfig#getSchemePrefix with implicit schemes in HDFS
+    JSP. (Haohui Mai via jing9)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
index 639468bbc75..11b27131485 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
@@ -19,9 +19,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
 import java.net.InetSocketAddress;
-import java.net.URI;
 import java.net.URL;
 import java.net.URLEncoder;
 import java.security.PrivilegedExceptionAction;
@@ -39,7 +37,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -47,9 +44,6 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
-import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
@@ -225,7 +219,7 @@ public class DatanodeJspHelper {
         JspHelper.addTableFooter(out);
       }
     }
-    out.print("<br><a href=\"" + HttpConfig.getSchemePrefix()
+    out.print("<br><a href=\"///"
         + canonicalize(nnAddr) + ":"
         + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
     dfs.close();
@@ -302,8 +296,7 @@
         Long.MAX_VALUE).getLocatedBlocks();
     // Add the various links for looking at the file contents
     // URL for downloading the full file
-    String downloadUrl = HttpConfig.getSchemePrefix() + req.getServerName() + ":"
-        + req.getServerPort() + "/streamFile" + ServletUtil.encodePath(filename)
+    String downloadUrl = "/streamFile" + ServletUtil.encodePath(filename)
         + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr, true)
         + JspHelper.getDelegationTokenUrlParam(tokenString);
     out.print("<a name=\"viewOptions\"></a>");
@@ -320,7 +313,7 @@
       return;
     }
     String fqdn = canonicalize(chosenNode.getIpAddr());
-    String tailUrl = HttpConfig.getSchemePrefix() + fqdn + ":" + chosenNode.getInfoPort()
+    String tailUrl = "///" + fqdn + ":" + chosenNode.getInfoPort()
         + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
         + "&namenodeInfoPort=" + namenodeInfoPort
         + "&chunkSizeToView=" + chunkSizeToView
@@ -369,7 +362,7 @@
         String datanodeAddr = locs[j].getXferAddr();
         datanodePort = locs[j].getXferPort();
         fqdn = canonicalize(locs[j].getIpAddr());
-        String blockUrl = HttpConfig.getSchemePrefix() + fqdn + ":" + locs[j].getInfoPort()
+        String blockUrl = "///" + fqdn + ":" + locs[j].getInfoPort()
            + "/browseBlock.jsp?blockId=" + blockidstring
            + "&blockSize=" + blockSize
            + "&filename=" + URLEncoder.encode(filename, "UTF-8")
@@ -380,7 +373,7 @@
            + JspHelper.getDelegationTokenUrlParam(tokenString)
            + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
 
-        String blockInfoUrl = HttpConfig.getSchemePrefix() + nnCanonicalName + ":"
+        String blockInfoUrl = "///" + nnCanonicalName + ":"
            + namenodeInfoPort
            + "/block_info_xml.jsp?blockId=" + blockidstring;
        out.print("<td>&nbsp</td><td><a href=\"" + blockUrl + "\">"
"); - out.print("
+    out.print("<br><a href=\"///"
         + canonicalize(nnAddr) + ":"
         + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
     dfs.close();
@@ -491,9 +484,7 @@
     String parent = new File(filename).getParent();
     JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, parent, nnAddr);
     out.print("<hr>");
"); - out.print("