From 267a606831f9b958f2d4a8e43b1779d1706fe50a Mon Sep 17 00:00:00 2001
From: Brandon Li
Date: Mon, 7 Oct 2013 19:02:55 +0000
Subject: [PATCH] HDFS-5307. Support both HTTP and HTTPS in jsp pages.
 Contributed by Haohui Mai

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530027 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt        |   3 +
 .../hadoop/hdfs/server/common/JspHelper.java       |  25 ++-
 .../server/datanode/DatanodeJspHelper.java         | 201 ++++++++----------
 .../server/namenode/ClusterJspHelper.java          |  11 +-
 .../server/namenode/NamenodeJspHelper.java         |  50 ++---
 .../hdfs/server/datanode/TestDatanodeJsp.java      |   4 +
 6 files changed, 155 insertions(+), 139 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e06919de27a..8ddd88f21c5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -394,6 +394,9 @@ Release 2.2.0 - 2013-10-13
     HDFS-5259. Support client which combines appended data with old data
     before sends it to NFS server. (brandonli)
 
+    HDFS-5307. Support both HTTP and HTTPS in jsp pages (Haohui Mai via
+    brandonli)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
index 2728353cc9f..a34f2cf217a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.common;
 
 import com.google.common.base.Charsets;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -55,6 +56,7 @@ import org.apache.hadoop.util.VersionInfo;
 import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.jsp.JspWriter;
+
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
@@ -101,7 +103,7 @@ public class JspHelper {
       return super.hashCode();
     }
   }
-  
+
   // compare two records based on their frequency
   private static class NodeRecordComparator implements Comparator<NodeRecord> {
 
@@ -115,6 +117,27 @@ public class JspHelper {
       return 0;
     }
   }
+
+  /**
+   * A helper class that generates the correct URL for different schema.
+   *
+   */
+  public static final class Url {
+    public static String authority(String scheme, DatanodeID d) {
+      if (scheme.equals("http")) {
+        return d.getInfoAddr();
+      } else if (scheme.equals("https")) {
+        return d.getInfoSecureAddr();
+      } else {
+        throw new IllegalArgumentException("Unknown scheme:" + scheme);
+      }
+    }
+
+    public static String url(String scheme, DatanodeID d) {
+      return scheme + "://" + authority(scheme, d);
+    }
+  }
+
   public static DatanodeInfo bestNode(LocatedBlocks blks, Configuration conf)
       throws IOException {
     HashMap<DatanodeInfo, NodeRecord> map =
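The new JspHelper.Url class is the heart of the change: callers pass the scheme of the incoming request and get back the matching datanode authority, instead of hard-coding the HTTP info port. A minimal usage sketch follows; the servlet wiring and the browseDirectory.jsp target are illustrative assumptions, not part of the patch:

    import javax.servlet.http.HttpServletRequest;

    import org.apache.hadoop.hdfs.protocol.DatanodeID;
    import org.apache.hadoop.hdfs.server.common.JspHelper;

    // Hypothetical helper, for illustration only.
    class SchemeAwareLink {
      // Builds a link to a datanode JSP page that keeps the scheme of the
      // page the user is already on.
      static String browseLink(HttpServletRequest req, DatanodeID node) {
        // req.getScheme() returns "http" or "https"; Url.url() maps it to
        // the node's getInfoAddr() or getInfoSecureAddr() respectively and
        // throws IllegalArgumentException for any other scheme.
        return JspHelper.Url.url(req.getScheme(), node) + "/browseDirectory.jsp";
      }
    }
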
+ * + */ + public static final class Url { + public static String authority(String scheme, DatanodeID d) { + if (scheme.equals("http")) { + return d.getInfoAddr(); + } else if (scheme.equals("https")) { + return d.getInfoSecureAddr(); + } else { + throw new IllegalArgumentException("Unknown scheme:" + scheme); + } + } + + public static String url(String scheme, DatanodeID d) { + return scheme + "://" + authority(scheme, d); + } + } + public static DatanodeInfo bestNode(LocatedBlocks blks, Configuration conf) throws IOException { HashMap map = diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java index 11b27131485..c931698a32a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdfs.server.datanode; import java.io.File; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.InetSocketAddress; import java.net.URL; @@ -35,6 +36,7 @@ import javax.servlet.jsp.JspWriter; import org.apache.commons.lang.StringEscapeUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; @@ -45,16 +47,22 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager; import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.http.HtmlQuoting; -import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ServletUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.VersionInfo; +import com.google.common.base.Predicate; +import com.google.common.collect.Iterables; + @InterfaceAudience.Private public class DatanodeJspHelper { + private static final int PREV_BLOCK = -1; + private static final int NEXT_BLOCK = 1; + private static DFSClient getDFSClient(final UserGroupInformation user, final String addr, final Configuration conf @@ -137,10 +145,10 @@ public class DatanodeJspHelper { out.print("Empty file"); } else { DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock, conf); - String fqdn = canonicalize(chosenNode.getIpAddr()); int datanodePort = chosenNode.getXferPort(); - String redirectLocation = HttpConfig.getSchemePrefix() + fqdn + ":" - + chosenNode.getInfoPort() + "/browseBlock.jsp?blockId=" + String redirectLocation = JspHelper.Url.url(req.getScheme(), + chosenNode) + + "/browseBlock.jsp?blockId=" + firstBlock.getBlock().getBlockId() + "&blockSize=" + firstBlock.getBlock().getNumBytes() + "&genstamp=" + firstBlock.getBlock().getGenerationStamp() + "&filename=" @@ -312,8 +320,8 @@ public class DatanodeJspHelper { dfs.close(); return; } - String fqdn = canonicalize(chosenNode.getIpAddr()); - String tailUrl = "///" + fqdn + ":" + chosenNode.getInfoPort() + + String tailUrl = "///" + 
JspHelper.Url.authority(req.getScheme(), chosenNode) + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8") + "&namenodeInfoPort=" + namenodeInfoPort + "&chunkSizeToView=" + chunkSizeToView @@ -361,8 +369,7 @@ public class DatanodeJspHelper { for (int j = 0; j < locs.length; j++) { String datanodeAddr = locs[j].getXferAddr(); datanodePort = locs[j].getXferPort(); - fqdn = canonicalize(locs[j].getIpAddr()); - String blockUrl = "///" + fqdn + ":" + locs[j].getInfoPort() + String blockUrl = "///" + JspHelper.Url.authority(req.getScheme(), locs[j]) + "/browseBlock.jsp?blockId=" + blockidstring + "&blockSize=" + blockSize + "&filename=" + URLEncoder.encode(filename, "UTF-8") @@ -492,112 +499,23 @@ public class DatanodeJspHelper { out.print("Advanced view/download options
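All of the rewritten links above follow the same pattern: a scheme-relative "///" prefix plus JspHelper.Url.authority(), so a page served over https links to the datanode's https info address and an http page to its http one. A sketch that mirrors the patched tailUrl construction; the class and method names here are hypothetical:

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;

    import javax.servlet.http.HttpServletRequest;

    import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    import org.apache.hadoop.hdfs.server.common.JspHelper;

    class TailLinkSketch {
      static String tailUrl(HttpServletRequest req, DatanodeInfo chosenNode,
          String filename) throws UnsupportedEncodingException {
        // authority() picks the "host:port" pair that matches the scheme of
        // the current request, so the link never sends an https user to the
        // plain-http info port or vice versa.
        return "///" + JspHelper.Url.authority(req.getScheme(), chosenNode)
            + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8");
      }
    }
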
"); out.print("
"); - // Determine the prev & next blocks - long nextStartOffset = 0; - long nextBlockSize = 0; - String nextBlockIdStr = null; - String nextGenStamp = null; - String nextHost = req.getServerName(); - int nextPort = req.getServerPort(); - int nextDatanodePort = datanodePort; - // determine data for the next link - if (startOffset + chunkSizeToView >= blockSize) { - // we have to go to the next block from this point onwards - List blocks = dfs.getNamenode().getBlockLocations(filename, 0, - Long.MAX_VALUE).getLocatedBlocks(); - for (int i = 0; i < blocks.size(); i++) { - if (blocks.get(i).getBlock().getBlockId() == blockId) { - if (i != blocks.size() - 1) { - LocatedBlock nextBlock = blocks.get(i + 1); - nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId()); - nextGenStamp = Long.toString(nextBlock.getBlock() - .getGenerationStamp()); - nextStartOffset = 0; - nextBlockSize = nextBlock.getBlock().getNumBytes(); - DatanodeInfo d = JspHelper.bestNode(nextBlock, conf); - nextDatanodePort = d.getXferPort(); - nextHost = d.getIpAddr(); - nextPort = d.getInfoPort(); - } - } - } - } else { - // we are in the same block - nextBlockIdStr = blockId.toString(); - nextStartOffset = startOffset + chunkSizeToView; - nextBlockSize = blockSize; - nextGenStamp = genStamp.toString(); - } - String nextUrl = null; - if (nextBlockIdStr != null) { - nextUrl = "///" + canonicalize(nextHost) + ":" + nextPort - + "/browseBlock.jsp?blockId=" + nextBlockIdStr - + "&blockSize=" + nextBlockSize - + "&startOffset=" + nextStartOffset - + "&genstamp=" + nextGenStamp - + "&filename=" + URLEncoder.encode(filename, "UTF-8") - + "&chunkSizeToView=" + chunkSizeToView - + "&datanodePort=" + nextDatanodePort - + "&namenodeInfoPort=" + namenodeInfoPort - + JspHelper.getDelegationTokenUrlParam(tokenString) - + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr); + String authority = req.getServerName() + ":" + req.getServerPort(); + String nextUrl = generateLinksForAdjacentBlock(NEXT_BLOCK, authority, + datanodePort, startOffset, chunkSizeToView, blockSize, blockId, + genStamp, dfs, filename, conf, req.getScheme(), tokenString, + namenodeInfoPort, nnAddr); + if (nextUrl != null) { out.print("View Next chunk  "); } - // determine data for the prev link - String prevBlockIdStr = null; - String prevGenStamp = null; - long prevStartOffset = 0; - long prevBlockSize = 0; - String prevHost = req.getServerName(); - int prevPort = req.getServerPort(); - int prevDatanodePort = datanodePort; - if (startOffset == 0) { - List blocks = dfs.getNamenode().getBlockLocations(filename, 0, - Long.MAX_VALUE).getLocatedBlocks(); - for (int i = 0; i < blocks.size(); i++) { - if (blocks.get(i).getBlock().getBlockId() == blockId) { - if (i != 0) { - LocatedBlock prevBlock = blocks.get(i - 1); - prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId()); - prevGenStamp = Long.toString(prevBlock.getBlock() - .getGenerationStamp()); - prevStartOffset = prevBlock.getBlock().getNumBytes() - - chunkSizeToView; - if (prevStartOffset < 0) - prevStartOffset = 0; - prevBlockSize = prevBlock.getBlock().getNumBytes(); - DatanodeInfo d = JspHelper.bestNode(prevBlock, conf); - prevDatanodePort = d.getXferPort(); - prevHost = d.getIpAddr(); - prevPort = d.getInfoPort(); - } - } - } - } else { - // we are in the same block - prevBlockIdStr = blockId.toString(); - prevStartOffset = startOffset - chunkSizeToView; - if (prevStartOffset < 0) - prevStartOffset = 0; - prevBlockSize = blockSize; - prevGenStamp = genStamp.toString(); - } - String prevUrl = 
null; - if (prevBlockIdStr != null) { - prevUrl = "///" + canonicalize(prevHost) + ":" + prevPort - + "/browseBlock.jsp?blockId=" + prevBlockIdStr - + "&blockSize=" + prevBlockSize - + "&startOffset=" + prevStartOffset - + "&filename=" + URLEncoder.encode(filename, "UTF-8") - + "&chunkSizeToView=" + chunkSizeToView - + "&genstamp=" + prevGenStamp - + "&datanodePort=" + prevDatanodePort - + "&namenodeInfoPort=" + namenodeInfoPort - + JspHelper.getDelegationTokenUrlParam(tokenString) - + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr); + String prevUrl = generateLinksForAdjacentBlock(PREV_BLOCK, authority, + datanodePort, startOffset, chunkSizeToView, blockSize, blockId, + genStamp, dfs, filename, conf, req.getScheme(), tokenString, + namenodeInfoPort, nnAddr); + if (prevUrl != null) { out.print("View Prev chunk  "); } + out.print("
"); out.print("