From cae1ed9f6c438e20acdf385e95041f91ae349c71 Mon Sep 17 00:00:00 2001
From: Jing Zhao <jing9@13f79535-47bb-0310-9956-ffa450edef68>
Date: Sat, 5 Oct 2013 20:40:48 +0000
Subject: [PATCH] HDFS-5308. Replace HttpConfig#getSchemePrefix with implicit
 schemes in HDFS JSP. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1529512 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../server/datanode/DatanodeJspHelper.java    | 27 +++++++-----------
 .../server/namenode/ClusterJspHelper.java     |  2 +-
 .../server/namenode/NamenodeJspHelper.java    |  2 +-
 .../hdfs/server/datanode/TestDatanodeJsp.java |  6 ++---
 5 files changed, 17 insertions(+), 23 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index b4024a9372..cf6eba46a4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -362,6 +362,9 @@ Release 2.1.2 - UNRELEASED
     HDFS-5256. Use guava LoadingCache to implement DFSClientCache. (Haohui
     Mai via brandonli)
 
+    HDFS-5308. Replace HttpConfig#getSchemePrefix with implicit schemes in HDFS
+    JSP. (Haohui Mai via jing9)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
index 639468bbc7..11b2713148 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
@@ -19,9 +19,7 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
 import java.net.InetSocketAddress;
-import java.net.URI;
 import java.net.URL;
 import java.net.URLEncoder;
 import java.security.PrivilegedExceptionAction;
@@ -39,7 +37,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -47,9 +44,6 @@
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
-import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
@@ -225,7 +219,7 @@ static void generateDirectoryStructure(JspWriter out,
         JspHelper.addTableFooter(out);
       }
     }
-    out.print("<br><a href=\"" + HttpConfig.getSchemePrefix() + canonicalize(nnAddr) + ":"
+    out.print("<br><a href=\"///" + canonicalize(nnAddr) + ":"
         + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
     dfs.close();
   }
@@ -302,8 +296,7 @@ static void generateFileDetails(JspWriter out,
         Long.MAX_VALUE).getLocatedBlocks();
     // Add the various links for looking at the file contents
     // URL for downloading the full file
-    String downloadUrl = HttpConfig.getSchemePrefix() + req.getServerName() + ":"
-        + req.getServerPort() + "/streamFile" + ServletUtil.encodePath(filename)
+    String downloadUrl = "/streamFile" + ServletUtil.encodePath(filename)
         + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr, true)
         + JspHelper.getDelegationTokenUrlParam(tokenString);
     out.print("<a name=\"viewOptions\"></a>");
@@ -320,7 +313,7 @@ static void generateFileDetails(JspWriter out,
       return;
     }
     String fqdn = canonicalize(chosenNode.getIpAddr());
-    String tailUrl = HttpConfig.getSchemePrefix() + fqdn + ":" + chosenNode.getInfoPort()
+    String tailUrl = "///" + fqdn + ":" + chosenNode.getInfoPort()
         + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
         + "&namenodeInfoPort=" + namenodeInfoPort
         + "&chunkSizeToView=" + chunkSizeToView
@@ -369,7 +362,7 @@ static void generateFileDetails(JspWriter out,
         String datanodeAddr = locs[j].getXferAddr();
         datanodePort = locs[j].getXferPort();
         fqdn = canonicalize(locs[j].getIpAddr());
-        String blockUrl = HttpConfig.getSchemePrefix() + fqdn + ":" + locs[j].getInfoPort()
+        String blockUrl = "///" + fqdn + ":" + locs[j].getInfoPort()
             + "/browseBlock.jsp?blockId=" + blockidstring
             + "&blockSize=" + blockSize
             + "&filename=" + URLEncoder.encode(filename, "UTF-8")
@@ -380,7 +373,7 @@ static void generateFileDetails(JspWriter out,
             + JspHelper.getDelegationTokenUrlParam(tokenString)
             + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
 
-        String blockInfoUrl = HttpConfig.getSchemePrefix() + nnCanonicalName + ":"
+        String blockInfoUrl = "///" + nnCanonicalName + ":"
             + namenodeInfoPort
             + "/block_info_xml.jsp?blockId=" + blockidstring;
         out.print("<td>&nbsp</td><td><a href=\"" + blockUrl + "\">"
@@ -391,7 +384,7 @@ static void generateFileDetails(JspWriter out,
             + "</a></td></tr>");
       }
       out.println("</table>");
-    out.print("<br><a href=\"" + HttpConfig.getSchemePrefix() + nnCanonicalName + ":"
+    out.print("<br><a href=\"///" + nnCanonicalName + ":"
         + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
     dfs.close();
   }
@@ -491,9 +484,7 @@ static void generateFileChunks(JspWriter out, HttpServletRequest req,
     String parent = new File(filename).getParent();
     JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, parent, nnAddr);
     out.print("<hr>");
-    out.print("