HDFS-5317. Go back to DFS Home link does not work on datanode webUI. Contributed by Haohui Mai
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530114 13f79535-47bb-0310-9956-ffa450edef68
commit 61c1dd96c3
parent 1fe1942328
@@ -400,6 +400,9 @@ Release 2.2.0 - 2013-10-13
     HDFS-5291. Standby namenode after transition to active goes into safemode.
     (jing9)
 
+    HDFS-5317. Go back to DFS Home link does not work on datanode webUI
+    (Haohui Mai via brandonli)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES
@@ -23,6 +23,7 @@
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.management.MemoryUsage;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URLEncoder;
@@ -61,6 +62,7 @@
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -739,11 +741,11 @@ private String nodeHeaderStr(String name) {
   }
 
   private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
-      String suffix, boolean alive, int nnHttpPort, String nnaddr, String scheme)
+      String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
       throws IOException {
     // from nn_browsedfscontent.jsp:
     String url = "///" + JspHelper.Url.authority(scheme, d)
-        + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir="
+        + "/browseDirectory.jsp?namenodeInfoPort=" + nnInfoPort + "&dir="
         + URLEncoder.encode("/", "UTF-8")
         + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnaddr);
 
@@ -760,9 +762,9 @@ private void generateNodeDataHeader(JspWriter out, DatanodeDescriptor d,
   }
 
   void generateDecommissioningNodeData(JspWriter out, DatanodeDescriptor d,
-      String suffix, boolean alive, int nnHttpPort, String nnaddr, String scheme)
+      String suffix, boolean alive, int nnInfoPort, String nnaddr, String scheme)
       throws IOException {
-    generateNodeDataHeader(out, d, suffix, alive, nnHttpPort, nnaddr, scheme);
+    generateNodeDataHeader(out, d, suffix, alive, nnInfoPort, nnaddr, scheme);
     if (!alive) {
       return;
     }
@@ -786,7 +788,7 @@ void generateDecommissioningNodeData(JspWriter out, DatanodeDescriptor d,
   }
 
   void generateNodeData(JspWriter out, DatanodeDescriptor d, String suffix,
-      boolean alive, int nnHttpPort, String nnaddr, String scheme) throws IOException {
+      boolean alive, int nnInfoPort, String nnaddr, String scheme) throws IOException {
     /*
      * Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5 we use:
      * 1) d.getHostName():d.getPort() to display. Domain and port are stripped
@@ -798,7 +800,7 @@ void generateNodeData(JspWriter out, DatanodeDescriptor d, String suffix,
      * interact with datanodes.
      */
 
-    generateNodeDataHeader(out, d, suffix, alive, nnHttpPort, nnaddr, scheme);
+    generateNodeDataHeader(out, d, suffix, alive, nnInfoPort, nnaddr, scheme);
     long currentTime = Time.now();
     long timestamp = d.getLastUpdate();
     if (!alive) {
@@ -865,11 +867,8 @@ void generateNodesList(ServletContext context, JspWriter out,
     final List<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
     dm.fetchDatanodes(live, dead, true);
 
-    InetSocketAddress nnSocketAddress =
-        (InetSocketAddress)context.getAttribute(
-            NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
-    String nnaddr = nnSocketAddress.getAddress().getHostAddress() + ":"
-        + nnSocketAddress.getPort();
+    String nnaddr = nn.getServiceRpcAddress().getAddress().getHostName() + ":"
+        + nn.getServiceRpcAddress().getPort();
 
     whatNodes = request.getParameter("whatNodes"); // show only live or only
                                                    // dead nodes
@@ -905,16 +904,11 @@ void generateNodesList(ServletContext context, JspWriter out,
 
     counterReset();
 
-    try {
-      Thread.sleep(1000);
-    } catch (InterruptedException e) {
-    }
-
     if (live.isEmpty() && dead.isEmpty()) {
       out.print("There are no datanodes in the cluster");
     } else {
 
-      int nnHttpPort = nn.getHttpAddress().getPort();
+      int nnInfoPort = request.getServerPort();
       out.print("<div id=\"dfsnodetable\"> ");
       if (whatNodes.equals("LIVE")) {
         out.print("<a name=\"LiveNodes\" id=\"title\">" + "Live Datanodes : "
@@ -956,7 +950,7 @@ void generateNodesList(ServletContext context, JspWriter out,
 
         JspHelper.sortNodeList(live, sorterField, sorterOrder);
         for (int i = 0; i < live.size(); i++) {
-          generateNodeData(out, live.get(i), port_suffix, true, nnHttpPort,
+          generateNodeData(out, live.get(i), port_suffix, true, nnInfoPort,
               nnaddr, request.getScheme());
         }
       }
@@ -979,7 +973,7 @@ void generateNodesList(ServletContext context, JspWriter out,
         JspHelper.sortNodeList(dead, sorterField, sorterOrder);
         for (int i = 0; i < dead.size(); i++) {
           generateNodeData(out, dead.get(i), port_suffix, false,
-              nnHttpPort, nnaddr, request.getScheme());
+              nnInfoPort, nnaddr, request.getScheme());
         }
 
         out.print("</table>\n");
@@ -1010,7 +1004,7 @@ void generateNodesList(ServletContext context, JspWriter out,
         JspHelper.sortNodeList(decommissioning, "name", "ASC");
         for (int i = 0; i < decommissioning.size(); i++) {
           generateDecommissioningNodeData(out, decommissioning.get(i),
-              port_suffix, true, nnHttpPort, nnaddr, request.getScheme());
+              port_suffix, true, nnInfoPort, nnaddr, request.getScheme());
         }
         out.print("</table>\n");
       }
@@ -61,9 +61,10 @@ private static void testViewingFile(MiniDFSCluster cluster, String filePath)
 
     InetSocketAddress nnIpcAddress = cluster.getNameNode().getNameNodeAddress();
     InetSocketAddress nnHttpAddress = cluster.getNameNode().getHttpAddress();
-    int dnInfoPort = cluster.getDataNodes().get(0).getInfoPort();
+    String base = JspHelper.Url.url("http", cluster.getDataNodes().get(0)
+        .getDatanodeId());
 
-    URL url = new URL("http://localhost:" + dnInfoPort + "/"
+    URL url = new URL(base + "/"
         + "browseDirectory.jsp" + JspHelper.getUrlParam("dir",
             URLEncoder.encode(testPath.toString(), "UTF-8"), true)
         + JspHelper.getUrlParam("namenodeInfoPort", Integer
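
The substance of the patch: the JSP helpers used to build the "Go back to DFS Home" link from nn.getHttpAddress().getPort(), the statically configured NameNode HTTP port, so the link broke whenever the page was actually reached over a different scheme or port (for example HTTPS, or the ephemeral ports a MiniDFSCluster assigns in tests). The patch threads request.getServerPort() through as nnInfoPort instead, making the generated namenodeInfoPort parameter match the address the browser really used. A minimal sketch of the idea, assuming a standard servlet container; the class and helper names below are illustrative, not taken from the patch:

import javax.servlet.http.HttpServletRequest;

public class DfsHomeLinkSketch {

  // Before the fix: the link is derived from the NameNode's configured
  // HTTP port, which is wrong when the UI is served on any other
  // scheme/host/port (HTTPS, proxies, randomized test ports).
  static String linkFromConfig(String nnHost, int nnHttpPort) {
    return "http://" + nnHost + ":" + nnHttpPort + "/dfshealth.jsp";
  }

  // After the fix: reuse the scheme, host, and port of the incoming
  // request, mirroring the patch's use of request.getServerPort(), so
  // the link resolves wherever the page was actually served from.
  static String linkFromRequest(HttpServletRequest request) {
    return request.getScheme() + "://" + request.getServerName() + ":"
        + request.getServerPort() + "/dfshealth.jsp";
  }
}

The test change follows the same principle: rather than hard-coding "http://localhost:" plus the info port, it derives the base URL from the datanode's identity through JspHelper.Url.url, so the assertion holds no matter which port the MiniDFSCluster happened to pick.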