HDFS-3099. SecondaryNameNode does not properly initialize metrics system. Contributed by Aaron T. Myers.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1301222 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Aaron Myers 2012-03-15 21:51:23 +00:00
parent 6d96a28a08
commit 65425b0961
3 changed files with 44 additions and 23 deletions

View File

@@ -303,6 +303,9 @@ Release 0.23.3 - UNRELEASED

    HDFS-3005. FSVolume.decDfsUsed(..) should be synchronized. (szetszwo)

    HDFS-3099. SecondaryNameNode does not properly initialize metrics system.
    (atm)

  BREAKDOWN OF HDFS-1623 SUBTASKS

    HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)

View File

@@ -204,6 +204,7 @@ public class SecondaryNameNode implements Runnable {
          DFS_SECONDARY_NAMENODE_USER_NAME_KEY, infoBindAddress);
    }
    // initiate Java VM metrics
    DefaultMetricsSystem.initialize("SecondaryNameNode");
    JvmMetrics.create("SecondaryNameNode",
        conf.get(DFS_METRICS_SESSION_ID_KEY), DefaultMetricsSystem.instance());

View File

@@ -17,42 +17,59 @@
 */
package org.apache.hadoop.hdfs.server.namenode;

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestSecondaryWebUi { public class TestSecondaryWebUi {
private static MiniDFSCluster cluster;
private static SecondaryNameNode snn;
private static Configuration conf = new Configuration();
@BeforeClass
public static void setUpCluster() throws IOException {
conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
"0.0.0.0:0");
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
.build();
cluster.waitActive();
snn = new SecondaryNameNode(conf);
}
@AfterClass
public static void shutDownCluster() {
if (cluster != null) {
cluster.shutdown();
}
if (snn != null) {
snn.shutdown();
}
}
@Test @Test
public void testSecondaryWebUi() throws IOException { public void testSecondaryWebUi() throws IOException {
Configuration conf = new Configuration(); String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
"0.0.0.0:0"); assertTrue(pageContents.contains("Last Checkpoint Time"));
MiniDFSCluster cluster = null; }
SecondaryNameNode snn = null;
try { @Test
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0) public void testSecondaryWebJmx() throws MalformedURLException, IOException {
.build(); String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
cluster.waitActive(); SecondaryNameNode.getHttpAddress(conf).getPort() + "/jmx"));
assertTrue(pageContents.contains(
snn = new SecondaryNameNode(conf); "Hadoop:service=SecondaryNameNode,name=JvmMetrics"));
String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
assertTrue(pageContents.contains("Last Checkpoint Time"));
} finally {
if (cluster != null) {
cluster.shutdown();
}
if (snn != null) {
snn.shutdown();
}
}
} }
} }