From 51d1c6ffa711b02f14b996204dfda66b01f88851 Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Wed, 2 Sep 2009 22:01:34 +0000
Subject: [PATCH] HADOOP-4675 Current Ganglia metrics implementation is incompatible with Ganglia 3.1

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@810709 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                              |  3 +++
 conf/hadoop-metrics.properties           |  6 ++++-
 .../metrics/ganglia/GangliaContext.java  | 22 +++++++++----------
 3 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index e1e80ab828..7d2b08e4e8 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -515,6 +515,9 @@ Trunk (unreleased changes)
     HADOOP-6224. Add a method to WritableUtils performing a bounded read of
     an encoded String. (Jothi Padmanabhan via cdouglas)
 
+    HADOOP-4675 Current Ganglia metrics implementation is incompatible with Ganglia 3.1
+    (Brian Brockelman, Scott Beardsley via stack)
+
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a
diff --git a/conf/hadoop-metrics.properties b/conf/hadoop-metrics.properties
index d8c3d8a913..d3ce41ed1e 100644
--- a/conf/hadoop-metrics.properties
+++ b/conf/hadoop-metrics.properties
@@ -7,7 +7,9 @@ dfs.class=org.apache.hadoop.metrics.spi.NullContext
 #dfs.fileName=/tmp/dfsmetrics.log
 
 # Configuration of the "dfs" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+# dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
 # dfs.period=10
 # dfs.servers=localhost:8649
 
@@ -21,13 +23,15 @@ mapred.class=org.apache.hadoop.metrics.spi.NullContext
 #mapred.fileName=/tmp/mrmetrics.log
 
 # Configuration of the "mapred" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+# mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
 # mapred.period=10
 # mapred.servers=localhost:8649
 
 
 # Configuration of the "jvm" context for null
-jvm.class=org.apache.hadoop.metrics.spi.NullContext
+#jvm.class=org.apache.hadoop.metrics.spi.NullContext
 
 # Configuration of the "jvm" context for file
 #jvm.class=org.apache.hadoop.metrics.file.FileContext
diff --git a/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java b/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
index 1affb02f72..66c7a37770 100644
--- a/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
+++ b/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
@@ -71,16 +71,16 @@ public class GangliaContext extends AbstractMetricsContext {
     typeTable.put(Float.class, "float");
   }
 
-  private byte[] buffer = new byte[BUFFER_SIZE];
-  private int offset;
+  protected byte[] buffer = new byte[BUFFER_SIZE];
+  protected int offset;
 
-  private List metricsServers;
+  protected List metricsServers;
   private Map unitsTable;
   private Map slopeTable;
   private Map tmaxTable;
   private Map dmaxTable;
 
-  private DatagramSocket datagramSocket;
+  protected DatagramSocket datagramSocket;
 
   /** Creates a new instance of GangliaContext */
   public GangliaContext() {
@@ -132,7 +132,7 @@ public void emitRecord(String contextName, String recordName,
     }
   }
 
-  private void emitMetric(String name, String type, String value)
+  protected void emitMetric(String name, String type, String value)
   throws IOException {
     String units = getUnits(name);
     int slope = getSlope(name);
@@ -156,7 +156,7 @@ private void emitMetric(String name, String type, String value)
     }
   }
 
-  private String getUnits(String metricName) {
+  protected String getUnits(String metricName) {
    String result = unitsTable.get(metricName);
    if (result == null) {
      result = DEFAULT_UNITS;
@@ -164,7 +164,7 @@ private String getUnits(String metricName) {
     return result;
   }
 
-  private int getSlope(String metricName) {
+  protected int getSlope(String metricName) {
     String slopeString = slopeTable.get(metricName);
     if (slopeString == null) {
       slopeString = DEFAULT_SLOPE;
@@ -172,7 +172,7 @@ private int getSlope(String metricName) {
     return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
   }
 
-  private int getTmax(String metricName) {
+  protected int getTmax(String metricName) {
     if (tmaxTable == null) {
       return DEFAULT_TMAX;
     }
@@ -185,7 +185,7 @@ private int getTmax(String metricName) {
     }
   }
-  private int getDmax(String metricName) {
+  protected int getDmax(String metricName) {
     String dmaxString = dmaxTable.get(metricName);
     if (dmaxString == null) {
       return DEFAULT_DMAX;
     }
@@ -200,7 +200,7 @@
    * as an int, followed by the bytes of the string, padded if necessary to
    * a multiple of 4.
    */
-  private void xdr_string(String s) {
+  protected void xdr_string(String s) {
     byte[] bytes = s.getBytes();
     int len = bytes.length;
     xdr_int(len);
@@ -222,7 +222,7 @@ private void pad() {
   /**
    * Puts an integer into the buffer as 4 bytes, big-endian.
    */
-  private void xdr_int(int i) {
+  protected void xdr_int(int i) {
     buffer[offset++] = (byte)((i >> 24) & 0xff);
     buffer[offset++] = (byte)((i >> 16) & 0xff);
     buffer[offset++] = (byte)((i >> 8) & 0xff);
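
Note (not part of the patch): the private -> protected changes above (buffer, offset, metricsServers, datagramSocket, emitMetric, the getUnits/getSlope/getTmax/getDmax accessors and the xdr_* helpers) exist so that a Ganglia 3.1 context can be written as a subclass of GangliaContext. The GangliaContext31 class referenced in hadoop-metrics.properties is not shown in this diff; the code below is only a hypothetical, illustrative sketch of the subclassing pattern the patch enables, not the actual Ganglia 3.1 wire-format implementation.

// ExampleGanglia31Context.java -- hypothetical sketch, NOT the GangliaContext31
// added by HADOOP-4675.  It only demonstrates that the members made protected
// by this patch can be reused from a subclass that emits its own packet layout.
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.SocketAddress;

import org.apache.hadoop.metrics.ganglia.GangliaContext;

public class ExampleGanglia31Context extends GangliaContext {

  @Override
  protected void emitMetric(String name, String type, String value)
      throws IOException {
    // A real Ganglia 3.1 context would encode the 3.1 metadata/value messages
    // here; this sketch simply rebuilds a packet with the inherited helpers.
    offset = 0;                  // protected field inherited from GangliaContext
    xdr_string(name);            // protected XDR helpers inherited from GangliaContext
    xdr_string(type);
    xdr_string(value);
    xdr_string(getUnits(name));  // per-metric attributes, protected since this patch
    xdr_int(getSlope(name));
    xdr_int(getTmax(name));
    xdr_int(getDmax(name));

    // Send the datagram to every configured gmond, as the base class does.
    for (Object server : metricsServers) {
      DatagramPacket packet =
          new DatagramPacket(buffer, offset, (SocketAddress) server);
      datagramSocket.send(packet);
    }
  }
}

To enable Ganglia reporting, uncomment the matching lines in conf/hadoop-metrics.properties as described by the "Pick one" comments above: GangliaContext for a Ganglia 3.0 gmond, GangliaContext31 for Ganglia 3.1, plus the corresponding *.period and *.servers settings.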