HADOOP-4675 Current Ganglia metrics implementation is incompatible with Ganglia 3.1

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@810709 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack committed 2009-09-02 22:01:34 +00:00
parent a3c52b93ea
commit 51d1c6ffa7
3 changed files with 19 additions and 12 deletions


@@ -515,6 +515,9 @@ Trunk (unreleased changes)
     HADOOP-6224. Add a method to WritableUtils performing a bounded read of an
     encoded String. (Jothi Padmanabhan via cdouglas)
+
+    HADOOP-4675 Current Ganglia metrics implementation is incompatible with Ganglia 3.1
+    (Brian Brockelman, Scott Beardsley via stack)

   OPTIMIZATIONS

     HADOOP-5595. NameNode does not need to run a replicator to choose a


@@ -7,7 +7,9 @@ dfs.class=org.apache.hadoop.metrics.spi.NullContext
 #dfs.fileName=/tmp/dfsmetrics.log

 # Configuration of the "dfs" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+# dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
 # dfs.period=10
 # dfs.servers=localhost:8649
@@ -21,13 +23,15 @@ mapred.class=org.apache.hadoop.metrics.spi.NullContext
 #mapred.fileName=/tmp/mrmetrics.log

 # Configuration of the "mapred" context for ganglia
+# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
+# mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
 # mapred.period=10
 # mapred.servers=localhost:8649

 # Configuration of the "jvm" context for null
-jvm.class=org.apache.hadoop.metrics.spi.NullContext
+#jvm.class=org.apache.hadoop.metrics.spi.NullContext

 # Configuration of the "jvm" context for file
 #jvm.class=org.apache.hadoop.metrics.file.FileContext
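
With this change a deployment picks the Ganglia wire format per metrics context simply by choosing the class name. A minimal sketch of the "dfs" stanza for a Ganglia 3.1 gmond, reusing the keys and the example host/period values already shown in the comments above (both are illustrative):

  # Send "dfs" metrics to a Ganglia 3.1 collector every 10 seconds
  dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
  dfs.period=10
  dfs.servers=localhost:8649

Sites still running Ganglia 3.0 keep the original org.apache.hadoop.metrics.ganglia.GangliaContext line instead.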


@@ -71,16 +71,16 @@ public class GangliaContext extends AbstractMetricsContext {
     typeTable.put(Float.class, "float");
   }

-  private byte[] buffer = new byte[BUFFER_SIZE];
-  private int offset;
-  private List<? extends SocketAddress> metricsServers;
+  protected byte[] buffer = new byte[BUFFER_SIZE];
+  protected int offset;
+  protected List<? extends SocketAddress> metricsServers;
   private Map<String,String> unitsTable;
   private Map<String,String> slopeTable;
   private Map<String,String> tmaxTable;
   private Map<String,String> dmaxTable;
-  private DatagramSocket datagramSocket;
+  protected DatagramSocket datagramSocket;

   /** Creates a new instance of GangliaContext */
   public GangliaContext() {
@@ -132,7 +132,7 @@ public class GangliaContext extends AbstractMetricsContext {
     }
   }

-  private void emitMetric(String name, String type, String value)
+  protected void emitMetric(String name, String type, String value)
     throws IOException {
     String units = getUnits(name);
     int slope = getSlope(name);
@@ -156,7 +156,7 @@ public class GangliaContext extends AbstractMetricsContext {
     }
   }

-  private String getUnits(String metricName) {
+  protected String getUnits(String metricName) {
     String result = unitsTable.get(metricName);
     if (result == null) {
       result = DEFAULT_UNITS;
@@ -164,7 +164,7 @@ public class GangliaContext extends AbstractMetricsContext {
     return result;
   }

-  private int getSlope(String metricName) {
+  protected int getSlope(String metricName) {
     String slopeString = slopeTable.get(metricName);
     if (slopeString == null) {
       slopeString = DEFAULT_SLOPE;
@@ -172,7 +172,7 @@ public class GangliaContext extends AbstractMetricsContext {
     return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
   }

-  private int getTmax(String metricName) {
+  protected int getTmax(String metricName) {
     if (tmaxTable == null) {
       return DEFAULT_TMAX;
     }
@@ -185,7 +185,7 @@ public class GangliaContext extends AbstractMetricsContext {
     }
   }

-  private int getDmax(String metricName) {
+  protected int getDmax(String metricName) {
     String dmaxString = dmaxTable.get(metricName);
     if (dmaxString == null) {
       return DEFAULT_DMAX;
@@ -200,7 +200,7 @@ public class GangliaContext extends AbstractMetricsContext {
    * as an int, followed by the bytes of the string, padded if necessary to
    * a multiple of 4.
    */
-  private void xdr_string(String s) {
+  protected void xdr_string(String s) {
     byte[] bytes = s.getBytes();
     int len = bytes.length;
     xdr_int(len);
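
As the javadoc above spells out, xdr_string writes the string length as a 4-byte integer and then the raw bytes padded out to a multiple of 4, so a 5-byte value such as "float" occupies 4 + 8 = 12 bytes of the datagram and every field stays 4-byte aligned.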
@@ -222,7 +222,7 @@ public class GangliaContext extends AbstractMetricsContext {
   /**
    * Puts an integer into the buffer as 4 bytes, big-endian.
    */
-  private void xdr_int(int i) {
+  protected void xdr_int(int i) {
     buffer[offset++] = (byte)((i >> 24) & 0xff);
     buffer[offset++] = (byte)((i >> 16) & 0xff);
     buffer[offset++] = (byte)((i >> 8) & 0xff);
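
Widening these members from private to protected is what lets a subclass emit an alternate wire format without duplicating the socket and XDR plumbing. The sketch below is illustrative only: a hypothetical subclass (named ExampleGangliaContext here; it is not the GangliaContext31 class referenced in the updated configuration, whose real Ganglia 3.1 packet layout is more involved) that overrides emitMetric and reuses the now-protected buffer, helper methods, and socket.

  import java.io.IOException;
  import java.net.DatagramPacket;
  import java.net.SocketAddress;

  import org.apache.hadoop.metrics.ganglia.GangliaContext;

  // Hypothetical subclass: a simplified stand-in for a custom packet
  // layout, not the actual Ganglia 3.1 protocol.
  public class ExampleGangliaContext extends GangliaContext {
    @Override
    protected void emitMetric(String name, String type, String value)
        throws IOException {
      offset = 0;                  // reuse the inherited packet buffer
      xdr_string(type);            // e.g. "int32", "float"
      xdr_string(name);            // metric name
      xdr_string(value);           // stringified sample
      xdr_string(getUnits(name));  // units/slope/tmax/dmax come from the
      xdr_int(getSlope(name));     // per-metric tables GangliaContext
      xdr_int(getTmax(name));      // already parsed out of the config
      xdr_int(getDmax(name));
      // Send the assembled datagram to every configured gmond.
      for (SocketAddress address : metricsServers) {
        datagramSocket.send(new DatagramPacket(buffer, offset, address));
      }
    }
  }

Because only emitMetric and the low-level buffer handling were opened up, a subclass inherits the servers/period/units/slope/tmax/dmax configuration handling unchanged and replaces just the packet-building step.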