diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index e0dbe146d0d..dc1dcda4015 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -176,7 +176,11 @@ Release 0.23.2 - UNRELEASED
 
     HADOOP-8051 HttpFS documentation it is not wired to the generated site (tucu)
 
-    HADOOP-8055. Hadoop tarball distribution lacks a core-site.xml (harsh)
+    HADOOP-8055. Hadoop tarball distribution lacks a core-site.xml (harsh)
+
+    HADOOP-8052. Hadoop Metrics2 should emit Float.MAX_VALUE (instead of
+    Double.MAX_VALUE) to avoid making Ganglia's gmetad core. (Varun Kapoor
+    via mattf)
 
 Release 0.23.1 - 2012-02-08
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java
index f154269698a..589062a691c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleStat.java
@@ -143,8 +143,16 @@ public double max() {
   @SuppressWarnings("PublicInnerClass")
   public static class MinMax {
 
-    private double min = Double.MAX_VALUE;
-    private double max = Double.MIN_VALUE;
+    // Float.MAX_VALUE is used rather than Double.MAX_VALUE, even though the
+    // min and max variables are of type double.
+    // Float.MAX_VALUE is big enough, and using Double.MAX_VALUE makes
+    // Ganglia core due to buffer overflow.
+    // The same reasoning applies to the MIN_VALUE counterparts.
+    static final double DEFAULT_MIN_VALUE = Float.MAX_VALUE;
+    static final double DEFAULT_MAX_VALUE = Float.MIN_VALUE;
+
+    private double min = DEFAULT_MIN_VALUE;
+    private double max = DEFAULT_MAX_VALUE;
 
     public void add(double value) {
       if (value > max) max = value;
@@ -155,8 +163,8 @@ public void add(double value) {
     public double max() { return max; }
 
     public void reset() {
-      min = Double.MAX_VALUE;
-      max = Double.MIN_VALUE;
+      min = DEFAULT_MIN_VALUE;
+      max = DEFAULT_MAX_VALUE;
     }
 
     public void reset(MinMax other) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
index 36ca6bb1664..0fb0ad8ace9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
@@ -36,8 +36,8 @@ public class TestSampleStat {
     assertEquals("mean", 0.0, stat.mean(), EPSILON);
     assertEquals("variance", 0.0, stat.variance(), EPSILON);
     assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
-    assertEquals("min", Double.MAX_VALUE, stat.min(), EPSILON);
-    assertEquals("max", Double.MIN_VALUE, stat.max(), EPSILON);
+    assertEquals("min", SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON);
+    assertEquals("max", SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON);
 
     stat.add(3);
     assertEquals("num samples", 1L, stat.numSamples());
@@ -60,8 +60,8 @@ public class TestSampleStat {
     assertEquals("mean", 0.0, stat.mean(), EPSILON);
     assertEquals("variance", 0.0, stat.variance(), EPSILON);
     assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
-    assertEquals("min", Double.MAX_VALUE, stat.min(), EPSILON);
-    assertEquals("max", Double.MIN_VALUE, stat.max(), EPSILON);
+    assertEquals("min", SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON);
+    assertEquals("max", SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON);
   }
 
 }