HBASE-8392 TestMetricMBeanBase#testGetAttribute is flakey under hadoop2 profile
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1475998 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
7b3dc4f7ba
commit
76bae3d62d
|
@ -1,52 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.metrics;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.hbase.util.Pair;
|
||||
import org.apache.hadoop.hbase.SmallTests;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
@Deprecated
|
||||
@Category(SmallTests.class)
|
||||
public class TestExactCounterMetric {
|
||||
|
||||
@Test
|
||||
public void testBasic() {
|
||||
final ExactCounterMetric counter = new ExactCounterMetric("testCounter", null);
|
||||
for (int i = 1; i <= 10; i++) {
|
||||
for (int j = 0; j < i; j++) {
|
||||
counter.update(i + "");
|
||||
}
|
||||
}
|
||||
|
||||
List<Pair<String, Long>> topFive = counter.getTop(5);
|
||||
Long i = 10L;
|
||||
for (Pair<String, Long> entry : topFive) {
|
||||
Assert.assertEquals(i + "", entry.getFirst());
|
||||
Assert.assertEquals(i, entry.getSecond());
|
||||
i--;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,68 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.metrics;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import com.yammer.metrics.stats.ExponentiallyDecayingSample;
|
||||
import com.yammer.metrics.stats.Snapshot;
|
||||
import org.apache.hadoop.hbase.SmallTests;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
@Deprecated
|
||||
@Category(SmallTests.class)
|
||||
public class TestExponentiallyDecayingSample {
|
||||
|
||||
@Test
|
||||
public void testBasic() {
|
||||
final ExponentiallyDecayingSample sample =
|
||||
new ExponentiallyDecayingSample(100, 0.99);
|
||||
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
sample.update(i);
|
||||
}
|
||||
Assert.assertEquals(100, sample.size());
|
||||
|
||||
final Snapshot snapshot = sample.getSnapshot();
|
||||
Assert.assertEquals(100, snapshot.size());
|
||||
|
||||
for (double i : snapshot.getValues()) {
|
||||
Assert.assertTrue(i >= 0.0 && i < 1000.0);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTooBig() throws Exception {
|
||||
final ExponentiallyDecayingSample sample =
|
||||
new ExponentiallyDecayingSample(100, 0.99);
|
||||
for (int i = 0; i < 10; i++) {
|
||||
sample.update(i);
|
||||
}
|
||||
Assert.assertEquals(10, sample.size());
|
||||
|
||||
final Snapshot snapshot = sample.getSnapshot();
|
||||
Assert.assertEquals(10, sample.size());
|
||||
|
||||
for (double i : snapshot.getValues()) {
|
||||
Assert.assertTrue(i >= 0.0 && i < 1000.0);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,103 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.metrics;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Random;
|
||||
|
||||
import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
|
||||
import com.yammer.metrics.stats.Snapshot;
|
||||
import org.apache.hadoop.hbase.SmallTests;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
@Deprecated
|
||||
@Category(SmallTests.class)
|
||||
public class TestMetricsHistogram {
|
||||
|
||||
@Test
|
||||
public void testBasicUniform() {
|
||||
MetricsHistogram h = new MetricsHistogram("testHistogram", null);
|
||||
|
||||
for (int i = 0; i < 100; i++) {
|
||||
h.update(i);
|
||||
}
|
||||
|
||||
Assert.assertEquals(100, h.getCount());
|
||||
Assert.assertEquals(0, h.getMin());
|
||||
Assert.assertEquals(99, h.getMax());
|
||||
}
|
||||
|
||||
private static int safeIndex(int i, int len) {
|
||||
if (i < len && i>= 0) {
|
||||
return i;
|
||||
} else if (i >= len) {
|
||||
return len - 1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRandom() {
|
||||
final Random r = new Random();
|
||||
final MetricsHistogram h = new MetricsHistogram("testHistogram", null);
|
||||
|
||||
final long[] data = new long[1000];
|
||||
|
||||
for (int i = 0; i < data.length; i++) {
|
||||
data[i] = (long) (r.nextGaussian() * 10000.0);
|
||||
h.update(data[i]);
|
||||
}
|
||||
|
||||
final Snapshot s = h.getSnapshot();
|
||||
Arrays.sort(data);
|
||||
|
||||
// as long as the histogram chooses an item with index N+/-slop, accept it
|
||||
final int slop = 20;
|
||||
|
||||
// make sure the median, 75th percentile and 95th percentile are good
|
||||
final int medianIndex = data.length / 2;
|
||||
final long minAcceptableMedian = data[safeIndex(medianIndex - slop,
|
||||
data.length)];
|
||||
final long maxAcceptableMedian = data[safeIndex(medianIndex + slop,
|
||||
data.length)];
|
||||
Assert.assertTrue(s.getMedian() >= minAcceptableMedian
|
||||
&& s.getMedian() <= maxAcceptableMedian);
|
||||
|
||||
final int seventyFifthIndex = (int) (data.length * 0.75);
|
||||
final long minAcceptableseventyFifth = data[safeIndex(seventyFifthIndex
|
||||
- slop, data.length)];
|
||||
final long maxAcceptableseventyFifth = data[safeIndex(seventyFifthIndex
|
||||
+ slop, data.length)];
|
||||
Assert.assertTrue(s.get75thPercentile() >= minAcceptableseventyFifth
|
||||
&& s.get75thPercentile() <= maxAcceptableseventyFifth);
|
||||
|
||||
final int ninetyFifthIndex = (int) (data.length * 0.95);
|
||||
final long minAcceptableninetyFifth = data[safeIndex(ninetyFifthIndex
|
||||
- slop, data.length)];
|
||||
final long maxAcceptableninetyFifth = data[safeIndex(ninetyFifthIndex
|
||||
+ slop, data.length)];
|
||||
Assert.assertTrue(s.get95thPercentile() >= minAcceptableninetyFifth
|
||||
&& s.get95thPercentile() <= maxAcceptableninetyFifth);
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,181 +0,0 @@
|
|||
/**
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase.metrics;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.management.AttributeNotFoundException;
|
||||
import javax.management.MBeanAttributeInfo;
|
||||
import javax.management.MBeanException;
|
||||
import javax.management.MBeanInfo;
|
||||
import javax.management.ReflectionException;
|
||||
|
||||
import com.yammer.metrics.stats.Snapshot;
|
||||
import org.apache.hadoop.hbase.SmallTests;
|
||||
import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
|
||||
import org.apache.hadoop.metrics.MetricsContext;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.hadoop.metrics.MetricsUtil;
|
||||
import org.apache.hadoop.metrics.util.MetricsIntValue;
|
||||
import org.apache.hadoop.metrics.util.MetricsRegistry;
|
||||
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.junit.experimental.categories.Category;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@Deprecated
@Category(SmallTests.class)
public class TestMetricsMBeanBase extends TestCase {

  // Minimal concrete subclass so the abstract-ish MetricsMBeanBase can be
  // instantiated against a registry under test.
  private class TestStatistics extends MetricsMBeanBase {
    public TestStatistics(MetricsRegistry registry) {
      super(registry, "TestStatistics");
    }
  }

  private MetricsRegistry registry;
  private MetricsRecord metricsRecord;
  private TestStatistics stats;
  private MetricsRate metricsRate;
  private MetricsIntValue intValue;
  private MetricsTimeVaryingRate varyRate;

  // Registers one metric of each kind (rate, int value, time-varying rate)
  // so the MBean exposes a known set of six attributes.
  public void setUp() {
    this.registry = new MetricsRegistry();
    this.metricsRate = new MetricsRate("metricsRate", registry, "test");
    this.intValue = new MetricsIntValue("intValue", registry, "test");
    this.varyRate = new MetricsTimeVaryingRate("varyRate", registry, "test");
    this.stats = new TestStatistics(registry);
    MetricsContext context = MetricsUtil.getContext("hbase");
    this.metricsRecord = MetricsUtil.createRecord(context, "test");
    this.metricsRecord.setTag("TestStatistics", "test");
    //context.registerUpdater(this);

  }

  public void tearDown() {

  }

  // NOTE(review): this test is timing-dependent and known flaky (HBASE-8392,
  // "testGetAttribute is flakey under hadoop2 profile"): the metricsRate
  // assertion assumes the rate computed after Thread.sleep(1000) is exactly
  // 2 increments over ~1 second, within a 0.005 tolerance. Scheduler jitter
  // can break that. The varyRate Min/Max/Avg assertions presumably treat
  // inc(N) as an N-millisecond operation time — TODO confirm against
  // MetricsTimeVaryingRate before relying on them.
  public void testGetAttribute() throws Exception {
    this.metricsRate.inc(2);
    Thread.sleep(1000);
    this.metricsRate.pushMetric(this.metricsRecord);
    this.intValue.set(5);
    this.intValue.pushMetric(this.metricsRecord);
    this.varyRate.inc(10);
    this.varyRate.inc(50);
    this.varyRate.pushMetric(this.metricsRecord);


    assertEquals( 2.0, (Float)this.stats.getAttribute("metricsRate"), 0.005 );
    assertEquals( 5, this.stats.getAttribute("intValue") );
    assertEquals( 10L, this.stats.getAttribute("varyRateMinTime") );
    assertEquals( 50L, this.stats.getAttribute("varyRateMaxTime") );
    assertEquals( 30L, this.stats.getAttribute("varyRateAvgTime") );
    assertEquals( 2, this.stats.getAttribute("varyRateNumOps") );
  }

  // Verifies the MBean metadata: the three registered metrics expand to six
  // attributes (rate, int value, and four derived varyRate attributes),
  // each with the expected name, Java type, and description.
  public void testGetMBeanInfo() {
    MBeanInfo info = this.stats.getMBeanInfo();
    MBeanAttributeInfo[] attributes = info.getAttributes();
    assertEquals( 6, attributes.length );

    Map<String,MBeanAttributeInfo> attributeByName =
        new HashMap<String,MBeanAttributeInfo>(attributes.length);
    for (MBeanAttributeInfo attr : attributes)
      attributeByName.put(attr.getName(), attr);

    assertAttribute( attributeByName.get("metricsRate"),
        "metricsRate", "java.lang.Float", "test");
    assertAttribute( attributeByName.get("intValue"),
        "intValue", "java.lang.Integer", "test");
    assertAttribute( attributeByName.get("varyRateMinTime"),
        "varyRateMinTime", "java.lang.Long", "test");
    assertAttribute( attributeByName.get("varyRateMaxTime"),
        "varyRateMaxTime", "java.lang.Long", "test");
    assertAttribute( attributeByName.get("varyRateAvgTime"),
        "varyRateAvgTime", "java.lang.Long", "test");
    assertAttribute( attributeByName.get("varyRateNumOps"),
        "varyRateNumOps", "java.lang.Integer", "test");
  }

  // Stubs a MetricsHistogram (and its Snapshot) with fixed statistics and
  // checks that each histogram statistic is exposed as an MBean attribute
  // named histoName + per-statistic suffix, with counts/min/max as Long and
  // the remaining statistics as Float.
  public void testMetricsMBeanBaseHistogram()
      throws ReflectionException, AttributeNotFoundException, MBeanException {
    MetricsRegistry mr = new MetricsRegistry();
    MetricsHistogram histo = mock(MetricsHistogram.class);
    Snapshot snap = mock(Snapshot.class);

    //Set up the mocks
    String histoName = "MockHisto";
    when(histo.getName()).thenReturn(histoName);
    when(histo.getCount()).thenReturn(20l);
    when(histo.getMin()).thenReturn(1l);
    when(histo.getMax()).thenReturn(999l);
    when(histo.getMean()).thenReturn(500.2);
    when(histo.getStdDev()).thenReturn(1.2);
    when(histo.getSnapshot()).thenReturn(snap);

    when(snap.getMedian()).thenReturn(490.0);
    when(snap.get75thPercentile()).thenReturn(550.0);
    when(snap.get95thPercentile()).thenReturn(900.0);
    when(snap.get99thPercentile()).thenReturn(990.0);

    mr.add("myTestHisto", histo);

    MetricsMBeanBase mBeanBase = new MetricsMBeanBase(mr, "test");

    assertEquals(new Long(20), mBeanBase
        .getAttribute(histoName + MetricsHistogram.NUM_OPS_METRIC_NAME));
    assertEquals(new Long(1), mBeanBase
        .getAttribute(histoName + MetricsHistogram.MIN_METRIC_NAME));
    assertEquals(new Long(999), mBeanBase
        .getAttribute(histoName + MetricsHistogram.MAX_METRIC_NAME));
    assertEquals(new Float(500.2), mBeanBase
        .getAttribute(histoName + MetricsHistogram.MEAN_METRIC_NAME));
    assertEquals(new Float(1.2), mBeanBase
        .getAttribute(histoName + MetricsHistogram.STD_DEV_METRIC_NAME));

    assertEquals(new Float(490.0), mBeanBase
        .getAttribute(histoName + MetricsHistogram.MEDIAN_METRIC_NAME));
    assertEquals(new Float(550.0), mBeanBase
        .getAttribute(histoName + MetricsHistogram.SEVENTY_FIFTH_PERCENTILE_METRIC_NAME));
    assertEquals(new Float(900.0), mBeanBase
        .getAttribute(histoName + MetricsHistogram.NINETY_FIFTH_PERCENTILE_METRIC_NAME));
    assertEquals(new Float(990.0), mBeanBase
        .getAttribute(histoName + MetricsHistogram.NINETY_NINETH_PERCENTILE_METRIC_NAME));
  }

  // Shared check: attribute metadata matches the expected name, fully
  // qualified type, and description.
  protected void assertAttribute(MBeanAttributeInfo attr, String name,
      String type, String description) {

    assertEquals(attr.getName(), name);
    assertEquals(attr.getType(), type);
    assertEquals(attr.getDescription(), description);
  }


}
|
||||
|
Loading…
Reference in New Issue