HBASE-22637 fix flaky test in TestMetaTableMetrics

Signed-off-by: Peter Somogyi <psomogyi@apache.org>
Signed-off-by: Sakthi <sakthivel.azhaku@gmail.com>
Mate Szalay-Beko 2019-07-02 09:35:43 +02:00 committed by Peter Somogyi
parent a2a929f488
commit 6205a6c8b3
1 changed file with 150 additions and 137 deletions


@@ -11,17 +11,18 @@
 package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashSet;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Hashtable;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -47,7 +48,9 @@ import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Threads;
+import org.hamcrest.CustomTypeSafeMatcher;
+import org.hamcrest.Matcher;
+import org.hamcrest.core.AllOf;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -56,7 +59,6 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Category({ CoprocessorTests.class, MediumTests.class })
 public class TestMetaTableMetrics {
@ -73,18 +75,19 @@ public class TestMetaTableMetrics {
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build(); ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).build();
private static final int NUM_ROWS = 5; private static final int NUM_ROWS = 5;
private static final String value = "foo"; private static final String value = "foo";
private static Configuration conf = null; private static final String METRICS_ATTRIBUTE_NAME_PREFIX = "MetaTable_";
private static final List<String> METRICS_ATTRIBUTE_NAME_POSTFIXES =
Arrays.asList("_count", "_mean_rate", "_1min_rate", "_5min_rate", "_15min_rate");
private static int connectorPort = 61120; private static int connectorPort = 61120;
final byte[] cf = Bytes.toBytes("info"); private final byte[] cf = Bytes.toBytes("info");
final byte[] col = Bytes.toBytes("any"); private final byte[] col = Bytes.toBytes("any");
byte[] tablename; private byte[] tablename;
final int nthreads = 20; private final int nthreads = 20;
@BeforeClass @BeforeClass
public static void setupBeforeClass() throws Exception { public static void setupBeforeClass() throws Exception {
Configuration conf = UTIL.getConfiguration();
conf = UTIL.getConfiguration();
// Set system coprocessor so it can be applied to meta regions // Set system coprocessor so it can be applied to meta regions
UTIL.getConfiguration().set("hbase.coprocessor.region.classes", UTIL.getConfiguration().set("hbase.coprocessor.region.classes",
MetaTableMetrics.class.getName()); MetaTableMetrics.class.getName());
@ -100,7 +103,7 @@ public class TestMetaTableMetrics {
UTIL.startMiniCluster(1); UTIL.startMiniCluster(1);
break; break;
} catch (Exception e) { } catch (Exception e) {
LOG.debug("Encountered exception when starting cluster. Trying port " + connectorPort, e); LOG.debug("Encountered exception when starting cluster. Trying port {}", connectorPort, e);
try { try {
// this is to avoid "IllegalStateException: A mini-cluster is already running" // this is to avoid "IllegalStateException: A mini-cluster is already running"
UTIL.shutdownMiniCluster(); UTIL.shutdownMiniCluster();
@@ -109,10 +112,6 @@ public class TestMetaTableMetrics {
         }
       }
     }
-    UTIL.getAdmin()
-      .createTable(TableDescriptorBuilder.newBuilder(NAME1)
-        .setColumnFamily(CFD)
-        .build());
   }
 
   @AfterClass
@@ -120,116 +119,44 @@ public class TestMetaTableMetrics {
     UTIL.shutdownMiniCluster();
   }
 
-  private void writeData(Table t) throws IOException {
-    List<Put> puts = new ArrayList<>(NUM_ROWS);
-    for (int i = 0; i < NUM_ROWS; i++) {
-      Put p = new Put(Bytes.toBytes(i + 1));
-      p.addColumn(FAMILY, QUALIFIER, Bytes.toBytes(value));
-      puts.add(p);
-    }
-    t.put(puts);
-  }
-
-  private Set<String> readJmxMetricsWithRetry() throws IOException {
-    final int count = 0;
-    for (int i = 0; i < 10; i++) {
-      Set<String> metrics = readJmxMetrics();
-      if (metrics != null) {
-        return metrics;
-      }
-      LOG.warn("Failed to get jmxmetrics... sleeping, retrying; " + i + " of " + count + " times");
-      Threads.sleep(1000);
-    }
-    return null;
-  }
-
-  /**
-   * Read the attributes from Hadoop->HBase->RegionServer->MetaTableMetrics in JMX
-   * @throws IOException when fails to retrieve jmx metrics.
-   */
-  // this method comes from this class: TestStochasticBalancerJmxMetrics with minor modifications.
-  private Set<String> readJmxMetrics() throws IOException {
-    JMXConnector connector = null;
-    ObjectName target = null;
-    MBeanServerConnection mb = null;
-    try {
-      connector =
-        JMXConnectorFactory.connect(JMXListener.buildJMXServiceURL(connectorPort, connectorPort));
-      mb = connector.getMBeanServerConnection();
-      @SuppressWarnings("JdkObsolete")
-      Hashtable<String, String> pairs = new Hashtable<>();
-      pairs.put("service", "HBase");
-      pairs.put("name", "RegionServer");
-      pairs.put("sub",
-        "Coprocessor.Region.CP_org.apache.hadoop.hbase.coprocessor"
-          + ".MetaTableMetrics");
-      target = new ObjectName("Hadoop", pairs);
-      MBeanInfo beanInfo = mb.getMBeanInfo(target);
-      Set<String> existingAttrs = new HashSet<>();
-      for (MBeanAttributeInfo attrInfo : beanInfo.getAttributes()) {
-        existingAttrs.add(attrInfo.getName());
-      }
-      return existingAttrs;
-    } catch (Exception e) {
-      LOG.warn("Failed to get bean." + target, e);
-      if (mb != null) {
-        Set<ObjectInstance> instances = mb.queryMBeans(null, null);
-        Iterator<ObjectInstance> iterator = instances.iterator();
-        LOG.warn("MBean Found:");
-        while (iterator.hasNext()) {
-          ObjectInstance instance = iterator.next();
-          LOG.warn("Class Name: " + instance.getClassName());
-          LOG.warn("Object Name: " + instance.getObjectName());
-        }
-      }
-    } finally {
-      if (connector != null) {
-        try {
-          connector.close();
-        } catch (Exception e) {
-          e.printStackTrace();
-        }
-      }
-    }
-    return null;
-  }
-
-  // verifies meta table metrics exist from jmx
-  // for one table, there should be 5 MetaTable_table_<TableName> metrics.
-  // such as:
-  // [Time-limited test] example.TestMetaTableMetrics(204): ==
-  // MetaTable_table_TestExampleMetaTableMetricsOne_request_count
-  // [Time-limited test] example.TestMetaTableMetrics(204): ==
-  // MetaTable_table_TestExampleMetaTableMetricsOne_request_mean_rate
-  // [Time-limited test] example.TestMetaTableMetrics(204): ==
-  // MetaTable_table_TestExampleMetaTableMetricsOne_request_1min_rate
-  // [Time-limited test] example.TestMetaTableMetrics(204): ==
-  // MetaTable_table_TestExampleMetaTableMetricsOne_request_5min_rate
-  // [Time-limited test] example.TestMetaTableMetrics(204): ==
-  // MetaTable_table_TestExampleMetaTableMetricsOne_request_15min_rate
+  // Verifies that meta table metrics exist in jmx. In case of one table (one region) with a
+  // single client: 9 metrics are generated, and for each metric there should be 5 JMX attributes
+  // produced. E.g. for one table, there should be 5 MetaTable_table_<TableName>_request
+  // attributes, such as:
+  // - MetaTable_table_TestExampleMetaTableMetricsOne_request_count
+  // - MetaTable_table_TestExampleMetaTableMetricsOne_request_mean_rate
+  // - MetaTable_table_TestExampleMetaTableMetricsOne_request_1min_rate
+  // - MetaTable_table_TestExampleMetaTableMetricsOne_request_5min_rate
+  // - MetaTable_table_TestExampleMetaTableMetricsOne_request_15min_rate
   @Test
-  public void test() throws IOException, InterruptedException {
-    try (Table t = UTIL.getConnection().getTable(NAME1)) {
-      writeData(t);
-      // Flush the data
-      UTIL.flush(NAME1);
-      // Issue a compaction
-      UTIL.compact(NAME1, true);
-      Thread.sleep(2000);
-    }
-    Set<String> jmxMetrics = readJmxMetricsWithRetry();
-    assertNotNull(jmxMetrics);
-    long name1TableMetricsCount =
-      jmxMetrics.stream().filter(metric -> metric.contains("MetaTable_table_" + NAME1)).count();
-    assertEquals(5L, name1TableMetricsCount);
-
-    String putWithClientMetricNameRegex = "MetaTable_client_.+_put_request.*";
-    long putWithClientMetricsCount =
-      jmxMetrics.stream().filter(metric -> metric.matches(putWithClientMetricNameRegex))
-        .count();
-    assertEquals(5L, putWithClientMetricsCount);
+  public void testMetaTableMetricsInJmx() throws Exception {
+    UTIL.getAdmin()
+      .createTable(TableDescriptorBuilder.newBuilder(NAME1).setColumnFamily(CFD).build());
+    writeData(NAME1);
+    UTIL.deleteTable(NAME1);
+
+    UTIL.waitFor(30000, 2000, true, () -> {
+      Map<String, Double> jmxMetrics = readMetaTableJmxMetrics();
+      boolean allMetricsFound = AllOf.allOf(
+        containsPositiveJmxAttributesFor("MetaTable_get_request"),
+        containsPositiveJmxAttributesFor("MetaTable_put_request"),
+        containsPositiveJmxAttributesFor("MetaTable_delete_request"),
+        containsPositiveJmxAttributesFor("MetaTable_region_.+_lossy_request"),
+        containsPositiveJmxAttributesFor("MetaTable_table_" + NAME1 + "_request"),
+        containsPositiveJmxAttributesFor("MetaTable_client_.+_put_request"),
+        containsPositiveJmxAttributesFor("MetaTable_client_.+_get_request"),
+        containsPositiveJmxAttributesFor("MetaTable_client_.+_delete_request"),
+        containsPositiveJmxAttributesFor("MetaTable_client_.+_lossy_request")
+      ).matches(jmxMetrics);
+      if (allMetricsFound) {
+        LOG.info("all the meta table metrics found with positive values: {}", jmxMetrics);
+      } else {
+        LOG.warn("couldn't find all the meta table metrics with positive values: {}", jmxMetrics);
+      }
+      return allMetricsFound;
+    });
   }
 
   @Test
@@ -244,16 +171,102 @@ public class TestMetaTableMetrics {
       assertTrue(numRowsInTableAfter >= numRowsInTableBefore + numRows);
       getData(numRows);
     } catch (InterruptedException e) {
-      LOG.info("Caught InterruptedException while testConcurrentAccess: " + e.getMessage());
+      LOG.info("Caught InterruptedException while testConcurrentAccess: {}", e.getMessage());
       fail();
     } catch (IOException e) {
-      LOG.info("Caught IOException while testConcurrentAccess: " + e.getMessage());
+      LOG.info("Caught IOException while testConcurrentAccess: {}", e.getMessage());
       fail();
     }
   }
 
-  public void putData(int nrows) throws InterruptedException {
-    LOG.info(String.format("Putting %d rows in hbase:meta", nrows));
+  private void writeData(TableName tableName) throws IOException {
+    try (Table t = UTIL.getConnection().getTable(tableName)) {
+      List<Put> puts = new ArrayList<>(NUM_ROWS);
+      for (int i = 0; i < NUM_ROWS; i++) {
+        Put p = new Put(Bytes.toBytes(i + 1));
+        p.addColumn(FAMILY, QUALIFIER, Bytes.toBytes(value));
+        puts.add(p);
+      }
+      t.put(puts);
+    }
+  }
+
+  private Matcher<Map<String, Double>> containsPositiveJmxAttributesFor(final String regexp) {
+    return new CustomTypeSafeMatcher<Map<String, Double>>(
+        "failed to find all the 5 positive JMX attributes for: " + regexp) {
+      @Override
+      protected boolean matchesSafely(final Map<String, Double> values) {
+        for (String key : values.keySet()) {
+          for (String metricsNamePostfix : METRICS_ATTRIBUTE_NAME_POSTFIXES) {
+            if (key.matches(regexp + metricsNamePostfix) && values.get(key) > 0) {
+              return true;
+            }
+          }
+        }
+        return false;
+      }
+    };
+  }
+
+  /**
+   * Read the attributes from Hadoop->HBase->RegionServer->MetaTableMetrics in JMX
+   * @throws IOException when fails to retrieve jmx metrics.
+   */
+  private Map<String, Double> readMetaTableJmxMetrics() throws IOException {
+    JMXConnector connector = null;
+    ObjectName target = null;
+    MBeanServerConnection mb = null;
+    try {
+      connector =
+        JMXConnectorFactory.connect(JMXListener.buildJMXServiceURL(connectorPort, connectorPort));
+      mb = connector.getMBeanServerConnection();
+      @SuppressWarnings("JdkObsolete")
+      Hashtable<String, String> pairs = new Hashtable<>();
+      pairs.put("service", "HBase");
+      pairs.put("name", "RegionServer");
+      pairs.put("sub",
+        "Coprocessor.Region.CP_org.apache.hadoop.hbase.coprocessor.MetaTableMetrics");
+      target = new ObjectName("Hadoop", pairs);
+      MBeanInfo beanInfo = mb.getMBeanInfo(target);
+      Map<String, Double> existingAttrs = new HashMap<>();
+      for (MBeanAttributeInfo attrInfo : beanInfo.getAttributes()) {
+        Object value = mb.getAttribute(target, attrInfo.getName());
+        if (attrInfo.getName().startsWith(METRICS_ATTRIBUTE_NAME_PREFIX)
+            && value instanceof Number) {
+          existingAttrs.put(attrInfo.getName(), Double.parseDouble(value.toString()));
+        }
+      }
+      LOG.info("MBean Found: {}", target);
+      return existingAttrs;
+    } catch (Exception e) {
+      LOG.warn("Failed to get Meta Table Metrics bean (will retry later): {}", target, e);
+      if (mb != null) {
+        Set<ObjectInstance> instances = mb.queryMBeans(null, null);
+        Iterator<ObjectInstance> iterator = instances.iterator();
+        LOG.debug("All the MBeans we found:");
+        while (iterator.hasNext()) {
+          ObjectInstance instance = iterator.next();
+          LOG.debug("Class and object name: {} [{}]", instance.getClassName(),
+            instance.getObjectName());
+        }
+      }
+    } finally {
+      if (connector != null) {
+        try {
+          connector.close();
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+      }
+    }
+    return Collections.emptyMap();
+  }
+
+  private void putData(int nrows) throws InterruptedException {
+    LOG.info("Putting {} rows in hbase:meta", nrows);
     Thread[] threads = new Thread[nthreads];
     for (int i = 1; i <= nthreads; i++) {
       threads[i - 1] = new PutThread(1, nrows);
@ -261,8 +274,8 @@ public class TestMetaTableMetrics {
startThreadsAndWaitToJoin(threads); startThreadsAndWaitToJoin(threads);
} }
public void getData(int nrows) throws InterruptedException { private void getData(int nrows) throws InterruptedException {
LOG.info(String.format("Getting %d rows from hbase:meta", nrows)); LOG.info("Getting {} rows from hbase:meta", nrows);
Thread[] threads = new Thread[nthreads]; Thread[] threads = new Thread[nthreads];
for (int i = 1; i <= nthreads; i++) { for (int i = 1; i <= nthreads; i++) {
threads[i - 1] = new GetThread(1, nrows); threads[i - 1] = new GetThread(1, nrows);
@@ -279,11 +292,11 @@ public class TestMetaTableMetrics {
     }
   }
 
-  class PutThread extends Thread {
+  private class PutThread extends Thread {
     int start;
     int end;
 
-    public PutThread(int start, int end) {
+    PutThread(int start, int end) {
       this.start = start;
       this.end = end;
     }
@@ -297,16 +310,16 @@ public class TestMetaTableMetrics {
           table.put(p);
         }
       } catch (IOException e) {
-        LOG.info("Caught IOException while PutThread operation: " + e.getMessage());
+        LOG.warn("Caught IOException while PutThread operation", e);
       }
     }
   }
 
-  class GetThread extends Thread {
+  private class GetThread extends Thread {
     int start;
     int end;
 
-    public GetThread(int start, int end) {
+    GetThread(int start, int end) {
      this.start = start;
      this.end = end;
    }
@@ -319,7 +332,7 @@ public class TestMetaTableMetrics {
           table.get(get);
         }
       } catch (IOException e) {
-        LOG.info("Caught IOException while GetThread operation: " + e.getMessage());
+        LOG.warn("Caught IOException while GetThread operation", e);
       }
     }
   }
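
The heart of this fix is a change of strategy rather than a change of assertions: instead of reading the JMX attribute names once and asserting exact counts, the new test repeatedly snapshots the MetaTable_* attributes as name-to-value pairs and keeps polling (UTIL.waitFor(30000, 2000, true, ...)) until a composed Hamcrest matcher reports a positive value for every expected metric. The standalone sketch below illustrates that matcher-plus-polling pattern outside HBase; the class name, the fake snapshot, and the plain sleep loop are illustrative stand-ins for the test's readMetaTableJmxMetrics() and HBaseTestingUtility.waitFor, not part of the commit.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hamcrest.CustomTypeSafeMatcher;
import org.hamcrest.Matcher;
import org.hamcrest.core.AllOf;

public class MetricsMatcherSketch {

  // Same attribute suffixes the test expects for every meter-style metric.
  private static final List<String> POSTFIXES =
    Arrays.asList("_count", "_mean_rate", "_1min_rate", "_5min_rate", "_15min_rate");

  // Hypothetical stand-in for the test's containsPositiveJmxAttributesFor(regexp):
  // matches when at least one attribute named <regexp><postfix> has a positive value.
  static Matcher<Map<String, Double>> containsPositiveAttributeFor(final String regexp) {
    return new CustomTypeSafeMatcher<Map<String, Double>>(
        "a positive JMX attribute matching " + regexp) {
      @Override
      protected boolean matchesSafely(Map<String, Double> values) {
        for (Map.Entry<String, Double> entry : values.entrySet()) {
          for (String postfix : POSTFIXES) {
            if (entry.getKey().matches(regexp + postfix) && entry.getValue() > 0) {
              return true;
            }
          }
        }
        return false;
      }
    };
  }

  public static void main(String[] args) throws InterruptedException {
    long deadline = System.currentTimeMillis() + 30_000;
    boolean allFound = false;
    while (!allFound && System.currentTimeMillis() < deadline) {
      // In the real test this snapshot comes from readMetaTableJmxMetrics(); here it is faked.
      Map<String, Double> snapshot = readFakeMetricsSnapshot();
      allFound = AllOf.allOf(
        containsPositiveAttributeFor("MetaTable_get_request"),
        containsPositiveAttributeFor("MetaTable_client_.+_put_request")
      ).matches(snapshot);
      if (!allFound) {
        Thread.sleep(2_000); // roughly what UTIL.waitFor(30000, 2000, ...) does for the test
      }
    }
    System.out.println("all metrics found: " + allFound);
  }

  // Fake snapshot so the sketch runs standalone; the real values come from the MBean server.
  private static Map<String, Double> readFakeMetricsSnapshot() {
    Map<String, Double> metrics = new HashMap<>();
    metrics.put("MetaTable_get_request_count", 3.0);
    metrics.put("MetaTable_client_/127.0.0.1_put_request_1min_rate", 0.4);
    return metrics;
  }
}

Because each poll takes a fresh snapshot and only needs one positive attribute per metric family, slow metric publication no longer fails the test immediately, which is what made the original single-shot assertions flaky.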