HBASE-17930 Avoid using Canary.sniff in HBaseTestingUtility

zhangduo 2017-04-17 17:26:23 +08:00
parent b35121d904
commit 75d1e0361a
2 changed files with 16 additions and 50 deletions

org/apache/hadoop/hbase/tool/Canary.java

@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -78,7 +79,6 @@ import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.tool.Canary.RegionTask.TaskType;
import org.apache.hadoop.hbase.util.Bytes;
@@ -110,6 +110,7 @@ import org.apache.zookeeper.data.Stat;
* 3. zookeeper mode - for each zookeeper instance, selects a zNode and
* outputs some information about failure or latency.
*/
+@InterfaceAudience.Private
public final class Canary implements Tool {
// Sink interface used by the canary to outputs information
public interface Sink {
@@ -1106,49 +1107,6 @@ public final class Canary implements Tool {
}
}
-  /**
-   * Canary entry point for specified table.
-   * @throws Exception
-   */
-  public static void sniff(final Admin admin, TableName tableName, boolean rawScanEnabled)
-      throws Exception {
-    sniff(admin, tableName, TaskType.READ, rawScanEnabled);
-  }
-
-  /**
-   * Canary entry point for specified table.
-   * Keeping this method backward compatibility
-   * @throws Exception
-   */
-  public static void sniff(final Admin admin, TableName tableName)
-      throws Exception {
-    sniff(admin, tableName, TaskType.READ, false);
-  }
-
-  /**
-   * Canary entry point for specified table with task type(read/write)
-   * @throws Exception
-   */
-  public static void sniff(final Admin admin, TableName tableName, TaskType taskType,
-      boolean rawScanEnabled) throws Exception {
-    List<Future<Void>> taskFutures =
-        Canary.sniff(admin, new StdOutSink(), tableName.getNameAsString(),
-          new ScheduledThreadPoolExecutor(1), taskType, rawScanEnabled);
-    for (Future<Void> future : taskFutures) {
-      future.get();
-    }
-  }
-
-  /**
-   * Canary entry point for specified table with task type(read/write)
-   * Keeping this method backward compatible
-   * @throws Exception
-   */
-  public static void sniff(final Admin admin, TableName tableName, TaskType taskType)
-      throws Exception {
-    Canary.sniff(admin, tableName, taskType, false);
-  }
-
  /**
   * Canary entry point for specified table.
   * @throws Exception
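
The convenience overloads removed above were the only Canary entry points that HBaseTestingUtility relied on; the second file in this commit replaces that call with a bounded scan against every region of the table, using only the public client API. As a reference, the sketch below lifts that same logic into a standalone helper. It is a minimal sketch grounded in the new HBaseTestingUtility code further down; the class and method names (RegionProbe, probeEveryRegion) are illustrative and are not part of the commit.

import java.io.IOException;

import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public final class RegionProbe {

  private RegionProbe() {
  }

  /**
   * Reads at most one row from every region of the given table, the same check the new
   * HBaseTestingUtility code performs in place of Canary.sniff. Any unreachable or
   * unresponsive region surfaces as an IOException from the scan.
   */
  public static void probeEveryRegion(Connection conn, TableName tableName) throws IOException {
    try (Table table = conn.getTable(tableName)) {
      HTableDescriptor htd = table.getTableDescriptor();
      for (HRegionLocation loc : conn.getRegionLocator(tableName).getAllRegionLocations()) {
        // Restrict the scan to this region and keep it cheap:
        // one row, one cell per family, no block-cache pollution.
        Scan scan = new Scan().withStartRow(loc.getRegionInfo().getStartKey())
            .withStopRow(loc.getRegionInfo().getEndKey()).setOneRowLimit()
            .setMaxResultsPerColumnFamily(1).setCacheBlocks(false);
        for (byte[] family : htd.getFamiliesKeys()) {
          scan.addFamily(family);
        }
        try (ResultScanner scanner = table.getScanner(scan)) {
          scanner.next();
        }
      }
    }
  }
}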

org/apache/hadoop/hbase/HBaseTestingUtility.java

@@ -70,7 +70,6 @@ import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
@@ -106,7 +105,6 @@ import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.visibility.VisibilityLabelsCache;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.tool.Canary;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
@@ -4045,10 +4043,20 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
      public boolean evaluate() throws IOException {
        boolean tableAvailable = getAdmin().isTableAvailable(tableName);
        if (tableAvailable) {
-          try {
-            Canary.sniff(getAdmin(), tableName);
-          } catch (Exception e) {
-            throw new IOException("Canary sniff failed for table " + tableName, e);
+          try (Table table = getConnection().getTable(tableName)) {
+            HTableDescriptor htd = table.getTableDescriptor();
+            for (HRegionLocation loc : getConnection().getRegionLocator(tableName)
+                .getAllRegionLocations()) {
+              Scan scan = new Scan().withStartRow(loc.getRegionInfo().getStartKey())
+                  .withStopRow(loc.getRegionInfo().getEndKey()).setOneRowLimit()
+                  .setMaxResultsPerColumnFamily(1).setCacheBlocks(false);
+              for (byte[] family : htd.getFamiliesKeys()) {
+                scan.addFamily(family);
+              }
+              try (ResultScanner scanner = table.getScanner(scan)) {
+                scanner.next();
+              }
+            }
          }
        }
        return tableAvailable;
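
For completeness, here is a hedged usage sketch of how a test might combine the availability check with the per-region probe shown earlier, mirroring the evaluate() predicate in the hunk above. getAdmin(), getConnection() and isTableAvailable() appear in the diff; the wrapper class, polling interval and the RegionProbe helper are illustrative assumptions, not part of this commit.

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;

public final class TableReadinessCheck {

  private TableReadinessCheck() {
  }

  /** Blocks until the table is reported available and every region answers a single-row read. */
  public static void waitUntilReadable(HBaseTestingUtility util, TableName tableName)
      throws Exception {
    // Same two-step check as the predicate above: availability first, then a real read.
    while (!util.getAdmin().isTableAvailable(tableName)) {
      Thread.sleep(200); // illustrative back-off while regions are still being assigned
    }
    RegionProbe.probeEveryRegion(util.getConnection(), tableName);
  }
}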