HBASE-19000 Group multiple block cache clear requests per server

Signed-off-by: tedyu <yuzhihong@gmail.com>
Guangxu Cheng authored 2017-12-12 22:53:11 +08:00, committed by tedyu
parent 7f586995a8
commit 86043ef629
7 changed files with 114 additions and 41 deletions
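
The change collapses the fan-out of the block-cache clearing path from one ClearRegionBlockCache RPC per region to one RPC per region server. For orientation, here is a minimal client-side sketch of the public entry point whose behavior this commit changes. It is not part of the diff, and the table name "t1" is hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ClearBlockCacheExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // After this commit: one grouped RPC per region server hosting "t1".
      CacheEvictionStats stats = admin.clearBlockCache(TableName.valueOf("t1"));
      System.out.println("evicted blocks: " + stats.getEvictedBlocks()
          + ", failed regions: " + stats.getExceptionCount());
    }
  }
}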

hbase-client/src/main/java/org/apache/hadoop/hbase/CacheEvictionStats.java

@@ -18,6 +18,11 @@
  */
 package org.apache.hadoop.hbase;
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Public
@@ -25,10 +30,12 @@ public final class CacheEvictionStats {
   private final long evictedBlocks;
   private final long maxCacheSize;
+  private final Map<byte[], Throwable> exceptions;
   CacheEvictionStats(CacheEvictionStatsBuilder builder) {
     this.evictedBlocks = builder.evictedBlocks;
     this.maxCacheSize = builder.maxCacheSize;
+    this.exceptions = builder.exceptions;
   }
   public long getEvictedBlocks() {
@@ -39,6 +46,21 @@ public final class CacheEvictionStats {
     return maxCacheSize;
   }
+  public Map<byte[], Throwable> getExceptions() {
+    return Collections.unmodifiableMap(exceptions);
+  }
+
+  public int getExceptionCount() {
+    return exceptions.size();
+  }
+
+  private String getFailedRegions() {
+    return exceptions.keySet().stream()
+        .map(regionName -> RegionInfo.prettyPrint(RegionInfo.encodeRegionName(regionName)))
+        .collect(Collectors.toList())
+        .toString();
+  }
+
   @InterfaceAudience.Private
   public static CacheEvictionStatsBuilder builder() {
     return new CacheEvictionStatsBuilder();
@@ -49,6 +71,8 @@ public final class CacheEvictionStats {
     return "CacheEvictionStats{" +
         "evictedBlocks=" + evictedBlocks +
         ", maxCacheSize=" + maxCacheSize +
+        ", failedRegionsSize=" + getExceptionCount() +
+        ", failedRegions=" + getFailedRegions() +
         '}';
   }
 }
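
CacheEvictionStats now carries a per-region failure map alongside the counters. A hedged sketch of how a caller might consume it; it relies only on the accessors added above, and the output formatting is made up.

import java.util.Map;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.client.RegionInfo;

public class FailedRegionReport {
  // Prints one line per region that could not be cleared, mirroring getFailedRegions() above.
  static void report(CacheEvictionStats stats) {
    for (Map.Entry<byte[], Throwable> failure : stats.getExceptions().entrySet()) {
      // Keys are raw region names; the RegionInfo helpers render them readably.
      System.out.println("region "
          + RegionInfo.prettyPrint(RegionInfo.encodeRegionName(failure.getKey()))
          + " failed: " + failure.getValue());
    }
  }
}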

hbase-client/src/main/java/org/apache/hadoop/hbase/CacheEvictionStatsBuilder.java

@@ -18,12 +18,16 @@
  */
 package org.apache.hadoop.hbase;
+import java.util.HashMap;
+import java.util.Map;
 import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public final class CacheEvictionStatsBuilder {
   long evictedBlocks = 0;
   long maxCacheSize = 0;
+  Map<byte[], Throwable> exceptions = new HashMap<>();
   CacheEvictionStatsBuilder() {
   }
@@ -38,9 +42,14 @@ public final class CacheEvictionStatsBuilder {
     return this;
   }
+  public void addException(byte[] regionName, Throwable ie){
+    exceptions.put(regionName, ie);
+  }
+
   public CacheEvictionStatsBuilder append(CacheEvictionStats stats) {
     this.evictedBlocks += stats.getEvictedBlocks();
     this.maxCacheSize += stats.getMaxCacheSize();
+    this.exceptions.putAll(stats.getExceptions());
     return this;
   }
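
The builder is now the aggregation point: the admin client folds each per-server response into one running total, exceptions included. A minimal sketch of that merge, assuming a list of already-fetched per-server stats:

import java.util.List;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;

public class StatsAggregation {
  // append() sums the counters and putAll()s the per-region exception maps, as defined above.
  static CacheEvictionStats merge(List<CacheEvictionStats> perServerStats) {
    CacheEvictionStatsBuilder aggregate = CacheEvictionStats.builder();
    for (CacheEvictionStats stats : perServerStats) {
      aggregate = aggregate.append(stats);
    }
    return aggregate.build();
  }
}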

hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java

@@ -111,6 +111,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
@@ -1460,32 +1461,37 @@ public class HBaseAdmin implements Admin {
     CacheEvictionStatsBuilder cacheEvictionStats = CacheEvictionStats.builder();
     List<Pair<RegionInfo, ServerName>> pairs =
         MetaTableAccessor.getTableRegionsAndLocations(connection, tableName);
-    for (Pair<RegionInfo, ServerName> pair: pairs) {
-      if (pair.getFirst().isOffline() || pair.getSecond() == null) {
-        continue;
-      }
-      try {
-        cacheEvictionStats = cacheEvictionStats.append(
-            clearBlockCache(pair.getSecond(), pair.getFirst()));
-      } catch (NotServingRegionException e) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Failed to clear block cache for " + pair.getFirst() + " on " +
-              pair.getSecond() + ": " + StringUtils.stringifyException(e));
+    Map<ServerName, List<RegionInfo>> regionInfoByServerName =
+        pairs.stream()
+            .filter(pair -> !(pair.getFirst().isOffline()))
+            .filter(pair -> pair.getSecond() != null)
+            .collect(Collectors.groupingBy(pair -> pair.getSecond(),
+                Collectors.mapping(pair -> pair.getFirst(), Collectors.toList())));
+    for (Map.Entry<ServerName, List<RegionInfo>> entry : regionInfoByServerName.entrySet()) {
+      CacheEvictionStats stats = clearBlockCache(entry.getKey(), entry.getValue());
+      cacheEvictionStats = cacheEvictionStats.append(stats);
+      if (stats.getExceptionCount() > 0) {
+        for (Map.Entry<byte[], Throwable> exception : stats.getExceptions().entrySet()) {
+          LOG.debug("Failed to clear block cache for "
+              + Bytes.toStringBinary(exception.getKey())
+              + " on " + entry.getKey() + ": ", exception.getValue());
         }
       }
     }
     return cacheEvictionStats.build();
   }

-  private CacheEvictionStats clearBlockCache(final ServerName sn, final RegionInfo hri)
+  private CacheEvictionStats clearBlockCache(final ServerName sn, final List<RegionInfo> hris)
       throws IOException {
     HBaseRpcController controller = rpcControllerFactory.newController();
     AdminService.BlockingInterface admin = this.connection.getAdmin(sn);
     ClearRegionBlockCacheRequest request =
-        RequestConverter.buildClearRegionBlockCacheRequest(hri.getRegionName());
+        RequestConverter.buildClearRegionBlockCacheRequest(hris);
+    ClearRegionBlockCacheResponse response;
     try {
-      return ProtobufUtil.toCacheEvictionStats(
-          admin.clearRegionBlockCache(controller, request).getStats());
+      response = admin.clearRegionBlockCache(controller, request);
+      return ProtobufUtil.toCacheEvictionStats(response.getStats());
     } catch (ServiceException se) {
       throw ProtobufUtil.getRemoteException(se);
     }
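
The core of the change is the groupingBy/mapping collector: region-to-server pairs become a server-to-regions map, so one grouped request can be issued per server. A self-contained illustration, with plain strings standing in for RegionInfo and ServerName:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.util.Pair;

public class GroupByServerDemo {
  public static void main(String[] args) {
    List<Pair<String, String>> regionToServer = Arrays.asList(
        new Pair<>("region-a", "server-1"),
        new Pair<>("region-b", "server-2"),
        new Pair<>("region-c", "server-1"));
    // Same shape as the HBaseAdmin code above: group values (regions) by key (server).
    Map<String, List<String>> regionsByServer = regionToServer.stream()
        .collect(Collectors.groupingBy(Pair::getSecond,
            Collectors.mapping(Pair::getFirst, Collectors.toList())));
    // One entry per server => one RPC per server instead of one per region, e.g.
    // {server-1=[region-a, region-c], server-2=[region-b]} (map order not guaranteed).
    System.out.println(regionsByServer);
  }
}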

hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.CacheEvictionStats;
+import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
@@ -152,6 +153,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamil
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionExceptionMessage;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
@@ -3415,15 +3417,35 @@ public final class ProtobufUtil {
         .collect(Collectors.toList());
   }

-  public static CacheEvictionStats toCacheEvictionStats(HBaseProtos.CacheEvictionStats cacheEvictionStats) {
-    return CacheEvictionStats.builder()
-        .withEvictedBlocks(cacheEvictionStats.getEvictedBlocks())
-        .withMaxCacheSize(cacheEvictionStats.getMaxCacheSize())
-        .build();
+  public static CacheEvictionStats toCacheEvictionStats(
+      HBaseProtos.CacheEvictionStats stats) throws IOException{
+    CacheEvictionStatsBuilder builder = CacheEvictionStats.builder();
+    builder.withEvictedBlocks(stats.getEvictedBlocks())
+        .withMaxCacheSize(stats.getMaxCacheSize());
+    if (stats.getExceptionCount() > 0) {
+      for (HBaseProtos.RegionExceptionMessage exception : stats.getExceptionList()) {
+        HBaseProtos.RegionSpecifier rs = exception.getRegion();
+        byte[] regionName = rs.getValue().toByteArray();
+        builder.addException(regionName, ProtobufUtil.toException(exception.getException()));
+      }
+    }
+    return builder.build();
   }

-  public static HBaseProtos.CacheEvictionStats toCacheEvictionStats(CacheEvictionStats cacheEvictionStats) {
-    return HBaseProtos.CacheEvictionStats.newBuilder()
+  public static HBaseProtos.CacheEvictionStats toCacheEvictionStats(
+      CacheEvictionStats cacheEvictionStats) {
+    HBaseProtos.CacheEvictionStats.Builder builder
+        = HBaseProtos.CacheEvictionStats.newBuilder();
+    for (Map.Entry<byte[], Throwable> entry : cacheEvictionStats.getExceptions().entrySet()) {
+      builder.addException(
+          RegionExceptionMessage.newBuilder()
+              .setRegion(RequestConverter.buildRegionSpecifier(
+                  RegionSpecifierType.REGION_NAME, entry.getKey()))
+              .setException(ResponseConverter.buildException(entry.getValue()))
+              .build()
+      );
+    }
+    return builder
         .setEvictedBlocks(cacheEvictionStats.getEvictedBlocks())
         .setMaxCacheSize(cacheEvictionStats.getMaxCacheSize())
         .build();
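
With both converters now carrying the exception list, the proto round trip is lossless for failures. A hedged sketch, assuming the HBase client classpath; the counts, region name, and exception are made up:

import java.io.IOException;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class RoundTripSketch {
  static void roundTrip() throws IOException {
    CacheEvictionStatsBuilder builder = CacheEvictionStats.builder();
    builder.withEvictedBlocks(42).withMaxCacheSize(1024);
    builder.addException(Bytes.toBytes("demo-region"),
        new NotServingRegionException("demo"));
    CacheEvictionStats original = builder.build();

    // POJO -> proto -> POJO; the per-region exception entry survives both conversions.
    HBaseProtos.CacheEvictionStats proto = ProtobufUtil.toCacheEvictionStats(original);
    CacheEvictionStats restored = ProtobufUtil.toCacheEvictionStats(proto);
    assert restored.getExceptionCount() == 1;
  }
}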

hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java

@@ -53,7 +53,6 @@ import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
@@ -1507,10 +1506,14 @@ public final class RequestConverter {
    *
    * @return a ClearRegionBlockCacheRequest
    */
-  public static ClearRegionBlockCacheRequest buildClearRegionBlockCacheRequest(final byte[]
-      regionName) {
-    RegionSpecifier region = buildRegionSpecifier(RegionSpecifierType.REGION_NAME, regionName);
-    return ClearRegionBlockCacheRequest.newBuilder().addAllRegion(Lists.newArrayList(region)).build();
+  public static ClearRegionBlockCacheRequest
+      buildClearRegionBlockCacheRequest(List<RegionInfo> hris) {
+    ClearRegionBlockCacheRequest.Builder builder = ClearRegionBlockCacheRequest.newBuilder();
+    hris.forEach(
+        hri -> builder.addRegion(
+            buildRegionSpecifier(RegionSpecifierType.REGION_NAME, hri.getRegionName())
+        ));
+    return builder.build();
   }

   /**
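
The request builder now takes a server's whole region list instead of a single region name. A hedged usage sketch; the table and its single region are hypothetical:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;

public class GroupedRequestSketch {
  static void build() {
    List<RegionInfo> regionsOnServer = Arrays.asList(
        RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build());
    // One request per server, carrying one RegionSpecifier per region.
    ClearRegionBlockCacheRequest request =
        RequestConverter.buildClearRegionBlockCacheRequest(regionsOnServer);
    assert request.getRegionCount() == regionsOnServer.size();
  }
}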

hbase-protocol-shaded/src/main/protobuf/HBase.proto

@@ -242,8 +242,14 @@ message RegionServerInfo {
   optional VersionInfo version_info = 2;
 }

+message RegionExceptionMessage {
+  required RegionSpecifier region = 1;
+  required NameBytesPair exception = 2;
+}
+
 message CacheEvictionStats {
   optional int64 evicted_blocks = 1;
   optional int64 bytes_evicted = 2;
   optional int64 max_cache_size = 3;
+  repeated RegionExceptionMessage exception = 4;
 }
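
For reference, a hedged illustration (proto text format, all values made up) of a CacheEvictionStats message carrying one failed region; RegionSpecifier and NameBytesPair are the existing messages the new fields reference:

evicted_blocks: 10
max_cache_size: 1073741824
exception {
  region {
    type: REGION_NAME
    value: "t1,,1512345678901.0123456789abcdef0123456789abcdef."
  }
  exception {
    name: "org.apache.hadoop.hbase.NotServingRegionException"
    value: "..."
  }
}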

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java

@@ -60,9 +60,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.MultiActionResultTooLarge;
 import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
@@ -1354,11 +1354,15 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
    * @throws IOException if any of the specifiers is not null,
    *    but failed to find the region
    */
-  private List<HRegion> getRegions(
-      final List<RegionSpecifier> regionSpecifiers) throws IOException {
+  private List<HRegion> getRegions(final List<RegionSpecifier> regionSpecifiers,
+      final CacheEvictionStatsBuilder stats) {
     List<HRegion> regions = Lists.newArrayListWithCapacity(regionSpecifiers.size());
     for (RegionSpecifier regionSpecifier: regionSpecifiers) {
-      regions.add(regionServer.getRegion(regionSpecifier.getValue().toByteArray()));
+      try {
+        regions.add(regionServer.getRegion(regionSpecifier.getValue().toByteArray()));
+      } catch (NotServingRegionException e) {
+        stats.addException(regionSpecifier.getValue().toByteArray(), e);
+      }
     }
     return regions;
   }
@@ -3436,19 +3440,18 @@
   @Override
   public ClearRegionBlockCacheResponse clearRegionBlockCache(RpcController controller,
-      ClearRegionBlockCacheRequest request)
-      throws ServiceException {
+      ClearRegionBlockCacheRequest request) {
+    ClearRegionBlockCacheResponse.Builder builder =
+        ClearRegionBlockCacheResponse.newBuilder();
     CacheEvictionStatsBuilder stats = CacheEvictionStats.builder();
-    try {
-      List<HRegion> regions = getRegions(request.getRegionList());
-      for (HRegion region : regions) {
+    List<HRegion> regions = getRegions(request.getRegionList(), stats);
+    for (HRegion region : regions) {
+      try {
         stats = stats.append(this.regionServer.clearRegionBlockCache(region));
+      } catch (Exception e) {
+        stats.addException(region.getRegionInfo().getRegionName(), e);
       }
-    } catch (Exception e) {
-      throw new ServiceException(e);
     }
-    return ClearRegionBlockCacheResponse.newBuilder()
-        .setStats(ProtobufUtil.toCacheEvictionStats(stats.build()))
-        .build();
+    return builder.setStats(ProtobufUtil.toCacheEvictionStats(stats.build())).build();
   }
 }
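
The server-side behavior change in the last hunk: previously a single NotServingRegionException aborted the whole RPC with a ServiceException; now each failure is recorded against its region and the remaining regions are still cleared. A dependency-free sketch of that fail-soft pattern, with strings standing in for regions:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FailSoftDemo {
  public static void main(String[] args) {
    List<String> regions = Arrays.asList("region-a", "region-b", "region-c");
    Map<String, Throwable> failures = new HashMap<>();
    int cleared = 0;
    for (String region : regions) {
      try {
        if (region.equals("region-b")) { // stands in for a NotServingRegionException
          throw new IllegalStateException("region not serving");
        }
        cleared++; // stands in for a successful per-region cache clear
      } catch (Exception e) {
        failures.put(region, e); // recorded in the stats, not rethrown
      }
    }
    System.out.println("cleared=" + cleared + ", failed=" + failures.keySet());
  }
}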