HBASE-19000 Group multiple block cache clear requests per server

Signed-off-by: tedyu <yuzhihong@gmail.com>
Author: Guangxu Cheng, 2017-12-12 22:53:11 +08:00 (committed by tedyu)
parent a489292a83
commit 015b66103a
7 changed files with 114 additions and 41 deletions

hbase-client/src/main/java/org/apache/hadoop/hbase/CacheEvictionStats.java

@@ -18,6 +18,11 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Public
@@ -25,10 +30,12 @@ public final class CacheEvictionStats {
 
   private final long evictedBlocks;
   private final long maxCacheSize;
+  private final Map<byte[], Throwable> exceptions;
 
   CacheEvictionStats(CacheEvictionStatsBuilder builder) {
     this.evictedBlocks = builder.evictedBlocks;
     this.maxCacheSize = builder.maxCacheSize;
+    this.exceptions = builder.exceptions;
   }
 
   public long getEvictedBlocks() {
@@ -39,6 +46,21 @@ public final class CacheEvictionStats {
     return maxCacheSize;
   }
 
+  public Map<byte[], Throwable> getExceptions() {
+    return Collections.unmodifiableMap(exceptions);
+  }
+
+  public int getExceptionCount() {
+    return exceptions.size();
+  }
+
+  private String getFailedRegions() {
+    return exceptions.keySet().stream()
+        .map(regionName -> RegionInfo.prettyPrint(RegionInfo.encodeRegionName(regionName)))
+        .collect(Collectors.toList())
+        .toString();
+  }
+
   @InterfaceAudience.Private
   public static CacheEvictionStatsBuilder builder() {
     return new CacheEvictionStatsBuilder();
@@ -49,6 +71,8 @@ public final class CacheEvictionStats {
     return "CacheEvictionStats{" +
         "evictedBlocks=" + evictedBlocks +
         ", maxCacheSize=" + maxCacheSize +
+        ", failedRegionsSize=" + getExceptionCount() +
+        ", failedRegions=" + getFailedRegions() +
         '}';
   }
 }
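
Note: with the exceptions map now exposed on CacheEvictionStats, a caller can tell which regions failed and why. A minimal usage sketch (not part of this commit), assuming an open Connection named "conn", a table named "t1", and the Admin#clearBlockCache(TableName) entry point this patch builds on:

try (Admin admin = conn.getAdmin()) {
  CacheEvictionStats stats = admin.clearBlockCache(TableName.valueOf("t1"));
  System.out.println("evicted blocks: " + stats.getEvictedBlocks());
  if (stats.getExceptionCount() > 0) {
    // Keys are raw region names; values are the per-region causes.
    stats.getExceptions().forEach((regionName, cause) ->
        System.out.println(Bytes.toStringBinary(regionName) + " failed: " + cause));
  }
}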

hbase-client/src/main/java/org/apache/hadoop/hbase/CacheEvictionStatsBuilder.java

@@ -18,12 +18,16 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public final class CacheEvictionStatsBuilder {
   long evictedBlocks = 0;
   long maxCacheSize = 0;
+  Map<byte[], Throwable> exceptions = new HashMap<>();
 
   CacheEvictionStatsBuilder() {
   }
@@ -38,9 +42,14 @@ public final class CacheEvictionStatsBuilder {
     return this;
   }
 
+  public void addException(byte[] regionName, Throwable ie) {
+    exceptions.put(regionName, ie);
+  }
+
   public CacheEvictionStatsBuilder append(CacheEvictionStats stats) {
     this.evictedBlocks += stats.getEvictedBlocks();
     this.maxCacheSize += stats.getMaxCacheSize();
+    this.exceptions.putAll(stats.getExceptions());
     return this;
   }
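
Note: append(...) is what lets the client fold per-server results into a single object: the counters are summed and the exception maps are merged. An illustrative sketch (assumes same-package code, since the builder's constructor is package-private; values made up):

CacheEvictionStatsBuilder serverBuilder = CacheEvictionStats.builder();
serverBuilder.withEvictedBlocks(10).withMaxCacheSize(1024);
serverBuilder.addException(Bytes.toBytes("someRegion"),
    new NotServingRegionException("someRegion"));
CacheEvictionStats serverStats = serverBuilder.build();

// Fold one server's stats into a running total: counters add, exceptions accumulate.
CacheEvictionStats total = CacheEvictionStats.builder()
    .append(serverStats)
    .build();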

hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java

@@ -111,6 +111,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
@@ -1460,32 +1461,37 @@ public class HBaseAdmin implements Admin {
     CacheEvictionStatsBuilder cacheEvictionStats = CacheEvictionStats.builder();
     List<Pair<RegionInfo, ServerName>> pairs =
         MetaTableAccessor.getTableRegionsAndLocations(connection, tableName);
-    for (Pair<RegionInfo, ServerName> pair: pairs) {
-      if (pair.getFirst().isOffline() || pair.getSecond() == null) {
-        continue;
-      }
-      try {
-        cacheEvictionStats = cacheEvictionStats.append(
-            clearBlockCache(pair.getSecond(), pair.getFirst()));
-      } catch (NotServingRegionException e) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Failed to clear block cache for " + pair.getFirst() + " on " +
-              pair.getSecond() + ": " + StringUtils.stringifyException(e));
+    Map<ServerName, List<RegionInfo>> regionInfoByServerName =
+        pairs.stream()
+            .filter(pair -> !(pair.getFirst().isOffline()))
+            .filter(pair -> pair.getSecond() != null)
+            .collect(Collectors.groupingBy(pair -> pair.getSecond(),
+                Collectors.mapping(pair -> pair.getFirst(), Collectors.toList())));
+
+    for (Map.Entry<ServerName, List<RegionInfo>> entry : regionInfoByServerName.entrySet()) {
+      CacheEvictionStats stats = clearBlockCache(entry.getKey(), entry.getValue());
+      cacheEvictionStats = cacheEvictionStats.append(stats);
+      if (stats.getExceptionCount() > 0) {
+        for (Map.Entry<byte[], Throwable> exception : stats.getExceptions().entrySet()) {
+          LOG.debug("Failed to clear block cache for "
+              + Bytes.toStringBinary(exception.getKey())
+              + " on " + entry.getKey() + ": ", exception.getValue());
         }
       }
     }
     return cacheEvictionStats.build();
   }
 
-  private CacheEvictionStats clearBlockCache(final ServerName sn, final RegionInfo hri)
+  private CacheEvictionStats clearBlockCache(final ServerName sn, final List<RegionInfo> hris)
       throws IOException {
     HBaseRpcController controller = rpcControllerFactory.newController();
     AdminService.BlockingInterface admin = this.connection.getAdmin(sn);
     ClearRegionBlockCacheRequest request =
-        RequestConverter.buildClearRegionBlockCacheRequest(hri.getRegionName());
+        RequestConverter.buildClearRegionBlockCacheRequest(hris);
+    ClearRegionBlockCacheResponse response;
     try {
-      return ProtobufUtil.toCacheEvictionStats(
-          admin.clearRegionBlockCache(controller, request).getStats());
+      response = admin.clearRegionBlockCache(controller, request);
+      return ProtobufUtil.toCacheEvictionStats(response.getStats());
     } catch (ServiceException se) {
       throw ProtobufUtil.getRemoteException(se);
     }
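
Note: the heart of this change is the groupingBy/mapping collector, which turns the flat (region, server) pairs into one region list per server so that each server receives a single RPC. A self-contained demo of the idiom, with plain strings standing in for RegionInfo and ServerName:

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupRegionsByServerDemo {
  public static void main(String[] args) {
    List<Map.Entry<String, String>> pairs = Arrays.asList(
        new SimpleEntry<>("region-1", "server-a"),
        new SimpleEntry<>("region-2", "server-a"),
        new SimpleEntry<>("region-3", "server-b"));
    // Key by server, keep only the region half of each pair: one bucket per server.
    Map<String, List<String>> byServer = pairs.stream()
        .collect(Collectors.groupingBy(Map.Entry::getValue,
            Collectors.mapping(Map.Entry::getKey, Collectors.toList())));
    // Prints something like {server-a=[region-1, region-2], server-b=[region-3]}
    System.out.println(byServer);
  }
}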

hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.CacheEvictionStats;
+import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
@@ -152,6 +153,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamil
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionExceptionMessage;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
@@ -3415,15 +3417,35 @@ public final class ProtobufUtil {
         .collect(Collectors.toList());
   }
 
-  public static CacheEvictionStats toCacheEvictionStats(HBaseProtos.CacheEvictionStats cacheEvictionStats) {
-    return CacheEvictionStats.builder()
-        .withEvictedBlocks(cacheEvictionStats.getEvictedBlocks())
-        .withMaxCacheSize(cacheEvictionStats.getMaxCacheSize())
-        .build();
+  public static CacheEvictionStats toCacheEvictionStats(
+      HBaseProtos.CacheEvictionStats stats) throws IOException {
+    CacheEvictionStatsBuilder builder = CacheEvictionStats.builder();
+    builder.withEvictedBlocks(stats.getEvictedBlocks())
+        .withMaxCacheSize(stats.getMaxCacheSize());
+    if (stats.getExceptionCount() > 0) {
+      for (HBaseProtos.RegionExceptionMessage exception : stats.getExceptionList()) {
+        HBaseProtos.RegionSpecifier rs = exception.getRegion();
+        byte[] regionName = rs.getValue().toByteArray();
+        builder.addException(regionName, ProtobufUtil.toException(exception.getException()));
+      }
+    }
+    return builder.build();
   }
 
-  public static HBaseProtos.CacheEvictionStats toCacheEvictionStats(CacheEvictionStats cacheEvictionStats) {
-    return HBaseProtos.CacheEvictionStats.newBuilder()
+  public static HBaseProtos.CacheEvictionStats toCacheEvictionStats(
+      CacheEvictionStats cacheEvictionStats) {
+    HBaseProtos.CacheEvictionStats.Builder builder
+        = HBaseProtos.CacheEvictionStats.newBuilder();
+    for (Map.Entry<byte[], Throwable> entry : cacheEvictionStats.getExceptions().entrySet()) {
+      builder.addException(
+          RegionExceptionMessage.newBuilder()
+              .setRegion(RequestConverter.buildRegionSpecifier(
+                  RegionSpecifierType.REGION_NAME, entry.getKey()))
+              .setException(ResponseConverter.buildException(entry.getValue()))
+              .build()
+      );
+    }
+    return builder
         .setEvictedBlocks(cacheEvictionStats.getEvictedBlocks())
         .setMaxCacheSize(cacheEvictionStats.getMaxCacheSize())
         .build();
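
Note: the two overloads are intended to be inverses, with ResponseConverter.buildException and ProtobufUtil.toException (de)serializing each Throwable as a NameBytesPair. A hedged round-trip sketch (hypothetical test code, not in this commit; the deserializing direction can throw IOException):

CacheEvictionStatsBuilder builder = CacheEvictionStats.builder();
builder.withEvictedBlocks(5).withMaxCacheSize(2048);
builder.addException(Bytes.toBytes("r1"), new NotServingRegionException("r1"));
CacheEvictionStats original = builder.build();

HBaseProtos.CacheEvictionStats proto = ProtobufUtil.toCacheEvictionStats(original);
CacheEvictionStats restored = ProtobufUtil.toCacheEvictionStats(proto);
assert restored.getEvictedBlocks() == 5;
assert restored.getExceptionCount() == 1;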

hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java

@@ -53,7 +53,6 @@ import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
@@ -1507,10 +1506,14 @@ public final class RequestConverter {
    *
    * @return a ClearRegionBlockCacheRequest
    */
-  public static ClearRegionBlockCacheRequest buildClearRegionBlockCacheRequest(final byte[]
-      regionName) {
-    RegionSpecifier region = buildRegionSpecifier(RegionSpecifierType.REGION_NAME, regionName);
-    return ClearRegionBlockCacheRequest.newBuilder().addAllRegion(Lists.newArrayList(region)).build();
+  public static ClearRegionBlockCacheRequest
+      buildClearRegionBlockCacheRequest(List<RegionInfo> hris) {
+    ClearRegionBlockCacheRequest.Builder builder = ClearRegionBlockCacheRequest.newBuilder();
+    hris.forEach(
+        hri -> builder.addRegion(
+            buildRegionSpecifier(RegionSpecifierType.REGION_NAME, hri.getRegionName())
+        ));
+    return builder.build();
   }
 
   /**
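
Note: the request now carries one RegionSpecifier per region instead of a single-element list, which is what allows HBaseAdmin to batch per server. A hypothetical usage sketch, where infoA and infoB stand for RegionInfo instances hosted on the same server:

List<RegionInfo> regions = Arrays.asList(infoA, infoB);
ClearRegionBlockCacheRequest request =
    RequestConverter.buildClearRegionBlockCacheRequest(regions);
// The generated accessor reflects one specifier per region.
assert request.getRegionCount() == regions.size();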

hbase-protocol-shaded/src/main/protobuf/HBase.proto

@@ -242,8 +242,14 @@ message RegionServerInfo {
   optional VersionInfo version_info = 2;
 }
 
+message RegionExceptionMessage {
+  required RegionSpecifier region = 1;
+  required NameBytesPair exception = 2;
+}
+
 message CacheEvictionStats {
   optional int64 evicted_blocks = 1;
   optional int64 bytes_evicted = 2;
   optional int64 max_cache_size = 3;
+  repeated RegionExceptionMessage exception = 4;
 }
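
Note: on the wire, each failed region travels as a RegionExceptionMessage embedded in the stats. A hedged sketch of assembling the message through the generated (shaded) protobuf API, with made-up values:

HBaseProtos.CacheEvictionStats proto = HBaseProtos.CacheEvictionStats.newBuilder()
    .setEvictedBlocks(42)
    .setMaxCacheSize(1073741824L)
    .addException(HBaseProtos.RegionExceptionMessage.newBuilder()
        .setRegion(HBaseProtos.RegionSpecifier.newBuilder()
            .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
            .setValue(ByteString.copyFromUtf8("someRegionName")))
        .setException(HBaseProtos.NameBytesPair.newBuilder()
            .setName("org.apache.hadoop.hbase.NotServingRegionException")))
    .build();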

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java

@@ -60,9 +60,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.MultiActionResultTooLarge;
 import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
@@ -1354,11 +1354,15 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
    * @throws IOException if any of the specifiers is not null,
    *    but failed to find the region
    */
-  private List<HRegion> getRegions(
-      final List<RegionSpecifier> regionSpecifiers) throws IOException {
+  private List<HRegion> getRegions(final List<RegionSpecifier> regionSpecifiers,
+      final CacheEvictionStatsBuilder stats) {
     List<HRegion> regions = Lists.newArrayListWithCapacity(regionSpecifiers.size());
     for (RegionSpecifier regionSpecifier: regionSpecifiers) {
+      try {
         regions.add(regionServer.getRegion(regionSpecifier.getValue().toByteArray()));
+      } catch (NotServingRegionException e) {
+        stats.addException(regionSpecifier.getValue().toByteArray(), e);
+      }
     }
     return regions;
   }
@@ -3436,19 +3440,18 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   @Override
   public ClearRegionBlockCacheResponse clearRegionBlockCache(RpcController controller,
-      ClearRegionBlockCacheRequest request)
-    throws ServiceException {
+      ClearRegionBlockCacheRequest request) {
+    ClearRegionBlockCacheResponse.Builder builder =
+        ClearRegionBlockCacheResponse.newBuilder();
     CacheEvictionStatsBuilder stats = CacheEvictionStats.builder();
-    try {
-      List<HRegion> regions = getRegions(request.getRegionList());
-      for (HRegion region : regions) {
+    List<HRegion> regions = getRegions(request.getRegionList(), stats);
+    for (HRegion region : regions) {
+      try {
         stats = stats.append(this.regionServer.clearRegionBlockCache(region));
+      } catch (Exception e) {
+        stats.addException(region.getRegionInfo().getRegionName(), e);
       }
-    } catch (Exception e) {
-      throw new ServiceException(e);
     }
-    return ClearRegionBlockCacheResponse.newBuilder()
-        .setStats(ProtobufUtil.toCacheEvictionStats(stats.build()))
-        .build();
+    return builder.setStats(ProtobufUtil.toCacheEvictionStats(stats.build())).build();
   }
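
Note: the RPC no longer aborts on the first bad region: getRegions(...) records regions this server does not host, and the per-region try/catch records eviction failures, so a single stats object reports both alongside the blocks that were cleared. The isolation idiom in miniature (clearOne is a hypothetical stand-in for regionServer.clearRegionBlockCache):

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class FailureIsolationDemo {
  // Hypothetical stand-in for the real per-region operation.
  static long clearOne(String region) {
    if (region.equals("r2")) {
      throw new IllegalStateException("cannot clear cache for " + region);
    }
    return 1L;
  }

  public static void main(String[] args) {
    Map<String, Exception> failures = new HashMap<>();
    long cleared = 0;
    for (String region : Arrays.asList("r1", "r2", "r3")) {
      try {
        cleared += clearOne(region);
      } catch (Exception e) {
        failures.put(region, e); // record and keep going, as the handler now does
      }
    }
    // cleared == 2 and failures holds r2: together they mirror CacheEvictionStats.
    System.out.println("cleared=" + cleared + ", failures=" + failures.keySet());
  }
}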