HBASE-7121 Fix TestHFileOutputFormat after moving RS to metrics2

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1407216 13f79535-47bb-0310-9956-ffa450edef68

Author: eclark
Date:   2012-11-08 19:09:33 +00:00
parent: 0cf5cfec84
commit: dbddf05a0a

2 changed files with 46 additions and 11 deletions

org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.CacheStats;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@@ -46,7 +47,8 @@ class MetricsRegionServerWrapperImpl
   public static final int PERIOD = 15;
 
   private final HRegionServer regionServer;
-  private final BlockCache blockCache;
+
+  private BlockCache blockCache;
 
   private volatile long numStores = 0;
   private volatile long numStoreFiles = 0;
@@ -70,13 +72,29 @@ class MetricsRegionServerWrapperImpl
   public MetricsRegionServerWrapperImpl(final HRegionServer regionServer) {
     this.regionServer = regionServer;
-    this.blockCache = this.regionServer.cacheConfig.getBlockCache();
-    this.cacheStats = blockCache.getStats();
+    initBlockCache();
 
     this.executor = CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
     this.runnable = new RegionServerMetricsWrapperRunnable();
     this.executor.scheduleWithFixedDelay(this.runnable, PERIOD, PERIOD, TimeUnit.SECONDS);
   }
 
+  /**
+   * It's possible that due to threading the block cache could not be initialized
+   * yet (testing multiple region servers in one jvm). So we need to try and initialize
+   * the blockCache and cacheStats reference multiple times until we succeed.
+   */
+  private synchronized void initBlockCache() {
+    CacheConfig cacheConfig = this.regionServer.cacheConfig;
+    if (cacheConfig != null && this.blockCache == null) {
+      this.blockCache = cacheConfig.getBlockCache();
+    }
+
+    if (this.blockCache != null && this.cacheStats == null) {
+      this.cacheStats = blockCache.getStats();
+    }
+  }
+
   @Override
   public String getClusterId() {
     return regionServer.getClusterId();
@@ -309,7 +327,7 @@ class MetricsRegionServerWrapperImpl
     @Override
     synchronized public void run() {
+      initBlockCache();
       cacheStats = blockCache.getStats();
 
       HDFSBlocksDistribution hdfsBlocksDistribution =
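
Note on the change above (editorial, not part of the commit): initBlockCache() is synchronized and idempotent, so the scheduled metrics runnable can safely call it on every period until the late-arriving block cache finally shows up. Below is a minimal, self-contained Java sketch of that retry-until-ready pattern; all names here (LazySource, LazyMetricsWrapper, init) are invented stand-ins for regionServer.cacheConfig/BlockCache and the wrapper, not HBase APIs.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // LazySource stands in for the block cache: another thread publishes it
    // some time after the metrics wrapper has already been constructed.
    class LazySource {
      private static volatile String stats;

      static void publish() { stats = "hitCount=0, missCount=0"; }

      static String get() { return stats; } // null until publish() runs
    }

    class LazyMetricsWrapper implements Runnable {
      private String cacheStats; // plays the role of the cacheStats field

      // Mirrors initBlockCache(): synchronized, idempotent, safe to call
      // on every period until the assignment finally succeeds.
      private synchronized void init() {
        if (cacheStats == null) {
          cacheStats = LazySource.get();
        }
      }

      @Override
      public synchronized void run() {
        init();
        if (cacheStats == null) {
          return; // source not ready yet; skip this cycle, retry next period
        }
        System.out.println("metrics: " + cacheStats);
      }

      public static void main(String[] args) throws InterruptedException {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        executor.scheduleWithFixedDelay(new LazyMetricsWrapper(), 0, 100, TimeUnit.MILLISECONDS);
        Thread.sleep(250);    // a few cycles before the source exists: no output
        LazySource.publish(); // the cache appears "late", as in the multi-RS test
        Thread.sleep(250);    // later cycles pick it up and report
        executor.shutdownNow();
      }
    }

The key design choice is retrying from the periodic task rather than blocking in the constructor, which keeps region server startup unblocked while still guaranteeing the metrics eventually attach.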

org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java

@@ -19,8 +19,11 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.metrics2.MetricsExecutor;
 
+import java.util.Map;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -44,17 +47,29 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper {
   @Override
   public String getTableName() {
-    return this.region.getTableDesc().getNameAsString();
+    HTableDescriptor tableDesc = this.region.getTableDesc();
+    if (tableDesc == null) {
+      return "";
+    }
+    return tableDesc.getNameAsString();
   }
 
   @Override
   public String getRegionName() {
-    return this.region.getRegionInfo().getEncodedName();
+    HRegionInfo regionInfo = this.region.getRegionInfo();
+    if (regionInfo == null) {
+      return "";
+    }
+    return regionInfo.getEncodedName();
   }
 
   @Override
   public long getNumStores() {
-    return this.region.stores.size();
+    Map<byte[],Store> stores = this.region.stores;
+    if (stores == null) {
+      return 0;
+    }
+    return stores.size();
   }
 
   @Override
@@ -90,11 +105,13 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper {
       long tempMemstoreSize = 0;
       long tempStoreFileSize = 0;
 
+      if (region.stores != null) {
       for (Store store : region.stores.values()) {
         tempNumStoreFiles += store.getStorefilesCount();
         tempMemstoreSize += store.getMemStoreSize();
         tempStoreFileSize += store.getStorefilesSize();
       }
+      }
 
       numStoreFiles = tempNumStoreFiles;
       memstoreSize = tempMemstoreSize;
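
Note on the guards above (editorial, not part of the commit): every getter now copies the possibly-null reference into a local before checking it, so the null check and the subsequent use see the same value even if another thread populates the field concurrently, and each returns a neutral default ("" or 0) while the region is still opening. A small self-contained sketch of the same shape; the names (RegionLike, GuardedRegionMetrics) are invented stand-ins for HRegion and the wrapper, not HBase APIs.

    import java.util.Map;

    // RegionLike.stores plays the role of HRegion.stores, which can still be
    // null when the metrics system polls a freshly created region wrapper.
    class RegionLike {
      volatile Map<byte[], Object> stores; // null until the region finishes opening
    }

    class GuardedRegionMetrics {
      private final RegionLike region;

      GuardedRegionMetrics(RegionLike region) {
        this.region = region;
      }

      // Same shape as getNumStores() above: read the field once into a local
      // so the null check and the use cannot see two different values, then
      // fall back to a neutral default instead of a NullPointerException.
      long getNumStores() {
        Map<byte[], Object> stores = region.stores;
        if (stores == null) {
          return 0; // region still opening: report "no stores", not an error
        }
        return stores.size();
      }

      public static void main(String[] args) {
        GuardedRegionMetrics metrics = new GuardedRegionMetrics(new RegionLike());
        System.out.println(metrics.getNumStores()); // prints 0 instead of crashing
      }
    }

Reading the field once is what makes the guard sound: checking region.stores and then calling region.stores.size() directly would leave a window where the field flips between the two reads.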