HBASE-8564 TestMetricsRegionServer depends on test order

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1483617 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
eclark 2013-05-17 00:28:50 +00:00
parent 8806f23ee4
commit b3c97c3e3e
10 changed files with 176 additions and 35 deletions

View File

@ -33,8 +33,8 @@ import java.util.ServiceLoader;
public class CompatibilitySingletonFactory extends CompatibilityFactory { public class CompatibilitySingletonFactory extends CompatibilityFactory {
public static enum SingletonStorage { public static enum SingletonStorage {
INSTANCE; INSTANCE;
Object lock = new Object(); private final Object lock = new Object();
private static final Map<Class, Object> instances = new HashMap<Class, Object>(); private final Map<Class, Object> instances = new HashMap<Class, Object>();
} }
private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class); private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);

View File

@ -0,0 +1,23 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
/**
 * A generator of random strings, used by compatibility-factory tests to
 * detect whether two callers received the same underlying instance
 * (the same instance returns the same string on every call).
 */
public interface RandomStringGenerator {

/**
 * @return the random string held by this generator. Implementations are
 *         expected to return the same value for the lifetime of the instance.
 */
String getRandString();
}

View File

@ -0,0 +1,36 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.util.UUID;
/**
 * Default {@link RandomStringGenerator}. Picks one random UUID string when the
 * instance is created and hands that same string back on every call, so two
 * references to the same instance are distinguishable from two distinct
 * instances.
 */
public class RandomStringGeneratorImpl implements RandomStringGenerator {

  /** Random token fixed at construction time; never changes afterwards. */
  private final String randString = UUID.randomUUID().toString();

  @Override
  public String getRandString() {
    return randString;
  }
}

View File

@ -0,0 +1,84 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
/**
 * Verifies that {@link CompatibilitySingletonFactory} hands every concurrent
 * caller the exact same {@link RandomStringGenerator} instance: all callers
 * must observe the same random string, while a freshly constructed generator
 * must produce a different one.
 */
public class TestCompatibilitySingletonFactory {

  private static final int ITERATIONS = 100000;
  private static final Random RANDOM = new Random();

  /** Sleeps a random few ms, then asks the factory for the singleton's string. */
  private class TestCompatibilitySingletonFactoryCallable implements Callable<String> {
    @Override
    public String call() throws Exception {
      // Random jitter so threads reach the factory in an unpredictable order,
      // exercising the factory's locking under real contention.
      Thread.sleep(RANDOM.nextInt(10));
      RandomStringGenerator instance =
          CompatibilitySingletonFactory.getInstance(RandomStringGenerator.class);
      return instance.getRandString();
    }
  }

  @Test
  public void testGetInstance() throws Exception {
    List<TestCompatibilitySingletonFactoryCallable> callables =
        new ArrayList<TestCompatibilitySingletonFactoryCallable>(ITERATIONS);
    List<String> resultStrings = new ArrayList<String>(ITERATIONS);

    // Create the callables.
    for (int i = 0; i < ITERATIONS; i++) {
      callables.add(new TestCompatibilitySingletonFactoryCallable());
    }

    // Now run the callables. Shut the pool down afterwards so this test does
    // not leak 100 live threads into subsequently-run tests.
    ExecutorService executorService = Executors.newFixedThreadPool(100);
    try {
      List<Future<String>> futures = executorService.invokeAll(callables);

      // Wait for them all to finish.
      for (Future<String> f : futures) {
        resultStrings.add(f.get());
      }
    } finally {
      executorService.shutdown();
    }

    // Get the first string.
    String firstString = resultStrings.get(0);

    // Assert that all the strings are equal to the first.
    for (String s : resultStrings) {
      assertEquals(firstString, s);
    }

    // An assert to make sure that RandomStringGeneratorImpl is generating random strings.
    assertNotEquals(new RandomStringGeneratorImpl().getRandString(), firstString);
  }
}

View File

@ -0,0 +1 @@
org.apache.hadoop.hbase.RandomStringGeneratorImpl

View File

@ -19,14 +19,12 @@
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
/** /**
* Factory to create MetricsRegionServerSource when given a MetricsRegionServerWrapper * Factory to create MetricsRegionServerSource when given a MetricsRegionServerWrapper.
*/ */
public class MetricsRegionServerSourceFactoryImpl implements MetricsRegionServerSourceFactory { public class MetricsRegionServerSourceFactoryImpl implements MetricsRegionServerSourceFactory {
public static enum FactoryStorage { public static enum FactoryStorage {
INSTANCE; INSTANCE;
private Object aggLock = new Object(); private Object aggLock = new Object();
private Object serverLock = new Object();
private MetricsRegionServerSource serverSource;
private MetricsRegionAggregateSourceImpl aggImpl; private MetricsRegionAggregateSourceImpl aggImpl;
} }
@ -41,14 +39,8 @@ public class MetricsRegionServerSourceFactoryImpl implements MetricsRegionServer
@Override @Override
public synchronized MetricsRegionServerSource createServer(MetricsRegionServerWrapper regionServerWrapper) { public MetricsRegionServerSource createServer(MetricsRegionServerWrapper regionServerWrapper) {
synchronized (FactoryStorage.INSTANCE.serverLock) { return new MetricsRegionServerSourceImpl(regionServerWrapper);
if (FactoryStorage.INSTANCE.serverSource == null) {
FactoryStorage.INSTANCE.serverSource = new MetricsRegionServerSourceImpl(
regionServerWrapper);
}
return FactoryStorage.INSTANCE.serverSource;
}
} }
@Override @Override

View File

@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl; import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@ -30,9 +28,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
public class MetricsRegionAggregateSourceImpl extends BaseSourceImpl public class MetricsRegionAggregateSourceImpl extends BaseSourceImpl
implements MetricsRegionAggregateSource { implements MetricsRegionAggregateSource {
private final Log LOG = LogFactory.getLog(this.getClass());
// lock to guard against concurrent access to regionSources // lock to guard against concurrent access to regionSources
final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
private final TreeSet<MetricsRegionSourceImpl> regionSources = private final TreeSet<MetricsRegionSourceImpl> regionSources =
new TreeSet<MetricsRegionSourceImpl>(); new TreeSet<MetricsRegionSourceImpl>();

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CompatibilityFactory; import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
/** /**
@ -29,10 +30,10 @@ import org.apache.hadoop.hbase.CompatibilityFactory;
@InterfaceAudience.Private @InterfaceAudience.Private
public class MetricsRegion { public class MetricsRegion {
private MetricsRegionSource source; private final MetricsRegionSource source;
public MetricsRegion(MetricsRegionWrapper wrapper) { public MetricsRegion(final MetricsRegionWrapper wrapper) {
source = CompatibilityFactory.getInstance(MetricsRegionServerSourceFactory.class) source = CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class)
.createRegion(wrapper); .createRegion(wrapper);
} }
@ -48,11 +49,11 @@ public class MetricsRegion {
source.updateDelete(); source.updateDelete();
} }
public void updateGet(long getSize) { public void updateGet(final long getSize) {
source.updateGet(getSize); source.updateGet(getSize);
} }
public void updateScanNext(long scanSize) { public void updateScanNext(final long scanSize) {
source.updateScan(scanSize); source.updateScan(scanSize);
} }

View File

@ -17,8 +17,6 @@
*/ */
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@ -26,22 +24,27 @@ import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
/** /**
* This class is for maintaining the various regionserver statistics * This class is for maintaining the various regionserver statistics
* and publishing them through the metrics interfaces. * and publishing them through the metrics interfaces.
* <p> * <p/>
* This class has a number of metrics variables that are publicly accessible; * This class has a number of metrics variables that are publicly accessible;
* these variables (objects) have methods to update their values. * these variables (objects) have methods to update their values.
*/ */
@InterfaceStability.Evolving @InterfaceStability.Evolving
@InterfaceAudience.Private @InterfaceAudience.Private
public class MetricsRegionServer { public class MetricsRegionServer {
private final Log LOG = LogFactory.getLog(this.getClass());
private MetricsRegionServerSource serverSource; private MetricsRegionServerSource serverSource;
private MetricsRegionServerWrapper regionServerWrapper; private MetricsRegionServerWrapper regionServerWrapper;
public MetricsRegionServer(MetricsRegionServerWrapper regionServerWrapper) { public MetricsRegionServer(MetricsRegionServerWrapper regionServerWrapper) {
this(regionServerWrapper,
CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class)
.createServer(regionServerWrapper));
}
MetricsRegionServer(MetricsRegionServerWrapper regionServerWrapper,
MetricsRegionServerSource serverSource) {
this.regionServerWrapper = regionServerWrapper; this.regionServerWrapper = regionServerWrapper;
serverSource = this.serverSource = serverSource;
CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class)
.createServer(regionServerWrapper);
} }
// for unit-test usage // for unit-test usage
@ -53,35 +56,35 @@ public class MetricsRegionServer {
return regionServerWrapper; return regionServerWrapper;
} }
public void updatePut(long t){ public void updatePut(long t) {
if (t > 1000) { if (t > 1000) {
serverSource.incrSlowPut(); serverSource.incrSlowPut();
} }
serverSource.updatePut(t); serverSource.updatePut(t);
} }
public void updateDelete(long t){ public void updateDelete(long t) {
if (t > 1000) { if (t > 1000) {
serverSource.incrSlowDelete(); serverSource.incrSlowDelete();
} }
serverSource.updateDelete(t); serverSource.updateDelete(t);
} }
public void updateGet(long t){ public void updateGet(long t) {
if (t > 1000) { if (t > 1000) {
serverSource.incrSlowGet(); serverSource.incrSlowGet();
} }
serverSource.updateGet(t); serverSource.updateGet(t);
} }
public void updateIncrement(long t){ public void updateIncrement(long t) {
if (t > 1000) { if (t > 1000) {
serverSource.incrSlowIncrement(); serverSource.incrSlowIncrement();
} }
serverSource.updateIncrement(t); serverSource.updateIncrement(t);
} }
public void updateAppend(long t){ public void updateAppend(long t) {
if (t > 1000) { if (t > 1000) {
serverSource.incrSlowAppend(); serverSource.incrSlowAppend();
} }

View File

@ -38,7 +38,11 @@ public class TestMetricsRegionServer {
@Test @Test
public void testWrapperSource() { public void testWrapperSource() {
MetricsRegionServer rsm = new MetricsRegionServer(new MetricsRegionServerWrapperStub()); MetricsRegionServerWrapperStub wrapper = new MetricsRegionServerWrapperStub();
MetricsRegionServerSource source =
CompatibilityFactory.getInstance(MetricsRegionServerSourceFactory.class)
.createServer(wrapper);
MetricsRegionServer rsm = new MetricsRegionServer(wrapper, source);
MetricsRegionServerSource serverSource = rsm.getMetricsSource(); MetricsRegionServerSource serverSource = rsm.getMetricsSource();
HELPER.assertTag("serverName", "test", serverSource); HELPER.assertTag("serverName", "test", serverSource);
HELPER.assertTag("clusterId", "tClusterId", serverSource); HELPER.assertTag("clusterId", "tClusterId", serverSource);