HBASE-20180 Avoid Class::newInstance
parent ad425e8603
commit 5e2f8e4023
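Every hunk below applies the same migration: Class.newInstance() becomes getDeclaredConstructor().newInstance(). The older call is deprecated since Java 9 and rethrows any checked exception the no-arg constructor throws, while the constructor-based call reports a missing no-arg constructor as NoSuchMethodException and wraps constructor failures in InvocationTargetException, which is why the catch blocks in these hunks gain those two exception types. A minimal standalone sketch of the two forms (class names are illustrative, not from the patch):

import java.lang.reflect.InvocationTargetException;

public class NewInstanceMigration {
  // Old style: Class.newInstance() can silently propagate any checked exception
  // thrown by the constructor, and is deprecated since Java 9.
  static Runnable oldStyle(Class<? extends Runnable> cls)
      throws InstantiationException, IllegalAccessException {
    return cls.newInstance();
  }

  // New style: constructor failures surface as InvocationTargetException, a missing
  // no-arg constructor as NoSuchMethodException, so callers handle them explicitly.
  static Runnable newStyle(Class<? extends Runnable> cls)
      throws ReflectiveOperationException {
    return cls.getDeclaredConstructor().newInstance();
  }

  public static void main(String[] args) throws Exception {
    System.out.println(newStyle(DummyTask.class));
  }

  public static class DummyTask implements Runnable {
    @Override
    public void run() { }
  }
}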
@@ -83,6 +83,7 @@
           <compilerArgs>
             <arg>-XepDisableWarningsInGeneratedCode</arg>
             <arg>-Xep:FallThrough:OFF</arg> <!-- already in findbugs -->
+            <arg>-Xep:ClassNewInstance:ERROR</arg>
           </compilerArgs>
           <annotationProcessorPaths>
             <path>
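The new -Xep:ClassNewInstance:ERROR argument promotes error-prone's ClassNewInstance check from a warning to a compile failure, so a reintroduced Class.newInstance() call breaks the build once this flag is active. A small self-contained illustration of what the check rejects versus what it accepts (hypothetical demo class, not part of the patch):

public class ClassNewInstanceCheckDemo {
  public static void main(String[] args) throws Exception {
    // Under -Xep:ClassNewInstance:ERROR the next line would fail the build
    // (it is also deprecated since Java 9), so it is left commented out here:
    // Object flagged = Class.forName("java.util.ArrayList").newInstance();

    // The accepted form used throughout this commit:
    Object ok = Class.forName("java.util.ArrayList").getDeclaredConstructor().newInstance();
    System.out.println(ok.getClass().getName());
  }
}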
@@ -27,6 +27,7 @@ import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;

 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -480,21 +481,19 @@ extends AggregateService implements RegionCoprocessor {
   ColumnInterpreter<T,S,P,Q,R> constructColumnInterpreterFromRequest(
       AggregateRequest request) throws IOException {
     String className = request.getInterpreterClassName();
-    Class<?> cls;
     try {
-      cls = Class.forName(className);
-      ColumnInterpreter<T,S,P,Q,R> ci = (ColumnInterpreter<T, S, P, Q, R>) cls.newInstance();
+      ColumnInterpreter<T,S,P,Q,R> ci;
+      Class<?> cls = Class.forName(className);
+      ci = (ColumnInterpreter<T, S, P, Q, R>) cls.getDeclaredConstructor().newInstance();
+
       if (request.hasInterpreterSpecificBytes()) {
         ByteString b = request.getInterpreterSpecificBytes();
         P initMsg = getParsedGenericInstance(ci.getClass(), 2, b);
         ci.initialize(initMsg);
       }
       return ci;
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    } catch (InstantiationException e) {
-      throw new IOException(e);
-    } catch (IllegalAccessException e) {
+    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
+        NoSuchMethodException | InvocationTargetException e) {
       throw new IOException(e);
     }
   }
@@ -281,7 +281,8 @@ public class MultithreadedTableMapper<K2, V2> extends TableMapper<K2, V2> {
             outer.getInputSplit());
         Class<?> wrappedMapperClass = Class.forName("org.apache.hadoop.mapreduce.lib.map.WrappedMapper");
         Method getMapContext = wrappedMapperClass.getMethod("getMapContext", MapContext.class);
-        subcontext = (Context) getMapContext.invoke(wrappedMapperClass.newInstance(), mc);
+        subcontext = (Context) getMapContext.invoke(
+            wrappedMapperClass.getDeclaredConstructor().newInstance(), mc);
       } catch (Exception ee) { // FindBugs: REC_CATCH_EXCEPTION
         // rethrow as IOE
         throw new IOException(e);
@@ -22,6 +22,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
@@ -300,17 +301,11 @@ public class TableSnapshotInputFormatImpl {
     if (splitAlgoClassName == null)
       return null;
     try {
-      return ((Class<? extends RegionSplitter.SplitAlgorithm>)
-          Class.forName(splitAlgoClassName)).newInstance();
-    } catch (ClassNotFoundException e) {
-      throw new IOException("SplitAlgo class " + splitAlgoClassName +
-          " is not found", e);
-    } catch (InstantiationException e) {
-      throw new IOException("SplitAlgo class " + splitAlgoClassName +
-          " is not instantiable", e);
-    } catch (IllegalAccessException e) {
-      throw new IOException("SplitAlgo class " + splitAlgoClassName +
-          " is not instantiable", e);
+      return Class.forName(splitAlgoClassName).asSubclass(RegionSplitter.SplitAlgorithm.class)
+          .getDeclaredConstructor().newInstance();
+    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException |
+        NoSuchMethodException | InvocationTargetException e) {
+      throw new IOException("SplitAlgo class " + splitAlgoClassName + " is not found", e);
     }
   }

@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -593,14 +594,11 @@ public final class Constraints {
       // add the constraint, now that we expect it to be valid.
       Class<? extends Constraint> clazz = classloader.loadClass(key)
          .asSubclass(Constraint.class);
-      Constraint constraint = clazz.newInstance();
+      Constraint constraint = clazz.getDeclaredConstructor().newInstance();
       constraint.setConf(conf);
       constraints.add(constraint);
-    } catch (ClassNotFoundException e1) {
-      throw new IOException(e1);
-    } catch (InstantiationException e1) {
-      throw new IOException(e1);
-    } catch (IllegalAccessException e1) {
+    } catch (InvocationTargetException | NoSuchMethodException | ClassNotFoundException |
+        InstantiationException | IllegalAccessException e1) {
       throw new IOException(e1);
     }
   }
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.master;

 import com.google.protobuf.Service;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.List;
 import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
@@ -170,17 +171,22 @@ public class MasterCoprocessorHost
   @Override
   public MasterCoprocessor checkAndGetInstance(Class<?> implClass)
       throws InstantiationException, IllegalAccessException {
-    if (MasterCoprocessor.class.isAssignableFrom(implClass)) {
-      return (MasterCoprocessor)implClass.newInstance();
-    } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
-      // For backward compatibility with old CoprocessorService impl which don't extend
-      // MasterCoprocessor.
-      return new CoprocessorServiceBackwardCompatiblity.MasterCoprocessorService(
-          (CoprocessorService)implClass.newInstance());
-    } else {
-      LOG.error(implClass.getName() + " is not of type MasterCoprocessor. Check the "
-          + "configuration " + CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
-      return null;
+    try {
+      if (MasterCoprocessor.class.isAssignableFrom(implClass)) {
+        return implClass.asSubclass(MasterCoprocessor.class).getDeclaredConstructor().newInstance();
+      } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
+        // For backward compatibility with old CoprocessorService impl which don't extend
+        // MasterCoprocessor.
+        CoprocessorService cs;
+        cs = implClass.asSubclass(CoprocessorService.class).getDeclaredConstructor().newInstance();
+        return new CoprocessorServiceBackwardCompatiblity.MasterCoprocessorService(cs);
+      } else {
+        LOG.error("{} is not of type MasterCoprocessor. Check the configuration of {}",
+            implClass.getName(), CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
+        return null;
+      }
+    } catch (NoSuchMethodException | InvocationTargetException e) {
+      throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e);
     }
   }

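checkAndGetInstance keeps its existing throws InstantiationException, IllegalAccessException signature, so the two extra checked exceptions introduced by getDeclaredConstructor().newInstance() are folded back into an InstantiationException via initCause; the cast is needed because Throwable.initCause returns Throwable, not the concrete type. The same idiom recurs in the RegionCoprocessorHost, RegionServerCoprocessorHost, and WALCoprocessorHost hunks below. A standalone sketch of the idiom, with illustrative names only:

import java.lang.reflect.InvocationTargetException;

public class InitCauseWrapDemo {
  // Wraps reflective failures in InstantiationException so callers that only
  // declare InstantiationException/IllegalAccessException keep compiling.
  static <T> T instantiate(Class<T> cls)
      throws InstantiationException, IllegalAccessException {
    try {
      return cls.getDeclaredConstructor().newInstance();
    } catch (NoSuchMethodException | InvocationTargetException e) {
      // initCause returns Throwable, hence the cast back to InstantiationException.
      throw (InstantiationException) new InstantiationException(cls.getName()).initCause(e);
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(instantiate(java.util.ArrayList.class));
  }
}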
@@ -310,10 +310,10 @@ public class ReplicationPeerManager {
     String[] filters = filterCSV.split(",");
     for (String filter : filters) {
       try {
-        Class.forName(filter).newInstance();
+        Class.forName(filter).getDeclaredConstructor().newInstance();
       } catch (Exception e) {
         throw new DoNotRetryIOException("Configured WALEntryFilter " + filter +
-          " could not be created. Failing add/update " + "peer operation.", e);
+          " could not be created. Failing add/update peer operation.", e);
       }
     }
   }
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.lang.reflect.InvocationTargetException;
 import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
@@ -1185,11 +1186,13 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
     rowSizeWarnThreshold = rs.conf.getInt(BATCH_ROWS_THRESHOLD_NAME, BATCH_ROWS_THRESHOLD_DEFAULT);
     RpcSchedulerFactory rpcSchedulerFactory;
     try {
-      Class<?> rpcSchedulerFactoryClass = rs.conf.getClass(
+      Class<?> cls = rs.conf.getClass(
           REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS,
           SimpleRpcSchedulerFactory.class);
-      rpcSchedulerFactory = ((RpcSchedulerFactory) rpcSchedulerFactoryClass.newInstance());
-    } catch (InstantiationException | IllegalAccessException e) {
+      rpcSchedulerFactory = cls.asSubclass(RpcSchedulerFactory.class)
+          .getDeclaredConstructor().newInstance();
+    } catch (NoSuchMethodException | InvocationTargetException |
+        InstantiationException | IllegalAccessException e) {
       throw new IllegalArgumentException(e);
     }
     // Server to handle client requests.
@@ -3534,8 +3537,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
     for (RemoteProcedureRequest req : request.getProcList()) {
       RSProcedureCallable callable;
       try {
-        callable =
-            Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class).newInstance();
+        callable = Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class)
+            .getDeclaredConstructor().newInstance();
       } catch (Exception e) {
         regionServer.remoteProcedureComplete(req.getProcId(), e);
         continue;
@@ -22,6 +22,7 @@ import com.google.protobuf.Message;
 import com.google.protobuf.Service;

 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -465,17 +466,22 @@ public class RegionCoprocessorHost
   @Override
   public RegionCoprocessor checkAndGetInstance(Class<?> implClass)
       throws InstantiationException, IllegalAccessException {
-    if (RegionCoprocessor.class.isAssignableFrom(implClass)) {
-      return (RegionCoprocessor)implClass.newInstance();
-    } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
-      // For backward compatibility with old CoprocessorService impl which don't extend
-      // RegionCoprocessor.
-      return new CoprocessorServiceBackwardCompatiblity.RegionCoprocessorService(
-          (CoprocessorService)implClass.newInstance());
-    } else {
-      LOG.error(implClass.getName() + " is not of type RegionCoprocessor. Check the "
-          + "configuration " + CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
-      return null;
+    try {
+      if (RegionCoprocessor.class.isAssignableFrom(implClass)) {
+        return implClass.asSubclass(RegionCoprocessor.class).getDeclaredConstructor().newInstance();
+      } else if (CoprocessorService.class.isAssignableFrom(implClass)) {
+        // For backward compatibility with old CoprocessorService impl which don't extend
+        // RegionCoprocessor.
+        CoprocessorService cs;
+        cs = implClass.asSubclass(CoprocessorService.class).getDeclaredConstructor().newInstance();
+        return new CoprocessorServiceBackwardCompatiblity.RegionCoprocessorService(cs);
+      } else {
+        LOG.error("{} is not of type RegionCoprocessor. Check the configuration of {}",
+            implClass.getName(), CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
+        return null;
+      }
+    } catch (NoSuchMethodException | InvocationTargetException e) {
+      throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e);
     }
   }

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;

 import com.google.protobuf.Service;

@@ -82,17 +83,23 @@ public class RegionServerCoprocessorHost extends
   @Override
   public RegionServerCoprocessor checkAndGetInstance(Class<?> implClass)
       throws InstantiationException, IllegalAccessException {
-    if (RegionServerCoprocessor.class.isAssignableFrom(implClass)) {
-      return (RegionServerCoprocessor)implClass.newInstance();
-    } else if (SingletonCoprocessorService.class.isAssignableFrom(implClass)) {
-      // For backward compatibility with old CoprocessorService impl which don't extend
-      // RegionCoprocessor.
-      return new CoprocessorServiceBackwardCompatiblity.RegionServerCoprocessorService(
-          (SingletonCoprocessorService)implClass.newInstance());
-    } else {
-      LOG.error(implClass.getName() + " is not of type RegionServerCoprocessor. Check the "
-          + "configuration " + CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY);
-      return null;
+    try {
+      if (RegionServerCoprocessor.class.isAssignableFrom(implClass)) {
+        return implClass.asSubclass(RegionServerCoprocessor.class).getDeclaredConstructor()
+            .newInstance();
+      } else if (SingletonCoprocessorService.class.isAssignableFrom(implClass)) {
+        // For backward compatibility with old CoprocessorService impl which don't extend
+        // RegionCoprocessor.
+        SingletonCoprocessorService tmp = implClass.asSubclass(SingletonCoprocessorService.class)
+            .getDeclaredConstructor().newInstance();
+        return new CoprocessorServiceBackwardCompatiblity.RegionServerCoprocessorService(tmp);
+      } else {
+        LOG.error("{} is not of type RegionServerCoprocessor. Check the configuration of {}",
+            implClass.getName(), CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY);
+        return null;
+      }
+    } catch (NoSuchMethodException | InvocationTargetException e) {
+      throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e);
     }
   }

@@ -21,6 +21,7 @@
 package org.apache.hadoop.hbase.regionserver.wal;

 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -118,10 +119,14 @@ public class WALCoprocessorHost
   }

   @Override
-  public WALCoprocessor checkAndGetInstance(Class<?> implClass)
-      throws InstantiationException, IllegalAccessException {
+  public WALCoprocessor checkAndGetInstance(Class<?> implClass) throws IllegalAccessException,
+      InstantiationException {
     if (WALCoprocessor.class.isAssignableFrom(implClass)) {
-      return (WALCoprocessor)implClass.newInstance();
+      try {
+        return implClass.asSubclass(WALCoprocessor.class).getDeclaredConstructor().newInstance();
+      } catch (NoSuchMethodException | InvocationTargetException e) {
+        throw (InstantiationException) new InstantiationException(implClass.getName()).initCause(e);
+      }
     } else {
       LOG.error(implClass.getName() + " is not of type WALCoprocessor. Check the "
           + "configuration " + CoprocessorHost.WAL_COPROCESSOR_CONF_KEY);
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.replication.regionserver;

 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Comparator;
@@ -242,12 +243,21 @@ public class ReplicationSource implements ReplicationSourceInterface {
       rsServerHost = ((HRegionServer) server).getRegionServerCoprocessorHost();
     }
     String replicationEndpointImpl = replicationPeer.getPeerConfig().getReplicationEndpointImpl();
+
+    ReplicationEndpoint replicationEndpoint;
     if (replicationEndpointImpl == null) {
-      // Default to HBase inter-cluster replication endpoint
-      replicationEndpointImpl = HBaseInterClusterReplicationEndpoint.class.getName();
+      // Default to HBase inter-cluster replication endpoint; skip reflection
+      replicationEndpoint = new HBaseInterClusterReplicationEndpoint();
+    } else {
+      try {
+        replicationEndpoint = Class.forName(replicationEndpointImpl)
+            .asSubclass(ReplicationEndpoint.class)
+            .getDeclaredConstructor()
+            .newInstance();
+      } catch (NoSuchMethodException | InvocationTargetException e) {
+        throw new IllegalArgumentException(e);
+      }
     }
-    ReplicationEndpoint replicationEndpoint =
-        Class.forName(replicationEndpointImpl).asSubclass(ReplicationEndpoint.class).newInstance();
     if (rsServerHost != null) {
       ReplicationEndpoint newReplicationEndPoint =
           rsServerHost.postCreateReplicationEndPoint(replicationEndpoint);
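In ReplicationSource the default endpoint is now built with a plain constructor call instead of reflecting on its own class name, so reflection and its checked exceptions are only paid for when a custom endpoint class is configured. A hedged sketch of that shape, using Runnable as a stand-in for ReplicationEndpoint and illustrative names throughout:

public class EndpointFactoryDemo {
  // Known default type: construct directly; reflection only for user-configured classes.
  static Runnable create(String configuredClassName) throws ReflectiveOperationException {
    if (configuredClassName == null) {
      return new DefaultTask();
    }
    return Class.forName(configuredClassName).asSubclass(Runnable.class)
        .getDeclaredConstructor().newInstance();
  }

  public static class DefaultTask implements Runnable {
    @Override
    public void run() { }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(create(null).getClass().getSimpleName());
    System.out.println(create(DefaultTask.class.getName()).getClass().getSimpleName());
  }
}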
@@ -40,10 +40,9 @@ public class ReplicationSourceFactory {
       String defaultReplicationSourceImpl =
           isQueueRecovered ? RecoveredReplicationSource.class.getCanonicalName()
               : ReplicationSource.class.getCanonicalName();
-      @SuppressWarnings("rawtypes")
-      Class c = Class.forName(
+      Class<?> c = Class.forName(
           conf.get("replication.replicationsource.implementation", defaultReplicationSourceImpl));
-      src = (ReplicationSourceInterface) c.newInstance();
+      src = c.asSubclass(ReplicationSourceInterface.class).getDeclaredConstructor().newInstance();
     } catch (Exception e) {
       LOG.warn("Passed replication source implementation throws errors, "
           + "defaulting to ReplicationSource",
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.coprocessor;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;

+import java.lang.reflect.InvocationTargetException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.Coprocessor;
@@ -56,47 +58,61 @@ public class TestCoprocessorHost {
       return this.aborted;
     }
   }

   @Test
   public void testDoubleLoadingAndPriorityValue() {
     final Configuration conf = HBaseConfiguration.create();
-    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host =
-        new CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>>(
-            new TestAbortable()) {
-          @Override
-          public RegionCoprocessor checkAndGetInstance(Class<?> implClass)
-              throws InstantiationException, IllegalAccessException {
-            if(RegionCoprocessor.class.isAssignableFrom(implClass)) {
-              return (RegionCoprocessor)implClass.newInstance();
-            }
-            return null;
-          }
-
-          final Configuration cpHostConf = conf;
-
-          @Override
-          public CoprocessorEnvironment<RegionCoprocessor> createEnvironment(
-              final RegionCoprocessor instance, final int priority, int sequence, Configuration conf) {
-            return new BaseEnvironment<RegionCoprocessor>(instance, priority, 0, cpHostConf);
-          }
-        };
     final String key = "KEY";
     final String coprocessor = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver";
+
+    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host;
+    host = new CoprocessorHostForTest<>(conf);
+
     // Try and load a coprocessor three times
     conf.setStrings(key, coprocessor, coprocessor, coprocessor,
         SimpleRegionObserverV2.class.getName());
     host.loadSystemCoprocessors(conf, key);

     // Two coprocessors(SimpleRegionObserver and SimpleRegionObserverV2) loaded
     Assert.assertEquals(2, host.coprocEnvironments.size());

     // Check the priority value
-    CoprocessorEnvironment simpleEnv = host.findCoprocessorEnvironment(
+    CoprocessorEnvironment<?> simpleEnv = host.findCoprocessorEnvironment(
         SimpleRegionObserver.class.getName());
-    CoprocessorEnvironment simpleEnv_v2 = host.findCoprocessorEnvironment(
+    CoprocessorEnvironment<?> simpleEnv_v2 = host.findCoprocessorEnvironment(
         SimpleRegionObserverV2.class.getName());

     assertNotNull(simpleEnv);
     assertNotNull(simpleEnv_v2);
     assertEquals(Coprocessor.PRIORITY_SYSTEM, simpleEnv.getPriority());
     assertEquals(Coprocessor.PRIORITY_SYSTEM + 1, simpleEnv_v2.getPriority());
   }
-  public static class SimpleRegionObserverV2 extends SimpleRegionObserver {
+
+  public static class SimpleRegionObserverV2 extends SimpleRegionObserver { }
+
+  private static class CoprocessorHostForTest<E extends Coprocessor> extends
+      CoprocessorHost<E, CoprocessorEnvironment<E>> {
+    final Configuration cpHostConf;
+
+    public CoprocessorHostForTest(Configuration conf) {
+      super(new TestAbortable());
+      cpHostConf = conf;
+    }
+
+    @Override
+    public E checkAndGetInstance(Class<?> implClass)
+        throws InstantiationException, IllegalAccessException {
+      try {
+        return (E) implClass.getDeclaredConstructor().newInstance();
+      } catch (InvocationTargetException | NoSuchMethodException e) {
+        throw (InstantiationException) new InstantiationException().initCause(e);
+      }
+    }
+
+    @Override
+    public CoprocessorEnvironment<E> createEnvironment(final E instance, final int priority,
+        int sequence, Configuration conf) {
+      return new BaseEnvironment<>(instance, priority, 0, cpHostConf);
+    }
+  }
 }