HBASE-12126 Region server coprocessor endpoint (Virag Kothari)
This commit is contained in:
parent
8c125a40bf
commit
6a37c169fc
|
@ -1223,4 +1223,30 @@ public interface Admin extends Abortable, Closeable {
|
|||
* @return A MasterCoprocessorRpcChannel instance
|
||||
*/
|
||||
CoprocessorRpcChannel coprocessorService();
|
||||
|
||||
|
||||
/**
|
||||
* Creates and returns a {@link com.google.protobuf.RpcChannel} instance
|
||||
* connected to the passed region server.
|
||||
*
|
||||
* <p>
|
||||
* The obtained {@link com.google.protobuf.RpcChannel} instance can be used to access a published
|
||||
* coprocessor {@link com.google.protobuf.Service} using standard protobuf service invocations:
|
||||
* </p>
|
||||
*
|
||||
* <div style="background-color: #cccccc; padding: 2px">
|
||||
* <blockquote><pre>
|
||||
* CoprocessorRpcChannel channel = myAdmin.coprocessorService(serverName);
|
||||
* MyService.BlockingInterface service = MyService.newBlockingStub(channel);
|
||||
* MyCallRequest request = MyCallRequest.newBuilder()
|
||||
* ...
|
||||
* .build();
|
||||
* MyCallResponse response = service.myCall(null, request);
|
||||
* </pre></blockquote></div>
|
||||
*
|
||||
* @param sn the server name to which the endpoint call is made
|
||||
* @return A RegionServerCoprocessorRpcChannel instance
|
||||
*/
|
||||
CoprocessorRpcChannel coprocessorService(ServerName sn);
|
||||
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
@ -68,6 +69,7 @@ import org.apache.hadoop.hbase.exceptions.MergeRegionException;
|
|||
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
|
||||
import org.apache.hadoop.hbase.ipc.MasterCoprocessorRpcChannel;
|
||||
import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
|
||||
import org.apache.hadoop.hbase.ipc.RegionServerCoprocessorRpcChannel;
|
||||
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
|
||||
import org.apache.hadoop.hbase.protobuf.RequestConverter;
|
||||
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
|
||||
|
@ -3640,4 +3642,32 @@ public class HBaseAdmin implements Admin {
|
|||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and returns a {@link com.google.protobuf.RpcChannel} instance
|
||||
* connected to the passed region server.
|
||||
*
|
||||
* <p>
|
||||
* The obtained {@link com.google.protobuf.RpcChannel} instance can be used to access a published
|
||||
* coprocessor {@link com.google.protobuf.Service} using standard protobuf service invocations:
|
||||
* </p>
|
||||
*
|
||||
* <div style="background-color: #cccccc; padding: 2px">
|
||||
* <blockquote><pre>
|
||||
* CoprocessorRpcChannel channel = myAdmin.coprocessorService(serverName);
|
||||
* MyService.BlockingInterface service = MyService.newBlockingStub(channel);
|
||||
* MyCallRequest request = MyCallRequest.newBuilder()
|
||||
* ...
|
||||
* .build();
|
||||
* MyCallResponse response = service.myCall(null, request);
|
||||
* </pre></blockquote></div>
|
||||
*
|
||||
* @param sn the server name to which the endpoint call is made
|
||||
* @return A RegionServerCoprocessorRpcChannel instance
|
||||
*/
|
||||
@Override
|
||||
public CoprocessorRpcChannel coprocessorService(ServerName sn) {
|
||||
return new RegionServerCoprocessorRpcChannel(connection, sn);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,75 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
|
||||
* agreements. See the NOTICE file distributed with this work for additional information regarding
|
||||
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance with the License. You may obtain a
|
||||
* copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
|
||||
* law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
|
||||
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
|
||||
* for the specific language governing permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.ipc;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import com.google.protobuf.HBaseZeroCopyByteString;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.ServerName;
|
||||
import org.apache.hadoop.hbase.client.ClusterConnection;
|
||||
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
|
||||
|
||||
|
||||
import com.google.protobuf.Descriptors;
|
||||
import com.google.protobuf.Message;
|
||||
|
||||
/**
|
||||
* Provides clients with an RPC connection to call coprocessor endpoint
|
||||
* {@link com.google.protobuf.Service}s against a given region server. An instance of this class may
|
||||
* be obtained by calling {@link org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService(ServerName)},
|
||||
* but should normally only be used in creating a new {@link com.google.protobuf.Service} stub to
|
||||
* call the endpoint methods.
|
||||
* @see org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService(ServerName)
|
||||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
|
||||
private static Log LOG = LogFactory.getLog(RegionServerCoprocessorRpcChannel.class);
|
||||
private final ClusterConnection connection;
|
||||
private final ServerName serverName;
|
||||
|
||||
public RegionServerCoprocessorRpcChannel(ClusterConnection conn, ServerName serverName) {
|
||||
this.connection = conn;
|
||||
this.serverName = serverName;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Message callExecService(Descriptors.MethodDescriptor method, Message request,
|
||||
Message responsePrototype) throws IOException {
|
||||
if (LOG.isTraceEnabled()) {
|
||||
LOG.trace("Call: " + method.getName() + ", " + request.toString());
|
||||
}
|
||||
final ClientProtos.CoprocessorServiceCall call =
|
||||
ClientProtos.CoprocessorServiceCall.newBuilder()
|
||||
.setRow(HBaseZeroCopyByteString.wrap(HConstants.EMPTY_BYTE_ARRAY))
|
||||
.setServiceName(method.getService().getFullName()).setMethodName(method.getName())
|
||||
.setRequest(request.toByteString()).build();
|
||||
CoprocessorServiceResponse result =
|
||||
ProtobufUtil.execRegionServerService(connection.getClient(serverName), call);
|
||||
Message response = null;
|
||||
if (result.getValue().hasValue()) {
|
||||
response =
|
||||
responsePrototype.newBuilderForType().mergeFrom(result.getValue().getValue()).build();
|
||||
} else {
|
||||
response = responsePrototype.getDefaultInstanceForType();
|
||||
}
|
||||
if (LOG.isTraceEnabled()) {
|
||||
LOG.trace("Result is value=" + response);
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
|
|
@ -1616,6 +1616,31 @@ public final class ProtobufUtil {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a region server endpoint call
|
||||
* @param client
|
||||
* @param call
|
||||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
public static CoprocessorServiceResponse execRegionServerService(
|
||||
final ClientService.BlockingInterface client, final CoprocessorServiceCall call)
|
||||
throws IOException {
|
||||
CoprocessorServiceRequest request =
|
||||
CoprocessorServiceRequest
|
||||
.newBuilder()
|
||||
.setCall(call)
|
||||
.setRegion(
|
||||
RequestConverter.buildRegionSpecifier(REGION_NAME, HConstants.EMPTY_BYTE_ARRAY))
|
||||
.build();
|
||||
try {
|
||||
CoprocessorServiceResponse response = client.execRegionServerService(null, request);
|
||||
return response;
|
||||
} catch (ServiceException se) {
|
||||
throw getRemoteException(se);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <T extends Service> T newServiceStub(Class<T> service, RpcChannel channel)
|
||||
throws Exception {
|
||||
|
|
|
@ -497,6 +497,12 @@ public class TestClientNoCluster extends Configured implements Tool {
|
|||
this.multiInvocationsCount.decrementAndGet();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
    CoprocessorServiceRequest request) throws ServiceException {
  // Not exercised by these no-cluster tests; fail loudly if it is ever invoked.
  throw new NotImplementedException();
}
|
||||
}
|
||||
|
||||
static ScanResponse doMetaScanResponse(final SortedMap<byte [], Pair<HRegionInfo, ServerName>> meta,
|
||||
|
|
|
@ -30331,6 +30331,14 @@ public final class ClientProtos {
|
|||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
|
||||
|
||||
/**
|
||||
* <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
|
||||
*/
|
||||
public abstract void execRegionServerService(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
|
||||
|
||||
/**
|
||||
* <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
|
||||
*/
|
||||
|
@ -30384,6 +30392,14 @@ public final class ClientProtos {
|
|||
impl.execService(controller, request, done);
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public void execRegionServerService(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
|
||||
impl.execRegionServerService(controller, request, done);
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public void multi(
|
||||
com.google.protobuf.RpcController controller,
|
||||
|
@ -30425,6 +30441,8 @@ public final class ClientProtos {
|
|||
case 4:
|
||||
return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
|
||||
case 5:
|
||||
return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
|
||||
case 6:
|
||||
return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
|
||||
default:
|
||||
throw new java.lang.AssertionError("Can't get here.");
|
||||
|
@ -30451,6 +30469,8 @@ public final class ClientProtos {
|
|||
case 4:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
|
||||
case 5:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
|
||||
case 6:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
|
||||
default:
|
||||
throw new java.lang.AssertionError("Can't get here.");
|
||||
|
@ -30477,6 +30497,8 @@ public final class ClientProtos {
|
|||
case 4:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
|
||||
case 5:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
|
||||
case 6:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
|
||||
default:
|
||||
throw new java.lang.AssertionError("Can't get here.");
|
||||
|
@ -30526,6 +30548,14 @@ public final class ClientProtos {
|
|||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
|
||||
|
||||
/**
|
||||
* <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
|
||||
*/
|
||||
public abstract void execRegionServerService(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
|
||||
|
||||
/**
|
||||
* <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
|
||||
*/
|
||||
|
@ -30582,6 +30612,11 @@ public final class ClientProtos {
|
|||
done));
|
||||
return;
|
||||
case 5:
|
||||
this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
|
||||
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
|
||||
done));
|
||||
return;
|
||||
case 6:
|
||||
this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
|
||||
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
|
||||
done));
|
||||
|
@ -30611,6 +30646,8 @@ public final class ClientProtos {
|
|||
case 4:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
|
||||
case 5:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
|
||||
case 6:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
|
||||
default:
|
||||
throw new java.lang.AssertionError("Can't get here.");
|
||||
|
@ -30637,6 +30674,8 @@ public final class ClientProtos {
|
|||
case 4:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
|
||||
case 5:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
|
||||
case 6:
|
||||
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
|
||||
default:
|
||||
throw new java.lang.AssertionError("Can't get here.");
|
||||
|
@ -30734,12 +30773,27 @@ public final class ClientProtos {
|
|||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
|
||||
}
|
||||
|
||||
public void execRegionServerService(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
|
||||
channel.callMethod(
|
||||
getDescriptor().getMethods().get(5),
|
||||
controller,
|
||||
request,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
|
||||
com.google.protobuf.RpcUtil.generalizeCallback(
|
||||
done,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
|
||||
}
|
||||
|
||||
public void multi(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
|
||||
channel.callMethod(
|
||||
getDescriptor().getMethods().get(5),
|
||||
getDescriptor().getMethods().get(6),
|
||||
controller,
|
||||
request,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
|
||||
|
@ -30781,6 +30835,11 @@ public final class ClientProtos {
|
|||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
|
||||
throws com.google.protobuf.ServiceException;
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
|
||||
throws com.google.protobuf.ServiceException;
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
|
||||
|
@ -30854,12 +30913,24 @@ public final class ClientProtos {
|
|||
}
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
|
||||
throws com.google.protobuf.ServiceException {
|
||||
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
|
||||
getDescriptor().getMethods().get(5),
|
||||
controller,
|
||||
request,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
|
||||
}
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
|
||||
throws com.google.protobuf.ServiceException {
|
||||
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
|
||||
getDescriptor().getMethods().get(5),
|
||||
getDescriptor().getMethods().get(6),
|
||||
controller,
|
||||
request,
|
||||
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
|
||||
|
@ -31121,17 +31192,19 @@ public final class ClientProtos {
|
|||
"ondition\030\003 \001(\0132\n.Condition\"S\n\rMultiRespo" +
|
||||
"nse\022/\n\022regionActionResult\030\001 \003(\0132\023.Region" +
|
||||
"ActionResult\022\021\n\tprocessed\030\002 \001(\010*\'\n\013Consi" +
|
||||
"stency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\0012\261\002\n\rCli" +
|
||||
"stency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\0012\205\003\n\rCli" +
|
||||
"entService\022 \n\003Get\022\013.GetRequest\032\014.GetResp" +
|
||||
"onse\022)\n\006Mutate\022\016.MutateRequest\032\017.MutateR",
|
||||
"esponse\022#\n\004Scan\022\014.ScanRequest\032\r.ScanResp" +
|
||||
"onse\022>\n\rBulkLoadHFile\022\025.BulkLoadHFileReq" +
|
||||
"uest\032\026.BulkLoadHFileResponse\022F\n\013ExecServ" +
|
||||
"ice\022\032.CoprocessorServiceRequest\032\033.Coproc" +
|
||||
"essorServiceResponse\022&\n\005Multi\022\r.MultiReq" +
|
||||
"uest\032\016.MultiResponseBB\n*org.apache.hadoo" +
|
||||
"p.hbase.protobuf.generatedB\014ClientProtos" +
|
||||
"H\001\210\001\001\240\001\001"
|
||||
"essorServiceResponse\022R\n\027ExecRegionServer" +
|
||||
"Service\022\032.CoprocessorServiceRequest\032\033.Co" +
|
||||
"processorServiceResponse\022&\n\005Multi\022\r.Mult" +
|
||||
"iRequest\032\016.MultiResponseBB\n*org.apache.h" +
|
||||
"adoop.hbase.protobuf.generatedB\014ClientPr" +
|
||||
"otosH\001\210\001\001\240\001\001"
|
||||
};
|
||||
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
|
||||
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
|
||||
|
|
|
@ -412,6 +412,9 @@ service ClientService {
|
|||
|
||||
rpc ExecService(CoprocessorServiceRequest)
|
||||
returns(CoprocessorServiceResponse);
|
||||
|
||||
rpc ExecRegionServerService(CoprocessorServiceRequest)
|
||||
returns(CoprocessorServiceResponse);
|
||||
|
||||
rpc Multi(MultiRequest)
|
||||
returns(MultiResponse);
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase.coprocessor;
|
||||
|
||||
import com.google.protobuf.Service;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
|
||||
|
||||
/**
 * Coprocessor endpoints registered once per server and providing protobuf services should implement
 * this interface and return the {@link Service} instance via {@link #getService()}.
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
public interface SingletonCoprocessorService {
  /** @return the protobuf {@link Service} instance this endpoint exposes */
  Service getService();
}
|
|
@ -82,6 +82,7 @@ import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
|
|||
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
|
||||
import org.apache.hadoop.hbase.exceptions.RegionMovedException;
|
||||
import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
|
||||
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
|
||||
import org.apache.hadoop.hbase.executor.ExecutorService;
|
||||
import org.apache.hadoop.hbase.executor.ExecutorType;
|
||||
import org.apache.hadoop.hbase.fs.HFileSystem;
|
||||
|
@ -90,11 +91,16 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
|
|||
import org.apache.hadoop.hbase.ipc.RpcClient;
|
||||
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
|
||||
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
|
||||
import org.apache.hadoop.hbase.ipc.ServerRpcController;
|
||||
import org.apache.hadoop.hbase.master.HMaster;
|
||||
import org.apache.hadoop.hbase.master.TableLockManager;
|
||||
import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost;
|
||||
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
|
||||
import org.apache.hadoop.hbase.protobuf.RequestConverter;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
|
||||
|
@ -151,7 +157,13 @@ import org.apache.zookeeper.KeeperException.NoNodeException;
|
|||
import org.apache.zookeeper.data.Stat;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.Maps;
|
||||
import com.google.protobuf.BlockingRpcChannel;
|
||||
import com.google.protobuf.Descriptors;
|
||||
import com.google.protobuf.Message;
|
||||
import com.google.protobuf.RpcCallback;
|
||||
import com.google.protobuf.RpcController;
|
||||
import com.google.protobuf.Service;
|
||||
import com.google.protobuf.ServiceException;
|
||||
|
||||
/**
|
||||
|
@ -349,6 +361,8 @@ public class HRegionServer extends HasThread implements
|
|||
/** The nonce manager chore. */
|
||||
private Chore nonceManagerChore;
|
||||
|
||||
private Map<String, Service> coprocessorServiceHandlers = Maps.newHashMap();
|
||||
|
||||
/**
|
||||
* The server name the Master sees us as. Its made from the hostname the
|
||||
* master passes us, port, and server startcode. Gets set after registration
|
||||
|
@ -556,6 +570,25 @@ public class HRegionServer extends HasThread implements
|
|||
protected void doMetrics() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean registerService(Service instance) {
|
||||
/*
|
||||
* No stacking of instances is allowed for a single service name
|
||||
*/
|
||||
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
|
||||
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
|
||||
LOG.error("Coprocessor service " + serviceDesc.getFullName()
|
||||
+ " already registered, rejecting request from " + instance);
|
||||
return false;
|
||||
}
|
||||
|
||||
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("Registered regionserver coprocessor service: service=" + serviceDesc.getFullName());
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create wrapped short-circuit connection to this server.
|
||||
* In its own method so can intercept and mock it over in tests.
|
||||
|
@ -565,7 +598,7 @@ public class HRegionServer extends HasThread implements
|
|||
return ConnectionUtils.createShortCircuitHConnection(
|
||||
HConnectionManager.getConnection(conf), serverName, rpcServices, rpcServices);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Run test on configured codecs to make sure supporting libs are in place.
|
||||
* @param c
|
||||
|
@ -2950,7 +2983,54 @@ public class HRegionServer extends HasThread implements
|
|||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
public CoprocessorServiceResponse execRegionServerService(final RpcController controller,
|
||||
final CoprocessorServiceRequest serviceRequest) throws ServiceException {
|
||||
try {
|
||||
ServerRpcController execController = new ServerRpcController();
|
||||
CoprocessorServiceCall call = serviceRequest.getCall();
|
||||
String serviceName = call.getServiceName();
|
||||
String methodName = call.getMethodName();
|
||||
if (!coprocessorServiceHandlers.containsKey(serviceName)) {
|
||||
throw new UnknownProtocolException(null,
|
||||
"No registered coprocessor service found for name " + serviceName);
|
||||
}
|
||||
Service service = coprocessorServiceHandlers.get(serviceName);
|
||||
Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();
|
||||
Descriptors.MethodDescriptor methodDesc = serviceDesc.findMethodByName(methodName);
|
||||
if (methodDesc == null) {
|
||||
throw new UnknownProtocolException(service.getClass(), "Unknown method " + methodName
|
||||
+ " called on service " + serviceName);
|
||||
}
|
||||
Message request =
|
||||
service.getRequestPrototype(methodDesc).newBuilderForType().mergeFrom(call.getRequest())
|
||||
.build();
|
||||
final Message.Builder responseBuilder =
|
||||
service.getResponsePrototype(methodDesc).newBuilderForType();
|
||||
service.callMethod(methodDesc, controller, request, new RpcCallback<Message>() {
|
||||
@Override
|
||||
public void run(Message message) {
|
||||
if (message != null) {
|
||||
responseBuilder.mergeFrom(message);
|
||||
}
|
||||
}
|
||||
});
|
||||
Message execResult = responseBuilder.build();
|
||||
if (execController.getFailedOn() != null) {
|
||||
throw execController.getFailedOn();
|
||||
}
|
||||
ClientProtos.CoprocessorServiceResponse.Builder builder =
|
||||
ClientProtos.CoprocessorServiceResponse.newBuilder();
|
||||
builder.setRegion(RequestConverter.buildRegionSpecifier(RegionSpecifierType.REGION_NAME,
|
||||
HConstants.EMPTY_BYTE_ARRAY));
|
||||
builder.setValue(builder.getValueBuilder().setName(execResult.getClass().getName())
|
||||
.setValue(execResult.toByteString()));
|
||||
return builder.build();
|
||||
} catch (IOException ie) {
|
||||
throw new ServiceException(ie);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The cache config instance used by the regionserver.
|
||||
*/
|
||||
|
|
|
@ -2145,4 +2145,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
|
|||
throw new ServiceException(ie);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
    CoprocessorServiceRequest request) throws ServiceException {
  // Delegate region-server-level endpoint calls to the hosting HRegionServer.
  return regionServer.execRegionServerService(controller, request);
}
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
|
|||
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
|
||||
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
|
||||
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
|
||||
import org.apache.hadoop.hbase.coprocessor.SingletonCoprocessorService;
|
||||
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
|
||||
|
||||
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
|
||||
|
@ -243,6 +244,12 @@ public class RegionServerCoprocessorHost extends
|
|||
final Configuration conf, final RegionServerServices services) {
|
||||
super(impl, priority, seq, conf);
|
||||
this.regionServerServices = services;
|
||||
for (Class c : implClass.getInterfaces()) {
|
||||
if (SingletonCoprocessorService.class.isAssignableFrom(c)) {
|
||||
this.regionServerServices.registerService(((SingletonCoprocessorService) impl).getService());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import com.google.protobuf.Service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
@ -124,4 +126,12 @@ public interface RegionServerServices
|
|||
* @return The RegionServer's NonceManager
|
||||
*/
|
||||
public ServerNonceManager getNonceManager();
|
||||
|
||||
/**
|
||||
* Registers a new protocol buffer {@link Service} subclass as a coprocessor endpoint to be
|
||||
* available for handling
|
||||
* @param instance the {@code Service} subclass instance to expose as a coprocessor endpoint
|
||||
* @return {@code true} if the registration was successful, {@code false} otherwise
|
||||
*/
|
||||
boolean registerService(Service service);
|
||||
}
|
||||
|
|
|
@ -46,6 +46,8 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
|
|||
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
|
||||
import com.google.protobuf.Service;
|
||||
|
||||
/**
|
||||
* Basic mock region server services. Should only be instantiated by HBaseTestingUtility.
|
||||
*/
|
||||
|
@ -244,4 +246,10 @@ class MockRegionServerServices implements RegionServerServices {
|
|||
HRegionInfo... hris) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
public boolean registerService(Service service) {
  // Mock implementation: coprocessor service registration is not supported here.
  return false;
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,107 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase.coprocessor;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.Coprocessor;
|
||||
import org.apache.hadoop.hbase.CoprocessorEnvironment;
|
||||
import org.apache.hadoop.hbase.HBaseTestingUtility;
|
||||
import org.apache.hadoop.hbase.MediumTests;
|
||||
import org.apache.hadoop.hbase.ServerName;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
|
||||
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
|
||||
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest;
|
||||
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse;
|
||||
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyService;
|
||||
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
|
||||
import org.apache.hadoop.hbase.ipc.ServerRpcController;
|
||||
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.categories.Category;
|
||||
import com.google.protobuf.RpcCallback;
|
||||
import com.google.protobuf.RpcController;
|
||||
import com.google.protobuf.Service;
|
||||
|
||||
@Category(MediumTests.class)
|
||||
public class TestRegionServerCoprocessorEndpoint {
|
||||
private static HBaseTestingUtility TEST_UTIL = null;
|
||||
private static Configuration CONF = null;
|
||||
private static final String DUMMY_VALUE = "val";
|
||||
|
||||
@BeforeClass
|
||||
public static void setupBeforeClass() throws Exception {
|
||||
TEST_UTIL = new HBaseTestingUtility();
|
||||
CONF = TEST_UTIL.getConfiguration();
|
||||
CONF.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
|
||||
DummyRegionServerEndpoint.class.getName());
|
||||
TEST_UTIL.startMiniCluster();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownAfterClass() throws Exception {
|
||||
TEST_UTIL.shutdownMiniCluster();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEndpoint() throws Exception {
|
||||
final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
|
||||
final ServerRpcController controller = new ServerRpcController();
|
||||
final BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse> rpcCallback =
|
||||
new BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>();
|
||||
DummyRegionServerEndpointProtos.DummyService service =
|
||||
ProtobufUtil.newServiceStub(DummyRegionServerEndpointProtos.DummyService.class,
|
||||
new HBaseAdmin(CONF).coprocessorService(serverName));
|
||||
service.dummyCall(controller,
|
||||
DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(), rpcCallback);
|
||||
assertEquals(DUMMY_VALUE, rpcCallback.get().getValue());
|
||||
if (controller.failedOnException()) {
|
||||
throw controller.getFailedOn();
|
||||
}
|
||||
}
|
||||
|
||||
static class DummyRegionServerEndpoint extends DummyService implements Coprocessor, SingletonCoprocessorService {
|
||||
|
||||
@Override
|
||||
public Service getService() {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start(CoprocessorEnvironment env) throws IOException {
|
||||
// TODO Auto-generated method stub
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop(CoprocessorEnvironment env) throws IOException {
|
||||
// TODO Auto-generated method stub
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dummyCall(RpcController controller, DummyRequest request,
|
||||
RpcCallback<DummyResponse> callback) {
|
||||
callback.run(DummyResponse.newBuilder().setValue(DUMMY_VALUE).build());
|
||||
}
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -77,6 +77,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodes
|
|||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
|
||||
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
|
||||
|
@ -99,6 +101,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
|
|||
import org.apache.zookeeper.KeeperException;
|
||||
|
||||
import com.google.protobuf.RpcController;
|
||||
import com.google.protobuf.Service;
|
||||
import com.google.protobuf.ServiceException;
|
||||
|
||||
/**
|
||||
|
@ -576,4 +579,17 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
|
|||
HRegionInfo... hris) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean registerService(Service service) {
|
||||
// TODO Auto-generated method stub
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
|
||||
CoprocessorServiceRequest request) throws ServiceException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,33 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// Coprocessor test
|
||||
option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
|
||||
option java_outer_classname = "DummyRegionServerEndpointProtos";
|
||||
option java_generic_services = true;
|
||||
option java_generate_equals_and_hash = true;
|
||||
|
||||
message DummyRequest {
|
||||
}
|
||||
|
||||
message DummyResponse {
|
||||
required string value = 1;
|
||||
}
|
||||
|
||||
service DummyService {
|
||||
rpc dummyCall(DummyRequest) returns(DummyResponse);
|
||||
}
|
Loading…
Reference in New Issue