HBASE-12126 Region server coprocessor endpoint (Virag Kothari)

Andrew Purtell 2014-10-10 12:00:51 -07:00
parent 65ae2e5126
commit 8ee55fb339
17 changed files with 1702 additions and 9 deletions

View File

@@ -1243,4 +1243,30 @@ public interface Admin extends Abortable, Closeable {
* @return A MasterCoprocessorRpcChannel instance
*/
CoprocessorRpcChannel coprocessorService();
/**
* Creates and returns a {@link com.google.protobuf.RpcChannel} instance
* connected to the passed region server.
*
* <p>
* The obtained {@link com.google.protobuf.RpcChannel} instance can be used to access a published
* coprocessor {@link com.google.protobuf.Service} using standard protobuf service invocations:
* </p>
*
* <div style="background-color: #cccccc; padding: 2px">
* <blockquote><pre>
* CoprocessorRpcChannel channel = myAdmin.coprocessorService(serverName);
* MyService.BlockingInterface service = MyService.newBlockingStub(channel);
* MyCallRequest request = MyCallRequest.newBuilder()
* ...
* .build();
* MyCallResponse response = service.myCall(null, request);
* </pre></blockquote></div>
*
* @param sn the server name to which the endpoint call is made
* @return A RegionServerCoprocessorRpcChannel instance
*/
CoprocessorRpcChannel coprocessorService(ServerName sn);
}

View File

@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.client;
import java.io.Closeable;
import java.io.IOException;
import java.io.InterruptedIOException;
@@ -68,6 +69,7 @@ import org.apache.hadoop.hbase.exceptions.MergeRegionException;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.ipc.MasterCoprocessorRpcChannel;
import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
import org.apache.hadoop.hbase.ipc.RegionServerCoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
@@ -3688,4 +3690,32 @@ public class HBaseAdmin implements Admin {
return true;
}
}
/**
* Creates and returns a {@link com.google.protobuf.RpcChannel} instance
* connected to the passed region server.
*
* <p>
* The obtained {@link com.google.protobuf.RpcChannel} instance can be used to access a published
* coprocessor {@link com.google.protobuf.Service} using standard protobuf service invocations:
* </p>
*
* <div style="background-color: #cccccc; padding: 2px">
* <blockquote><pre>
* CoprocessorRpcChannel channel = myAdmin.coprocessorService(serverName);
* MyService.BlockingInterface service = MyService.newBlockingStub(channel);
* MyCallRequest request = MyCallRequest.newBuilder()
* ...
* .build();
* MyCallResponse response = service.myCall(null, request);
* </pre></blockquote></div>
*
* @param sn the server name to which the endpoint call is made
* @return A RegionServerCoprocessorRpcChannel instance
*/
@Override
public CoprocessorRpcChannel coprocessorService(ServerName sn) {
return new RegionServerCoprocessorRpcChannel(connection, sn);
}
}

View File

@@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
* law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
* for the specific language governing permissions and limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
import com.google.protobuf.HBaseZeroCopyByteString;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
/**
* Provides clients with an RPC connection to call coprocessor endpoint
* {@link com.google.protobuf.Service}s against a given region server. An instance of this class may
* be obtained by calling {@link org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService(ServerName)},
* but should normally only be used in creating a new {@link com.google.protobuf.Service} stub to
* call the endpoint methods.
* @see org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService(ServerName)
*/
@InterfaceAudience.Private
public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
private static final Log LOG = LogFactory.getLog(RegionServerCoprocessorRpcChannel.class);
private final ClusterConnection connection;
private final ServerName serverName;
public RegionServerCoprocessorRpcChannel(ClusterConnection conn, ServerName serverName) {
this.connection = conn;
this.serverName = serverName;
}
@Override
protected Message callExecService(Descriptors.MethodDescriptor method, Message request,
Message responsePrototype) throws IOException {
if (LOG.isTraceEnabled()) {
LOG.trace("Call: " + method.getName() + ", " + request.toString());
}
final ClientProtos.CoprocessorServiceCall call =
ClientProtos.CoprocessorServiceCall.newBuilder()
.setRow(HBaseZeroCopyByteString.wrap(HConstants.EMPTY_BYTE_ARRAY))
.setServiceName(method.getService().getFullName()).setMethodName(method.getName())
.setRequest(request.toByteString()).build();
CoprocessorServiceResponse result =
ProtobufUtil.execRegionServerService(connection.getClient(serverName), call);
Message response = null;
if (result.getValue().hasValue()) {
response =
responsePrototype.newBuilderForType().mergeFrom(result.getValue().getValue()).build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}
if (LOG.isTraceEnabled()) {
LOG.trace("Result is value=" + response);
}
return response;
}
}
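A minimal client-side sketch of exercising this channel (hedged: MyService and its messages are placeholders for any protobuf Service registered on the target region server; only coprocessorService(serverName) is introduced by this commit):

// Bind a generated blocking stub to the region-server channel and make one call.
HBaseAdmin admin = new HBaseAdmin(conf);
CoprocessorRpcChannel channel = admin.coprocessorService(serverName);
MyService.BlockingInterface service = MyService.newBlockingStub(channel);
// null controller, as in the javadoc example above
MyResponse response = service.myCall(null, MyRequest.getDefaultInstance());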

View File

@@ -1621,6 +1621,31 @@ public final class ProtobufUtil {
}
}
/**
* Make a region server coprocessor endpoint call
* @param client the ClientService stub for the target region server
* @param call the coprocessor service call to execute
* @return the response from the coprocessor endpoint
* @throws IOException if the remote call fails
*/
public static CoprocessorServiceResponse execRegionServerService(
final ClientService.BlockingInterface client, final CoprocessorServiceCall call)
throws IOException {
CoprocessorServiceRequest request =
CoprocessorServiceRequest
.newBuilder()
.setCall(call)
.setRegion(
RequestConverter.buildRegionSpecifier(REGION_NAME, HConstants.EMPTY_BYTE_ARRAY))
.build();
try {
return client.execRegionServerService(null, request);
} catch (ServiceException se) {
throw getRemoteException(se);
}
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static <T extends Service> T newServiceStub(Class<T> service, RpcChannel channel) public static <T extends Service> T newServiceStub(Class<T> service, RpcChannel channel)
throws Exception { throws Exception {

View File

@@ -492,6 +492,12 @@ public class TestClientNoCluster extends Configured implements Tool {
this.multiInvocationsCount.decrementAndGet();
}
}
@Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
CoprocessorServiceRequest request) throws ServiceException {
throw new NotImplementedException();
}
}
static ScanResponse doMetaScanResponse(final SortedMap<byte [], Pair<HRegionInfo, ServerName>> meta,

View File

@@ -30331,6 +30331,14 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
* <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
*/
public abstract void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
* <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
*/
@@ -30384,6 +30392,14 @@ public final class ClientProtos {
impl.execService(controller, request, done);
}
@java.lang.Override
public void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
impl.execRegionServerService(controller, request, done);
}
@java.lang.Override
public void multi(
com.google.protobuf.RpcController controller,
@@ -30425,6 +30441,8 @@ public final class ClientProtos {
case 4:
return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
case 5:
return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
case 6:
return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -30451,6 +30469,8 @@ public final class ClientProtos {
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -30477,6 +30497,8 @@ public final class ClientProtos {
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -30526,6 +30548,14 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
* <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
*/
public abstract void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
/**
* <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
*/
@@ -30582,6 +30612,11 @@ public final class ClientProtos {
done));
return;
case 5:
this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
done));
return;
case 6:
this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
done));
@@ -30611,6 +30646,8 @@ public final class ClientProtos {
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -30637,6 +30674,8 @@ public final class ClientProtos {
case 4:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 6:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -30734,12 +30773,27 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
}
public void execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
}
public void multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
@@ -30781,6 +30835,11 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
@@ -30854,12 +30913,24 @@ public final class ClientProtos {
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
@@ -31121,17 +31192,19 @@ public final class ClientProtos {
"ondition\030\003 \001(\0132\n.Condition\"S\n\rMultiRespo" +
"nse\022/\n\022regionActionResult\030\001 \003(\0132\023.Region" +
"ActionResult\022\021\n\tprocessed\030\002 \001(\010*\'\n\013Consi" +
"stency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\0012\205\003\n\rCli" +
"entService\022 \n\003Get\022\013.GetRequest\032\014.GetResp" +
"onse\022)\n\006Mutate\022\016.MutateRequest\032\017.MutateR",
"esponse\022#\n\004Scan\022\014.ScanRequest\032\r.ScanResp" +
"onse\022>\n\rBulkLoadHFile\022\025.BulkLoadHFileReq" +
"uest\032\026.BulkLoadHFileResponse\022F\n\013ExecServ" +
"ice\022\032.CoprocessorServiceRequest\032\033.Coproc" +
"essorServiceResponse\022R\n\027ExecRegionServer" +
"Service\022\032.CoprocessorServiceRequest\032\033.Co" +
"processorServiceResponse\022&\n\005Multi\022\r.Mult" +
"iRequest\032\016.MultiResponseBB\n*org.apache.h" +
"adoop.hbase.protobuf.generatedB\014ClientPr" +
"otosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

View File

@@ -412,6 +412,9 @@ service ClientService {
rpc ExecService(CoprocessorServiceRequest)
returns(CoprocessorServiceResponse);
rpc ExecRegionServerService(CoprocessorServiceRequest)
returns(CoprocessorServiceResponse);
rpc Multi(MultiRequest)
returns(MultiResponse);

View File

@@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.coprocessor;
import com.google.protobuf.Service;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
/**
* Coprocessor endpoints registered once per server and providing protobuf services should implement
* this interface and return the {@link Service} instance via {@link #getService()}.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
public interface SingletonCoprocessorService {
Service getService();
}
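A hedged wiring sketch (the class name MyRSEndpoint and its MyService base are illustrative; the interface, the Coprocessor lifecycle, and the REGIONSERVER_COPROCESSOR_CONF_KEY loading path are the ones exercised by the test later in this commit):

// Extend the protobuf-generated service, implement Coprocessor and
// SingletonCoprocessorService, and RegionServerCoprocessorHost will
// register the returned Service once per region server.
public class MyRSEndpoint extends MyService implements Coprocessor, SingletonCoprocessorService {
  @Override
  public Service getService() {
    return this;
  }
  @Override
  public void start(CoprocessorEnvironment env) throws IOException {}
  @Override
  public void stop(CoprocessorEnvironment env) throws IOException {}
  // ...implementations of the generated service methods go here...
}

// Loading is configuration-driven, as in the test below:
// conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, MyRSEndpoint.class.getName());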

View File

@@ -80,6 +80,7 @@ import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.RegionMovedException;
import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorType;
import org.apache.hadoop.hbase.fs.HFileSystem;
@@ -88,12 +89,17 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.master.TableLockManager;
import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
@@ -151,7 +157,13 @@ import org.apache.zookeeper.data.Stat;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
/**
@@ -355,6 +367,8 @@ public class HRegionServer extends HasThread implements
/** The nonce manager chore. */
private Chore nonceManagerChore;
private Map<String, Service> coprocessorServiceHandlers = Maps.newHashMap();
/**
* The server name the Master sees us as. It's made from the hostname the
* master passes us, port, and server startcode. Gets set after registration
@@ -559,6 +573,25 @@ public class HRegionServer extends HasThread implements
protected void doMetrics() {
}
@Override
public boolean registerService(Service instance) {
/*
* No stacking of instances is allowed for a single service name
*/
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
LOG.error("Coprocessor service " + serviceDesc.getFullName()
+ " already registered, rejecting request from " + instance);
return false;
}
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
if (LOG.isDebugEnabled()) {
LOG.debug("Registered regionserver coprocessor service: service=" + serviceDesc.getFullName());
}
return true;
}
/**
* Create wrapped short-circuit connection to this server.
* In its own method so can intercept and mock it over in tests.
@@ -568,7 +601,7 @@ public class HRegionServer extends HasThread implements
return ConnectionUtils.createShortCircuitHConnection(
HConnectionManager.getConnection(conf), serverName, rpcServices, rpcServices);
}
/**
* Run test on configured codecs to make sure supporting libs are in place.
* @param c
@@ -2993,7 +3026,54 @@ public class HRegionServer extends HasThread implements
}
return result;
}
public CoprocessorServiceResponse execRegionServerService(final RpcController controller,
final CoprocessorServiceRequest serviceRequest) throws ServiceException {
try {
ServerRpcController execController = new ServerRpcController();
CoprocessorServiceCall call = serviceRequest.getCall();
String serviceName = call.getServiceName();
String methodName = call.getMethodName();
if (!coprocessorServiceHandlers.containsKey(serviceName)) {
throw new UnknownProtocolException(null,
"No registered coprocessor service found for name " + serviceName);
}
Service service = coprocessorServiceHandlers.get(serviceName);
Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();
Descriptors.MethodDescriptor methodDesc = serviceDesc.findMethodByName(methodName);
if (methodDesc == null) {
throw new UnknownProtocolException(service.getClass(), "Unknown method " + methodName
+ " called on service " + serviceName);
}
Message request =
service.getRequestPrototype(methodDesc).newBuilderForType().mergeFrom(call.getRequest())
.build();
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, execController, request, new RpcCallback<Message>() {
@Override
public void run(Message message) {
if (message != null) {
responseBuilder.mergeFrom(message);
}
}
});
Message execResult = responseBuilder.build();
if (execController.getFailedOn() != null) {
throw execController.getFailedOn();
}
ClientProtos.CoprocessorServiceResponse.Builder builder =
ClientProtos.CoprocessorServiceResponse.newBuilder();
builder.setRegion(RequestConverter.buildRegionSpecifier(RegionSpecifierType.REGION_NAME,
HConstants.EMPTY_BYTE_ARRAY));
builder.setValue(builder.getValueBuilder().setName(execResult.getClass().getName())
.setValue(execResult.toByteString()));
return builder.build();
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
/**
* @return The cache config instance used by the regionserver.
*/

View File

@@ -2176,4 +2176,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
}
}
}
@Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
CoprocessorServiceRequest request) throws ServiceException {
return regionServer.execRegionServerService(controller, request);
}
}

View File

@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
import org.apache.hadoop.hbase.coprocessor.SingletonCoprocessorService;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@@ -243,6 +244,12 @@ public class RegionServerCoprocessorHost extends
final Configuration conf, final RegionServerServices services) {
super(impl, priority, seq, conf);
this.regionServerServices = services;
for (Class c : implClass.getInterfaces()) {
if (SingletonCoprocessorService.class.isAssignableFrom(c)) {
this.regionServerServices.registerService(((SingletonCoprocessorService) impl).getService());
break;
}
}
}
@Override

View File

@@ -18,6 +18,8 @@
*/
package org.apache.hadoop.hbase.regionserver;
import com.google.protobuf.Service;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
@@ -137,4 +139,14 @@ public interface RegionServerServices
* @return all the online tables in this RS
*/
Set<TableName> getOnlineTables();
/**
* Registers a new protocol buffer {@link Service} subclass as a coprocessor endpoint to be
* available for handling.
* @param service the {@code Service} subclass instance to expose as a coprocessor endpoint
* @return {@code true} if the registration was successful, {@code false} otherwise
*/
boolean registerService(Service service);
}

View File

@@ -48,6 +48,8 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.Service;
/**
* Basic mock region server services. Should only be instantiated by HBaseTestingUtility.
*/
@@ -258,4 +260,10 @@ class MockRegionServerServices implements RegionServerServices {
HRegionInfo... hris) {
return false;
}
@Override
public boolean registerService(Service service) {
// TODO Auto-generated method stub
return false;
}
}

View File

@@ -0,0 +1,108 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.coprocessor;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyService;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
@Category({CoprocessorTests.class, MediumTests.class})
public class TestRegionServerCoprocessorEndpoint {
private static HBaseTestingUtility TEST_UTIL = null;
private static Configuration CONF = null;
private static final String DUMMY_VALUE = "val";
@BeforeClass
public static void setupBeforeClass() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
CONF = TEST_UTIL.getConfiguration();
CONF.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
DummyRegionServerEndpoint.class.getName());
TEST_UTIL.startMiniCluster();
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@Test
public void testEndpoint() throws Exception {
final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
final ServerRpcController controller = new ServerRpcController();
final BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse> rpcCallback =
new BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>();
DummyRegionServerEndpointProtos.DummyService service =
ProtobufUtil.newServiceStub(DummyRegionServerEndpointProtos.DummyService.class,
new HBaseAdmin(CONF).coprocessorService(serverName));
service.dummyCall(controller,
DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(), rpcCallback);
assertEquals(DUMMY_VALUE, rpcCallback.get().getValue());
if (controller.failedOnException()) {
throw controller.getFailedOn();
}
}
static class DummyRegionServerEndpoint extends DummyService implements Coprocessor, SingletonCoprocessorService {
@Override
public Service getService() {
return this;
}
@Override
public void start(CoprocessorEnvironment env) throws IOException {
// TODO Auto-generated method stub
}
@Override
public void stop(CoprocessorEnvironment env) throws IOException {
// TODO Auto-generated method stub
}
@Override
public void dummyCall(RpcController controller, DummyRequest request,
RpcCallback<DummyResponse> callback) {
callback.run(DummyResponse.newBuilder().setValue(DUMMY_VALUE).build());
}
}
}

View File

@@ -78,6 +78,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodes
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
@@ -101,6 +103,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
/**
@@ -589,4 +592,17 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
HRegionInfo... hris) {
return false;
}
@Override
public boolean registerService(Service service) {
// TODO Auto-generated method stub
return false;
}
@Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
CoprocessorServiceRequest request) throws ServiceException {
// TODO Auto-generated method stub
return null;
}
}

View File

@@ -0,0 +1,33 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Coprocessor test
option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
option java_outer_classname = "DummyRegionServerEndpointProtos";
option java_generic_services = true;
option java_generate_equals_and_hash = true;
message DummyRequest {
}
message DummyResponse {
required string value = 1;
}
service DummyService {
rpc dummyCall(DummyRequest) returns(DummyResponse);
}
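For reference, a hedged sketch of what protoc (2.x, with java_generic_services = true) makes available from this definition and how it pairs with the new Admin channel; the DummyService classes are generated from this commit's .proto, while the conf and serverName variables are assumed context:

// The channel returned by HBaseAdmin#coprocessorService implements both
// RpcChannel and BlockingRpcChannel, so either stub flavor can be bound to it.
CoprocessorRpcChannel channel = new HBaseAdmin(conf).coprocessorService(serverName);
DummyService.Interface asyncStub = DummyService.newStub(channel);
DummyService.BlockingInterface blockingStub = DummyService.newBlockingStub(channel);
DummyResponse response = blockingStub.dummyCall(null, DummyRequest.getDefaultInstance());
// response.getValue() would be "val" (DUMMY_VALUE) with the test endpoint above.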