HBASE-15741 Provide backward compatibility for HBase coprocessor service names

Gary Helmling 2016-05-02 13:22:43 -07:00
parent 12da71b824
commit b36222831d
10 changed files with 185 additions and 70 deletions


@@ -0,0 +1,72 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.hadoop.hbase.ipc;

import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;

import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.ByteStringer;

/**
 * Utilities for handling coprocessor service calls.
 */
@InterfaceAudience.Private
public final class CoprocessorRpcUtils {
  /**
   * We assume that all HBase protobuf services share a common package name
   * (defined in the .proto files).
   */
  private static String hbaseServicePackage;
  static {
    Descriptors.ServiceDescriptor clientService = ClientProtos.ClientService.getDescriptor();
    hbaseServicePackage = clientService.getFullName()
        .substring(0, clientService.getFullName().lastIndexOf(clientService.getName()));
  }

  private CoprocessorRpcUtils() {
    // private for utility class
  }

  /**
   * Returns the name to use for coprocessor service calls. For core HBase services
   * (in the hbase.pb protobuf package), this returns the unqualified name in order to provide
   * backward compatibility across the package name change. For all other services,
   * the fully-qualified service name is used.
   */
  public static String getServiceName(Descriptors.ServiceDescriptor service) {
    if (service.getFullName().startsWith(hbaseServicePackage)) {
      return service.getName();
    }
    return service.getFullName();
  }

  /**
   * Returns a service call instance for the given coprocessor request.
   */
  public static ClientProtos.CoprocessorServiceCall buildServiceCall(byte[] row,
      Descriptors.MethodDescriptor method, Message request) {
    return ClientProtos.CoprocessorServiceCall.newBuilder()
        .setRow(ByteStringer.wrap(row))
        .setServiceName(CoprocessorRpcUtils.getServiceName(method.getService()))
        .setMethodName(method.getName())
        .setRequest(request.toByteString()).build();
  }
}
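A minimal usage sketch of the two helpers above, not part of this commit: it exercises the same service descriptors that the new TestCoprocessorRpcUtils further down uses, and the class name, the main() wrapper, and the choice of dummyCall as the invoked method are illustrative assumptions only.

import com.google.protobuf.Descriptors;

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class CoprocessorServiceNameSketch {
  public static void main(String[] args) {
    // Built-in HBase service (hbase.pb package): addressed by its unqualified name, so
    // clients and servers from before the protobuf package rename still agree on the key.
    Descriptors.ServiceDescriptor auth =
        AuthenticationProtos.AuthenticationService.getDescriptor();
    System.out.println(CoprocessorRpcUtils.getServiceName(auth));   // "AuthenticationService"

    // Third-party coprocessor service: keeps its fully-qualified name.
    Descriptors.ServiceDescriptor dummy =
        DummyRegionServerEndpointProtos.DummyService.getDescriptor();
    System.out.println(CoprocessorRpcUtils.getServiceName(dummy));  // "hbase.test.pb.DummyService"

    // The same naming rule flows into the request that the RPC channels put on the wire.
    Descriptors.MethodDescriptor method = dummy.findMethodByName("dummyCall");
    ClientProtos.CoprocessorServiceCall call = CoprocessorRpcUtils.buildServiceCall(
        new byte[0],  // empty row, as the master and regionserver channels pass
        method,
        DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance());
    System.out.println(call.getServiceName());                      // "hbase.test.pb.DummyService"
  }
}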


@@ -62,11 +62,7 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
     }
     final ClientProtos.CoprocessorServiceCall call =
-        ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
-            .setServiceName(method.getService().getFullName())
-            .setMethodName(method.getName())
-            .setRequest(request.toByteString()).build();
+        CoprocessorRpcUtils.buildServiceCall(HConstants.EMPTY_BYTE_ARRAY, method, request);
     // TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
     CoprocessorServiceResponse result = ProtobufUtil.execService(controller,


@@ -85,11 +85,7 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
         ? rpcControllerFactory.newController() : controller;
     final ClientProtos.CoprocessorServiceCall call =
-        ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(ByteStringer.wrap(row))
-            .setServiceName(method.getService().getFullName())
-            .setMethodName(method.getName())
-            .setRequest(request.toByteString()).build();
+        CoprocessorRpcUtils.buildServiceCall(row, method, request);
     RegionServerCallable<CoprocessorServiceResponse> callable =
         new RegionServerCallable<CoprocessorServiceResponse>(connection, table, row) {
           @Override


@@ -22,7 +22,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.Descriptors;
@@ -56,10 +55,7 @@ public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
       LOG.trace("Call: " + method.getName() + ", " + request.toString());
     }
     final ClientProtos.CoprocessorServiceCall call =
-        ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
-            .setServiceName(method.getService().getFullName()).setMethodName(method.getName())
-            .setRequest(request.toByteString()).build();
+        CoprocessorRpcUtils.buildServiceCall(HConstants.EMPTY_BYTE_ARRAY, method, request);
     // TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
     CoprocessorServiceResponse result =


@@ -84,6 +84,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.executor.ExecutorType;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
 import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
@@ -2428,16 +2429,17 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
    * No stacking of instances is allowed for a single service name
    */
   Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
-  if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
-    LOG.error("Coprocessor service "+serviceDesc.getFullName()+
+  String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
+  if (coprocessorServiceHandlers.containsKey(serviceName)) {
+    LOG.error("Coprocessor service "+serviceName+
         " already registered, rejecting request from "+instance
     );
     return false;
   }
-  coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
+  coprocessorServiceHandlers.put(serviceName, instance);
   if (LOG.isDebugEnabled()) {
-    LOG.debug("Registered master coprocessor service: service="+serviceDesc.getFullName());
+    LOG.debug("Registered master coprocessor service: service="+serviceName);
   }
   return true;
 }


@@ -123,6 +123,7 @@ import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.ipc.RpcCallContext;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -7962,18 +7963,19 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
    * No stacking of instances is allowed for a single service name
    */
   Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
-  if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
-    LOG.error("Coprocessor service " + serviceDesc.getFullName() +
+  String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
+  if (coprocessorServiceHandlers.containsKey(serviceName)) {
+    LOG.error("Coprocessor service " + serviceName +
       " already registered, rejecting request from " + instance
     );
     return false;
   }
-  coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
+  coprocessorServiceHandlers.put(serviceName, instance);
   if (LOG.isDebugEnabled()) {
     LOG.debug("Registered coprocessor service: region=" +
         Bytes.toStringBinary(getRegionInfo().getRegionName()) +
-        " service=" + serviceDesc.getFullName());
+        " service=" + serviceName);
   }
   return true;
 }


@@ -96,6 +96,7 @@ import org.apache.hadoop.hbase.executor.ExecutorType;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.http.InfoServer;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.ipc.RpcClient;
 import org.apache.hadoop.hbase.ipc.RpcClientFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
@@ -686,15 +687,16 @@ public class HRegionServer extends HasThread implements
    * No stacking of instances is allowed for a single service name
    */
   Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
-  if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
-    LOG.error("Coprocessor service " + serviceDesc.getFullName()
+  String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
+  if (coprocessorServiceHandlers.containsKey(serviceName)) {
+    LOG.error("Coprocessor service " + serviceName
         + " already registered, rejecting request from " + instance);
     return false;
   }
-  coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
+  coprocessorServiceHandlers.put(serviceName, instance);
   if (LOG.isDebugEnabled()) {
-    LOG.debug("Registered regionserver coprocessor service: service="+serviceDesc.getFullName());
+    LOG.debug("Registered regionserver coprocessor service: service=" + serviceName);
  }
  return true;
}
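The registerService() changes above in HMaster, HRegion, and HRegionServer all follow the same pattern: key the handler map by the backward-compatible service name instead of the descriptor's full name, and refuse to stack a second instance under the same key. A small self-contained sketch of that guard, with a plain HashMap standing in for the servers' coprocessorServiceHandlers field; the class and field names here are illustrative, not from the commit.

import java.util.HashMap;
import java.util.Map;

import com.google.protobuf.Descriptors;
import com.google.protobuf.Service;

import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;

// Illustrative stand-in for the registerService() methods changed above.
public class RegisterServiceSketch {
  private final Map<String, Service> handlers = new HashMap<String, Service>();

  public boolean registerService(Service instance) {
    Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
    // Key by the backward-compatible name, not the (possibly renamed) full name.
    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
    if (handlers.containsKey(serviceName)) {
      // No stacking of instances is allowed for a single service name.
      return false;
    }
    handlers.put(serviceName, instance);
    return true;
  }
}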


@@ -1,5 +1,5 @@
 // Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: DummyRegionServerEndpoint.proto
+// source: hbase-server/src/test/protobuf/DummyRegionServerEndpoint.proto
 package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
@@ -12,7 +12,7 @@ public final class DummyRegionServerEndpointProtos {
       extends com.google.protobuf.MessageOrBuilder {
   }
   /**
-   * Protobuf type {@code DummyRequest}
+   * Protobuf type {@code hbase.test.pb.DummyRequest}
    */
   public static final class DummyRequest extends
       com.google.protobuf.GeneratedMessage
@@ -75,12 +75,12 @@ public final class DummyRegionServerEndpointProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_descriptor;
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
     }
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_fieldAccessorTable
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class);
     }
@@ -231,19 +231,19 @@ public final class DummyRegionServerEndpointProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code DummyRequest}
+     * Protobuf type {@code hbase.test.pb.DummyRequest}
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_descriptor;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
       }
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_fieldAccessorTable
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class);
       }
@@ -277,7 +277,7 @@ public final class DummyRegionServerEndpointProtos {
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_descriptor;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
       }
       public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest getDefaultInstanceForType() {
@@ -335,7 +335,7 @@ public final class DummyRegionServerEndpointProtos {
         return this;
       }
-      // @@protoc_insertion_point(builder_scope:DummyRequest)
+      // @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyRequest)
     }
     static {
@@ -343,7 +343,7 @@ public final class DummyRegionServerEndpointProtos {
       defaultInstance.initFields();
     }
-    // @@protoc_insertion_point(class_scope:DummyRequest)
+    // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyRequest)
   }
   public interface DummyResponseOrBuilder
@@ -365,7 +365,7 @@ public final class DummyRegionServerEndpointProtos {
         getValueBytes();
   }
   /**
-   * Protobuf type {@code DummyResponse}
+   * Protobuf type {@code hbase.test.pb.DummyResponse}
    */
   public static final class DummyResponse extends
       com.google.protobuf.GeneratedMessage
@@ -434,12 +434,12 @@ public final class DummyRegionServerEndpointProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_descriptor;
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
     }
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_fieldAccessorTable
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class);
     }
@@ -655,19 +655,19 @@ public final class DummyRegionServerEndpointProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code DummyResponse}
+     * Protobuf type {@code hbase.test.pb.DummyResponse}
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponseOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_descriptor;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
       }
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_fieldAccessorTable
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class);
       }
@@ -703,7 +703,7 @@ public final class DummyRegionServerEndpointProtos {
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_descriptor;
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
       }
       public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse getDefaultInstanceForType() {
@@ -852,7 +852,7 @@ public final class DummyRegionServerEndpointProtos {
         return this;
       }
-      // @@protoc_insertion_point(builder_scope:DummyResponse)
+      // @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyResponse)
     }
     static {
@@ -860,11 +860,11 @@ public final class DummyRegionServerEndpointProtos {
       defaultInstance.initFields();
     }
-    // @@protoc_insertion_point(class_scope:DummyResponse)
+    // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyResponse)
   }
   /**
-   * Protobuf service {@code DummyService}
+   * Protobuf service {@code hbase.test.pb.DummyService}
    */
   public static abstract class DummyService
       implements com.google.protobuf.Service {
@@ -872,7 +872,7 @@ public final class DummyRegionServerEndpointProtos {
     public interface Interface {
       /**
-       * <code>rpc dummyCall(.DummyRequest) returns (.DummyResponse);</code>
+       * <code>rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
       */
      public abstract void dummyCall(
          com.google.protobuf.RpcController controller,
@@ -880,7 +880,7 @@ public final class DummyRegionServerEndpointProtos {
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
      /**
-      * <code>rpc dummyThrow(.DummyRequest) returns (.DummyResponse);</code>
+      * <code>rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
       */
      public abstract void dummyThrow(
          com.google.protobuf.RpcController controller,
@@ -979,7 +979,7 @@ public final class DummyRegionServerEndpointProtos {
     }
     /**
-     * <code>rpc dummyCall(.DummyRequest) returns (.DummyResponse);</code>
+     * <code>rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
      */
     public abstract void dummyCall(
         com.google.protobuf.RpcController controller,
@@ -987,7 +987,7 @@ public final class DummyRegionServerEndpointProtos {
         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
     /**
-     * <code>rpc dummyThrow(.DummyRequest) returns (.DummyResponse);</code>
+     * <code>rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
      */
     public abstract void dummyThrow(
         com.google.protobuf.RpcController controller,
@@ -1163,19 +1163,19 @@ public final class DummyRegionServerEndpointProtos {
     }
-    // @@protoc_insertion_point(class_scope:DummyService)
+    // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyService)
   }
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_DummyRequest_descriptor;
+    internal_static_hbase_test_pb_DummyRequest_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_DummyRequest_fieldAccessorTable;
+      internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable;
   private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_DummyResponse_descriptor;
+    internal_static_hbase_test_pb_DummyResponse_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_DummyResponse_fieldAccessorTable;
+      internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable;
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -1185,31 +1185,33 @@ public final class DummyRegionServerEndpointProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\037DummyRegionServerEndpoint.proto\"\016\n\014Dum" +
-      "myRequest\"\036\n\rDummyResponse\022\r\n\005value\030\001 \002(" +
-      "\t2g\n\014DummyService\022*\n\tdummyCall\022\r.DummyRe" +
-      "quest\032\016.DummyResponse\022+\n\ndummyThrow\022\r.Du" +
-      "mmyRequest\032\016.DummyResponseB_\n6org.apache" +
-      ".hadoop.hbase.coprocessor.protobuf.gener" +
-      "atedB\037DummyRegionServerEndpointProtos\210\001\001" +
-      "\240\001\001"
+      "\n>hbase-server/src/test/protobuf/DummyRe" +
+      "gionServerEndpoint.proto\022\rhbase.test.pb\"" +
+      "\016\n\014DummyRequest\"\036\n\rDummyResponse\022\r\n\005valu" +
+      "e\030\001 \002(\t2\237\001\n\014DummyService\022F\n\tdummyCall\022\033." +
+      "hbase.test.pb.DummyRequest\032\034.hbase.test." +
+      "pb.DummyResponse\022G\n\ndummyThrow\022\033.hbase.t" +
+      "est.pb.DummyRequest\032\034.hbase.test.pb.Dumm" +
+      "yResponseB_\n6org.apache.hadoop.hbase.cop" +
+      "rocessor.protobuf.generatedB\037DummyRegion" +
+      "ServerEndpointProtos\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
         new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
           public com.google.protobuf.ExtensionRegistry assignDescriptors(
               com.google.protobuf.Descriptors.FileDescriptor root) {
             descriptor = root;
-            internal_static_DummyRequest_descriptor =
+            internal_static_hbase_test_pb_DummyRequest_descriptor =
               getDescriptor().getMessageTypes().get(0);
-            internal_static_DummyRequest_fieldAccessorTable = new
+            internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable = new
               com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-                internal_static_DummyRequest_descriptor,
+                internal_static_hbase_test_pb_DummyRequest_descriptor,
                 new java.lang.String[] { });
-            internal_static_DummyResponse_descriptor =
+            internal_static_hbase_test_pb_DummyResponse_descriptor =
               getDescriptor().getMessageTypes().get(1);
-            internal_static_DummyResponse_fieldAccessorTable = new
+            internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-                internal_static_DummyResponse_descriptor,
+                internal_static_hbase_test_pb_DummyResponse_descriptor,
                new java.lang.String[] { "Value", });
             return null;
           }


@@ -0,0 +1,44 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.ipc;

import static org.junit.Assert.assertEquals;

import com.google.protobuf.Descriptors;

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestCoprocessorRpcUtils {

  @Test
  public void testServiceName() throws Exception {
    // verify that we de-namespace built-in HBase rpc services
    Descriptors.ServiceDescriptor authService =
        AuthenticationProtos.AuthenticationService.getDescriptor();
    assertEquals(authService.getName(), CoprocessorRpcUtils.getServiceName(authService));

    // non-hbase rpc services should remain fully qualified
    Descriptors.ServiceDescriptor dummyService =
        DummyRegionServerEndpointProtos.DummyService.getDescriptor();
    assertEquals(dummyService.getFullName(), CoprocessorRpcUtils.getServiceName(dummyService));
  }
}


@@ -15,6 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package hbase.test.pb;
 // Coprocessor test
 option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
 option java_outer_classname = "DummyRegionServerEndpointProtos";