HBASE-15741 Provide backward compatibility for HBase coprocessor service names

This commit is contained in:
Gary Helmling 2016-05-02 13:22:43 -07:00
parent 29eb3e8bcc
commit d23d600d76
10 changed files with 185 additions and 72 deletions

View File

@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.hbase.ipc;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
/**
 * Utilities for handling coprocessor service calls.
 */
@InterfaceAudience.Private
public final class CoprocessorRpcUtils {
  /**
   * We assume that all HBase protobuf services share a common package name
   * (defined in the .proto files).
   */
  // final: assigned exactly once in the static initializer below and never mutated.
  private static final String hbaseServicePackage;
  static {
    Descriptors.ServiceDescriptor clientService = ClientProtos.ClientService.getDescriptor();
    // Derive the shared "hbase.pb." package prefix from a known core service,
    // so it tracks any future package rename automatically.
    hbaseServicePackage = clientService.getFullName()
        .substring(0, clientService.getFullName().lastIndexOf(clientService.getName()));
  }

  private CoprocessorRpcUtils() {
    // private for utility class
  }

  /**
   * Returns the name to use for coprocessor service calls. For core HBase services
   * (in the hbase.pb protobuf package), this returns the unqualified name in order to provide
   * backward compatibility across the package name change. For all other services,
   * the fully-qualified service name is used.
   *
   * @param service descriptor of the protobuf service being invoked
   * @return the unqualified name for core HBase services, otherwise the fully-qualified name
   */
  public static String getServiceName(Descriptors.ServiceDescriptor service) {
    if (service.getFullName().startsWith(hbaseServicePackage)) {
      return service.getName();
    }
    return service.getFullName();
  }

  /**
   * Returns a service call instance for the given coprocessor request.
   *
   * @param row the row key the call is directed at (may be empty for master/regionserver calls)
   * @param method descriptor of the service method to invoke
   * @param request the protobuf request message to pass through
   * @return a populated {@code CoprocessorServiceCall} ready to send
   */
  public static ClientProtos.CoprocessorServiceCall buildServiceCall(byte[] row,
      Descriptors.MethodDescriptor method, Message request) {
    return ClientProtos.CoprocessorServiceCall.newBuilder()
        .setRow(ByteStringer.wrap(row))
        .setServiceName(getServiceName(method.getService()))
        .setMethodName(method.getName())
        .setRequest(request.toByteString()).build();
  }
}

View File

@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
@ -61,11 +60,7 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
}
final ClientProtos.CoprocessorServiceCall call =
ClientProtos.CoprocessorServiceCall.newBuilder()
.setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
.setServiceName(method.getService().getFullName())
.setMethodName(method.getName())
.setRequest(request.toByteString()).build();
CoprocessorRpcUtils.buildServiceCall(HConstants.EMPTY_BYTE_ARRAY, method, request);
// TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
CoprocessorServiceResponse result = ProtobufUtil.execService(controller,

View File

@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.protobuf.Descriptors;
@ -83,11 +82,7 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
? rpcControllerFactory.newController() : controller;
final ClientProtos.CoprocessorServiceCall call =
ClientProtos.CoprocessorServiceCall.newBuilder()
.setRow(ByteStringer.wrap(row))
.setServiceName(method.getService().getFullName())
.setMethodName(method.getName())
.setRequest(request.toByteString()).build();
CoprocessorRpcUtils.buildServiceCall(row, method, request);
RegionServerCallable<CoprocessorServiceResponse> callable =
new RegionServerCallable<CoprocessorServiceResponse>(connection, table, row) {
@Override

View File

@ -22,7 +22,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
@ -55,10 +54,7 @@ public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
LOG.trace("Call: " + method.getName() + ", " + request.toString());
}
final ClientProtos.CoprocessorServiceCall call =
ClientProtos.CoprocessorServiceCall.newBuilder()
.setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
.setServiceName(method.getService().getFullName()).setMethodName(method.getName())
.setRequest(request.toByteString()).build();
CoprocessorRpcUtils.buildServiceCall(HConstants.EMPTY_BYTE_ARRAY, method, request);
// TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
CoprocessorServiceResponse result =

View File

@ -91,6 +91,7 @@ import org.apache.hadoop.hbase.coprocessor.BypassCoprocessorException;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.executor.ExecutorType;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
@ -2419,16 +2420,17 @@ public class HMaster extends HRegionServer implements MasterServices {
* No stacking of instances is allowed for a single service name
*/
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
LOG.error("Coprocessor service "+serviceDesc.getFullName()+
String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
if (coprocessorServiceHandlers.containsKey(serviceName)) {
LOG.error("Coprocessor service "+serviceName+
" already registered, rejecting request from "+instance
);
return false;
}
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
coprocessorServiceHandlers.put(serviceName, instance);
if (LOG.isDebugEnabled()) {
LOG.debug("Registered master coprocessor service: service="+serviceDesc.getFullName());
LOG.debug("Registered master coprocessor service: service="+serviceName);
}
return true;
}

View File

@ -123,6 +123,7 @@ import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.mob.MobUtils;
@ -7428,18 +7429,19 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* No stacking of instances is allowed for a single service name
*/
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
LOG.error("Coprocessor service " + serviceDesc.getFullName() +
String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
if (coprocessorServiceHandlers.containsKey(serviceName)) {
LOG.error("Coprocessor service " + serviceName +
" already registered, rejecting request from " + instance
);
return false;
}
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
coprocessorServiceHandlers.put(serviceName, instance);
if (LOG.isDebugEnabled()) {
LOG.debug("Registered coprocessor service: region=" +
Bytes.toStringBinary(getRegionInfo().getRegionName()) +
" service=" + serviceDesc.getFullName());
" service=" + serviceName);
}
return true;
}

View File

@ -96,6 +96,7 @@ import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.http.InfoServer;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcClientFactory;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
@ -696,15 +697,16 @@ public class HRegionServer extends HasThread implements
* No stacking of instances is allowed for a single service name
*/
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
LOG.error("Coprocessor service " + serviceDesc.getFullName()
String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);
if (coprocessorServiceHandlers.containsKey(serviceName)) {
LOG.error("Coprocessor service " + serviceName
+ " already registered, rejecting request from " + instance);
return false;
}
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
coprocessorServiceHandlers.put(serviceName, instance);
if (LOG.isDebugEnabled()) {
LOG.debug("Registered regionserver coprocessor service: service=" + serviceDesc.getFullName());
LOG.debug("Registered regionserver coprocessor service: service=" + serviceName);
}
return true;
}

View File

@ -1,5 +1,5 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: DummyRegionServerEndpoint.proto
// source: hbase-server/src/test/protobuf/DummyRegionServerEndpoint.proto
package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
@ -12,7 +12,7 @@ public final class DummyRegionServerEndpointProtos {
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code DummyRequest}
* Protobuf type {@code hbase.test.pb.DummyRequest}
*/
public static final class DummyRequest extends
com.google.protobuf.GeneratedMessage
@ -75,12 +75,12 @@ public final class DummyRegionServerEndpointProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class);
}
@ -231,19 +231,19 @@ public final class DummyRegionServerEndpointProtos {
return builder;
}
/**
* Protobuf type {@code DummyRequest}
* Protobuf type {@code hbase.test.pb.DummyRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class);
}
@ -277,7 +277,7 @@ public final class DummyRegionServerEndpointProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest getDefaultInstanceForType() {
@ -335,7 +335,7 @@ public final class DummyRegionServerEndpointProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:DummyRequest)
// @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyRequest)
}
static {
@ -343,7 +343,7 @@ public final class DummyRegionServerEndpointProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:DummyRequest)
// @@protoc_insertion_point(class_scope:hbase.test.pb.DummyRequest)
}
public interface DummyResponseOrBuilder
@ -365,7 +365,7 @@ public final class DummyRegionServerEndpointProtos {
getValueBytes();
}
/**
* Protobuf type {@code DummyResponse}
* Protobuf type {@code hbase.test.pb.DummyResponse}
*/
public static final class DummyResponse extends
com.google.protobuf.GeneratedMessage
@ -434,12 +434,12 @@ public final class DummyRegionServerEndpointProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class);
}
@ -655,19 +655,19 @@ public final class DummyRegionServerEndpointProtos {
return builder;
}
/**
* Protobuf type {@code DummyResponse}
* Protobuf type {@code hbase.test.pb.DummyResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class);
}
@ -703,7 +703,7 @@ public final class DummyRegionServerEndpointProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_DummyResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse getDefaultInstanceForType() {
@ -852,7 +852,7 @@ public final class DummyRegionServerEndpointProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:DummyResponse)
// @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyResponse)
}
static {
@ -860,11 +860,11 @@ public final class DummyRegionServerEndpointProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:DummyResponse)
// @@protoc_insertion_point(class_scope:hbase.test.pb.DummyResponse)
}
/**
* Protobuf service {@code DummyService}
* Protobuf service {@code hbase.test.pb.DummyService}
*/
public static abstract class DummyService
implements com.google.protobuf.Service {
@ -872,7 +872,7 @@ public final class DummyRegionServerEndpointProtos {
public interface Interface {
/**
* <code>rpc dummyCall(.DummyRequest) returns (.DummyResponse);</code>
* <code>rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
*/
public abstract void dummyCall(
com.google.protobuf.RpcController controller,
@ -880,7 +880,7 @@ public final class DummyRegionServerEndpointProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
/**
* <code>rpc dummyThrow(.DummyRequest) returns (.DummyResponse);</code>
* <code>rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
*/
public abstract void dummyThrow(
com.google.protobuf.RpcController controller,
@ -979,7 +979,7 @@ public final class DummyRegionServerEndpointProtos {
}
/**
* <code>rpc dummyCall(.DummyRequest) returns (.DummyResponse);</code>
* <code>rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
*/
public abstract void dummyCall(
com.google.protobuf.RpcController controller,
@ -987,7 +987,7 @@ public final class DummyRegionServerEndpointProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
/**
* <code>rpc dummyThrow(.DummyRequest) returns (.DummyResponse);</code>
* <code>rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);</code>
*/
public abstract void dummyThrow(
com.google.protobuf.RpcController controller,
@ -1163,19 +1163,19 @@ public final class DummyRegionServerEndpointProtos {
}
// @@protoc_insertion_point(class_scope:DummyService)
// @@protoc_insertion_point(class_scope:hbase.test.pb.DummyService)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_DummyRequest_descriptor;
internal_static_hbase_test_pb_DummyRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_DummyRequest_fieldAccessorTable;
internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_DummyResponse_descriptor;
internal_static_hbase_test_pb_DummyResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_DummyResponse_fieldAccessorTable;
internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -1185,31 +1185,33 @@ public final class DummyRegionServerEndpointProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\037DummyRegionServerEndpoint.proto\"\016\n\014Dum" +
"myRequest\"\036\n\rDummyResponse\022\r\n\005value\030\001 \002(" +
"\t2g\n\014DummyService\022*\n\tdummyCall\022\r.DummyRe" +
"quest\032\016.DummyResponse\022+\n\ndummyThrow\022\r.Du" +
"mmyRequest\032\016.DummyResponseB_\n6org.apache" +
".hadoop.hbase.coprocessor.protobuf.gener" +
"atedB\037DummyRegionServerEndpointProtos\210\001\001" +
"\240\001\001"
"\n>hbase-server/src/test/protobuf/DummyRe" +
"gionServerEndpoint.proto\022\rhbase.test.pb\"" +
"\016\n\014DummyRequest\"\036\n\rDummyResponse\022\r\n\005valu" +
"e\030\001 \002(\t2\237\001\n\014DummyService\022F\n\tdummyCall\022\033." +
"hbase.test.pb.DummyRequest\032\034.hbase.test." +
"pb.DummyResponse\022G\n\ndummyThrow\022\033.hbase.t" +
"est.pb.DummyRequest\032\034.hbase.test.pb.Dumm" +
"yResponseB_\n6org.apache.hadoop.hbase.cop" +
"rocessor.protobuf.generatedB\037DummyRegion" +
"ServerEndpointProtos\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_DummyRequest_descriptor =
internal_static_hbase_test_pb_DummyRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_DummyRequest_fieldAccessorTable = new
internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_DummyRequest_descriptor,
internal_static_hbase_test_pb_DummyRequest_descriptor,
new java.lang.String[] { });
internal_static_DummyResponse_descriptor =
internal_static_hbase_test_pb_DummyResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_DummyResponse_fieldAccessorTable = new
internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_DummyResponse_descriptor,
internal_static_hbase_test_pb_DummyResponse_descriptor,
new java.lang.String[] { "Value", });
return null;
}

View File

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import static org.junit.Assert.assertEquals;
import com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestCoprocessorRpcUtils {

  @Test
  public void testServiceName() throws Exception {
    // Built-in HBase services (in the shared hbase.pb package) should be reduced
    // to their unqualified names for backward compatibility.
    Descriptors.ServiceDescriptor builtIn =
        AuthenticationProtos.AuthenticationService.getDescriptor();
    assertEquals(builtIn.getName(), CoprocessorRpcUtils.getServiceName(builtIn));

    // Services outside the HBase package must keep their fully-qualified names.
    Descriptors.ServiceDescriptor external =
        DummyRegionServerEndpointProtos.DummyService.getDescriptor();
    assertEquals(external.getFullName(), CoprocessorRpcUtils.getServiceName(external));
  }
}

View File

@ -15,6 +15,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.test.pb;
// Coprocessor test
option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
option java_outer_classname = "DummyRegionServerEndpointProtos";