HADOOP-8805. Move protocol buffer implementation of GetUserMappingProtocol from HDFS to Common. (bowang via tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1387300 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Alejandro Abdelnur 2012-09-18 18:18:17 +00:00
parent c03ca579de
commit 634c337640
8 changed files with 76 additions and 61 deletions

View File

@@ -234,6 +234,8 @@ Release 2.0.3-alpha - Unreleased
HADOOP-8812. ExitUtil#terminate should print Exception#toString. (eli)
HADOOP-8805. Move protocol buffer implementation of GetUserMappingProtocol from HDFS to Common. (bowang via tucu)
OPTIMIZATIONS
BUG FIXES

View File

@@ -16,21 +16,21 @@
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocolPB;
package org.apache.hadoop.tools;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
import org.apache.hadoop.ipc.ProtocolInfo;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetUserMappingsProtocolService;
@KerberosInfo(
serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
@ProtocolInfo(
protocolName = "org.apache.hadoop.tools.GetUserMappingsProtocol",
protocolVersion = 1)
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN"})
@InterfaceStability.Evolving
public interface GetUserMappingsProtocolPB extends
GetUserMappingsProtocolService.BlockingInterface {

View File

@@ -16,54 +16,66 @@
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocolPB;
package org.apache.hadoop.tools.impl.pb.client;
import java.io.Closeable;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserRequestProto;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserResponseProto;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
public class GetUserMappingsProtocolClientSideTranslatorPB implements
public class GetUserMappingsProtocolPBClientImpl implements
ProtocolMetaInterface, GetUserMappingsProtocol, Closeable {
/** RpcController is not used and hence is set to null */
private final static RpcController NULL_CONTROLLER = null;
private final GetUserMappingsProtocolPB rpcProxy;
private GetUserMappingsProtocolPB proxy;
public GetUserMappingsProtocolClientSideTranslatorPB(
GetUserMappingsProtocolPB rpcProxy) {
this.rpcProxy = rpcProxy;
public GetUserMappingsProtocolPBClientImpl(
long clientVersion, InetSocketAddress addr, Configuration conf)
throws IOException {
RPC.setProtocolEngine(conf, GetUserMappingsProtocolPB.class,
ProtobufRpcEngine.class);
proxy = (GetUserMappingsProtocolPB) RPC.getProxy(
GetUserMappingsProtocolPB.class, clientVersion, addr, conf);
}
public GetUserMappingsProtocolPBClientImpl(
GetUserMappingsProtocolPB proxy) {
this.proxy = proxy;
}
@Override
public void close() throws IOException {
RPC.stopProxy(rpcProxy);
RPC.stopProxy(proxy);
}
@Override
public String[] getGroupsForUser(String user) throws IOException {
GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto
.newBuilder().setUser(user).build();
GetGroupsForUserResponseProto resp;
GetGroupsForUserRequestProto requestProto =
GetGroupsForUserRequestProto.newBuilder().setUser(user).build();
try {
resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
GetGroupsForUserResponseProto responseProto =
proxy.getGroupsForUser(null, requestProto);
return (String[]) responseProto.getGroupsList().toArray(
new String[responseProto.getGroupsCount()]);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
}
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy,
return RpcClientUtil.isMethodSupported(proxy,
GetUserMappingsProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(GetUserMappingsProtocolPB.class), methodName);
}

View File

@@ -16,42 +16,43 @@
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocolPB;
package org.apache.hadoop.tools.impl.pb.service;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserRequestProto;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserResponseProto;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
public class GetUserMappingsProtocolServerSideTranslatorPB implements
public class GetUserMappingsProtocolPBServiceImpl implements
GetUserMappingsProtocolPB {
private final GetUserMappingsProtocol impl;
private GetUserMappingsProtocol real;
public GetUserMappingsProtocolServerSideTranslatorPB(
GetUserMappingsProtocol impl) {
this.impl = impl;
public GetUserMappingsProtocolPBServiceImpl(GetUserMappingsProtocol impl) {
this.real = impl;
}
@Override
public GetGroupsForUserResponseProto getGroupsForUser(
RpcController controller, GetGroupsForUserRequestProto request)
throws ServiceException {
String[] groups;
String user = request.getUser();
try {
groups = impl.getGroupsForUser(request.getUser());
String[] groups = real.getGroupsForUser(user);
GetGroupsForUserResponseProto.Builder responseBuilder =
GetGroupsForUserResponseProto.newBuilder();
for (String group : groups) {
responseBuilder.addGroups(group);
}
return responseBuilder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
GetGroupsForUserResponseProto.Builder builder = GetGroupsForUserResponseProto
.newBuilder();
for (String g : groups) {
builder.addGroups(g);
}
return builder.build();
}
}

View File

@@ -16,8 +16,8 @@
* limitations under the License.
*/
option java_package = "org.apache.hadoop.hdfs.protocol.proto";
option java_outer_classname = "GetUserMappingsProtocolProtos";
option java_package = "org.apache.hadoop.tools.proto";
option java_outer_classname = "GetUserMappingsProtocol";
option java_generic_services = true;
option java_generate_equals_and_hash = true;

View File

@@ -36,8 +36,6 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.JournalProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.JournalProtocolTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
@@ -67,6 +65,8 @@ import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.impl.pb.client.GetUserMappingsProtocolPBClientImpl;
import com.google.common.base.Preconditions;
@@ -218,7 +218,7 @@ public class NameNodeProxies {
throws IOException {
GetUserMappingsProtocolPB proxy = (GetUserMappingsProtocolPB)
createNameNodeProxy(address, conf, ugi, GetUserMappingsProtocolPB.class);
return new GetUserMappingsProtocolClientSideTranslatorPB(proxy);
return new GetUserMappingsProtocolPBClientImpl(proxy);
}
private static NamenodeProtocol createNNProxyWithNamenodeProtocol(

View File

@@ -67,7 +67,6 @@ import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
import org.apache.hadoop.hdfs.protocol.UnresolvedPathException;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeProtocolService;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.NamenodeProtocolService;
import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
@@ -75,8 +74,6 @@ import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolPB;
@@ -120,6 +117,9 @@ import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.impl.pb.service.GetUserMappingsProtocolPBServiceImpl;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetUserMappingsProtocolService;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.util.VersionUtil;
@@ -190,8 +190,8 @@ class NameNodeRpcServer implements NamenodeProtocols {
BlockingService refreshUserMappingService = RefreshUserMappingsProtocolService
.newReflectiveBlockingService(refreshUserMappingXlator);
GetUserMappingsProtocolServerSideTranslatorPB getUserMappingXlator =
new GetUserMappingsProtocolServerSideTranslatorPB(this);
GetUserMappingsProtocolPBServiceImpl getUserMappingXlator =
new GetUserMappingsProtocolPBServiceImpl(this);
BlockingService getUserMappingService = GetUserMappingsProtocolService
.newReflectiveBlockingService(getUserMappingXlator);

View File

@@ -27,7 +27,6 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.InterDatanodeProtocolTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.JournalProtocolTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolTranslatorPB;
@@ -41,6 +40,7 @@ import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.tools.impl.pb.client.GetUserMappingsProtocolPBClientImpl;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -141,8 +141,8 @@ public class TestIsMethodSupported {
@Test
public void testGetUserMappingsProtocol() throws IOException {
GetUserMappingsProtocolClientSideTranslatorPB translator =
(GetUserMappingsProtocolClientSideTranslatorPB)
GetUserMappingsProtocolPBClientImpl translator =
(GetUserMappingsProtocolPBClientImpl)
NameNodeProxies.createNonHAProxy(conf, nnAddress,
GetUserMappingsProtocol.class, UserGroupInformation.getCurrentUser(),
true).getProxy();