From 634c3376404aed845aad5860fc3963944c7edf28 Mon Sep 17 00:00:00 2001
From: Alejandro Abdelnur
Date: Tue, 18 Sep 2012 18:18:17 +0000
Subject: [PATCH] HADOOP-8805. Move protocol buffer implementation of
 GetUserMappingProtocol from HDFS to Common. (bowang via tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1387300 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                  |  2 +
 .../tools}/GetUserMappingsProtocolPB.java      | 10 ++--
 .../GetUserMappingsProtocolPBClientImpl.java   | 58 +++++++++++--------
 .../GetUserMappingsProtocolPBServiceImpl.java  | 39 +++++++------
 .../main/proto/GetUserMappingsProtocol.proto   |  6 +-
 .../apache/hadoop/hdfs/NameNodeProxies.java    |  6 +-
 .../server/namenode/NameNodeRpcServer.java     | 10 ++--
 .../hadoop/hdfs/TestIsMethodSupported.java     |  6 +-
 8 files changed, 76 insertions(+), 61 deletions(-)
 rename {hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB => hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools}/GetUserMappingsProtocolPB.java (83%)
 rename hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java => hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/client/GetUserMappingsProtocolPBClientImpl.java (50%)
 rename hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java => hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/service/GetUserMappingsProtocolPBServiceImpl.java (58%)
 rename {hadoop-hdfs-project/hadoop-hdfs => hadoop-common-project/hadoop-common}/src/main/proto/GetUserMappingsProtocol.proto (91%)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 546ca6e3f5c..dbff8bb8bd6 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -234,6 +234,8 @@ Release 2.0.3-alpha - Unreleased
 
     HADOOP-8812. ExitUtil#terminate should print Exception#toString. (eli)
 
+    HADOOP-8805. Move protocol buffer implementation of GetUserMappingProtocol from HDFS to Common. (bowang via tucu)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocolPB.java
similarity index 83%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java
rename to hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocolPB.java
index 542a1f4aeea..750e935c99d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocolPB.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License. You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,21 +16,21 @@
  * limitations under the License.
  */
-package org.apache.hadoop.hdfs.protocolPB;
+package org.apache.hadoop.tools;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
 import org.apache.hadoop.ipc.ProtocolInfo;
 import org.apache.hadoop.security.KerberosInfo;
+import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetUserMappingsProtocolService;
 
 @KerberosInfo(
     serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
 @ProtocolInfo(
-    protocolName = "org.apache.hadoop.tools.GetUserMappingsProtocol",
+    protocolName = "org.apache.hadoop.tools.GetUserMappingsProtocol",
     protocolVersion = 1)
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN"})
 @InterfaceStability.Evolving
 public interface GetUserMappingsProtocolPB extends
   GetUserMappingsProtocolService.BlockingInterface {
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/client/GetUserMappingsProtocolPBClientImpl.java
similarity index 50%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java
rename to hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/client/GetUserMappingsProtocolPBClientImpl.java
index 881f796406b..13859f35ab3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/client/GetUserMappingsProtocolPBClientImpl.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License. You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,54 +16,66 @@
  * limitations under the License.
  */
-package org.apache.hadoop.hdfs.protocolPB;
+package org.apache.hadoop.tools.impl.pb.client;
 
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
+import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
+import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserRequestProto;
+import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserResponseProto;
 
-import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 
-public class GetUserMappingsProtocolClientSideTranslatorPB implements
+public class GetUserMappingsProtocolPBClientImpl implements
     ProtocolMetaInterface, GetUserMappingsProtocol, Closeable {
 
-  /** RpcController is not used and hence is set to null */
-  private final static RpcController NULL_CONTROLLER = null;
-  private final GetUserMappingsProtocolPB rpcProxy;
+  private GetUserMappingsProtocolPB proxy;
 
-  public GetUserMappingsProtocolClientSideTranslatorPB(
-      GetUserMappingsProtocolPB rpcProxy) {
-    this.rpcProxy = rpcProxy;
+  public GetUserMappingsProtocolPBClientImpl(
+      long clientVersion, InetSocketAddress addr, Configuration conf)
+      throws IOException {
+    RPC.setProtocolEngine(conf, GetUserMappingsProtocolPB.class,
+        ProtobufRpcEngine.class);
+    proxy = (GetUserMappingsProtocolPB) RPC.getProxy(
+        GetUserMappingsProtocolPB.class, clientVersion, addr, conf);
   }
-  
+
+  public GetUserMappingsProtocolPBClientImpl(
+      GetUserMappingsProtocolPB proxy) {
+    this.proxy = proxy;
+  }
+
   @Override
   public void close() throws IOException {
-    RPC.stopProxy(rpcProxy);
+    RPC.stopProxy(proxy);
  }
-  
+
   @Override
   public String[] getGroupsForUser(String user) throws IOException {
-    GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto
-        .newBuilder().setUser(user).build();
-    GetGroupsForUserResponseProto resp;
+    GetGroupsForUserRequestProto requestProto =
+        GetGroupsForUserRequestProto.newBuilder().setUser(user).build();
     try {
-      resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
+      GetGroupsForUserResponseProto responseProto =
+          proxy.getGroupsForUser(null, requestProto);
+      return (String[]) responseProto.getGroupsList().toArray(
+          new String[responseProto.getGroupsCount()]);
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
     }
-    return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
   }
 
   @Override
   public boolean isMethodSupported(String methodName) throws IOException {
-    return RpcClientUtil.isMethodSupported(rpcProxy,
+    return RpcClientUtil.isMethodSupported(proxy,
         GetUserMappingsProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
         RPC.getProtocolVersion(GetUserMappingsProtocolPB.class), methodName);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/service/GetUserMappingsProtocolPBServiceImpl.java
similarity index 58%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java
rename to hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/service/GetUserMappingsProtocolPBServiceImpl.java
index bc3a6c96c3e..b310e230557 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/impl/pb/service/GetUserMappingsProtocolPBServiceImpl.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License. You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,42 +16,43 @@
  * limitations under the License.
  */
-package org.apache.hadoop.hdfs.protocolPB;
+package org.apache.hadoop.tools.impl.pb.service;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
+import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
+import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserRequestProto;
+import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetGroupsForUserResponseProto;
 
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 
-public class GetUserMappingsProtocolServerSideTranslatorPB implements
+public class GetUserMappingsProtocolPBServiceImpl implements
     GetUserMappingsProtocolPB {
 
-  private final GetUserMappingsProtocol impl;
-  
-  public GetUserMappingsProtocolServerSideTranslatorPB(
-      GetUserMappingsProtocol impl) {
-    this.impl = impl;
+  private GetUserMappingsProtocol real;
+  
+  public GetUserMappingsProtocolPBServiceImpl(GetUserMappingsProtocol impl) {
+    this.real = impl;
   }
-  
+
   @Override
   public GetGroupsForUserResponseProto getGroupsForUser(
       RpcController controller, GetGroupsForUserRequestProto request)
       throws ServiceException {
-    String[] groups;
+    String user = request.getUser();
     try {
-      groups = impl.getGroupsForUser(request.getUser());
+      String[] groups = real.getGroupsForUser(user);
+      GetGroupsForUserResponseProto.Builder responseBuilder =
+          GetGroupsForUserResponseProto.newBuilder();
+      for (String group : groups) {
+        responseBuilder.addGroups(group);
+      }
+      return responseBuilder.build();
     } catch (IOException e) {
       throw new ServiceException(e);
     }
-    GetGroupsForUserResponseProto.Builder builder = GetGroupsForUserResponseProto
-        .newBuilder();
-    for (String g : groups) {
-      builder.addGroups(g);
-    }
-    return builder.build();
   }
+
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/GetUserMappingsProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
similarity index 91%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/proto/GetUserMappingsProtocol.proto
rename to hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
index d3e2321f5a8..470a5e59d20 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/GetUserMappingsProtocol.proto
+++ b/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
@@ -15,9 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-option java_package = "org.apache.hadoop.hdfs.protocol.proto";
-option java_outer_classname = "GetUserMappingsProtocolProtos";
+
+option java_package = "org.apache.hadoop.tools.proto";
+option java_outer_classname = "GetUserMappingsProtocol";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
index dc7189d410c..9a3c2147d37 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
@@ -36,8 +36,6 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
-import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
-import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
@@ -67,6 +65,8 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
+import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
+import org.apache.hadoop.tools.impl.pb.client.GetUserMappingsProtocolPBClientImpl;
 
 import com.google.common.base.Preconditions;
 
@@ -218,7 +218,7 @@ public class NameNodeProxies {
       throws IOException {
     GetUserMappingsProtocolPB proxy = (GetUserMappingsProtocolPB)
         createNameNodeProxy(address, conf, ugi, GetUserMappingsProtocolPB.class);
-    return new GetUserMappingsProtocolClientSideTranslatorPB(proxy);
+    return new GetUserMappingsProtocolPBClientImpl(proxy);
   }
 
   private static NamenodeProtocol createNNProxyWithNamenodeProtocol(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index 0a7b9895cf7..45cf63e31ad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -67,7 +67,6 @@ import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
 import org.apache.hadoop.hdfs.protocol.UnresolvedPathException;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeProtocolService;
-import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
 import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.NamenodeProtocolService;
 import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
 import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
@@ -75,8 +74,6 @@ import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB;
-import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolPB;
-import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolPB;
@@ -120,6 +117,9 @@ import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
+import org.apache.hadoop.tools.impl.pb.service.GetUserMappingsProtocolPBServiceImpl;
+import org.apache.hadoop.tools.proto.GetUserMappingsProtocol.GetUserMappingsProtocolService;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
@@ -190,8 +190,8 @@ class NameNodeRpcServer implements NamenodeProtocols {
     BlockingService refreshUserMappingService = RefreshUserMappingsProtocolService
         .newReflectiveBlockingService(refreshUserMappingXlator);
 
-    GetUserMappingsProtocolServerSideTranslatorPB getUserMappingXlator =
-        new GetUserMappingsProtocolServerSideTranslatorPB(this);
+    GetUserMappingsProtocolPBServiceImpl getUserMappingXlator =
+        new GetUserMappingsProtocolPBServiceImpl(this);
     BlockingService getUserMappingService = GetUserMappingsProtocolService
         .newReflectiveBlockingService(getUserMappingXlator);
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java
index 1f42c0d4de1..a167e940232 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
-import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.InterDatanodeProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolTranslatorPB;
@@ -41,6 +40,7 @@ import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
+import org.apache.hadoop.tools.impl.pb.client.GetUserMappingsProtocolPBClientImpl;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -141,8 +141,8 @@ public class TestIsMethodSupported {
 
   @Test
   public void testGetUserMappingsProtocol() throws IOException {
-    GetUserMappingsProtocolClientSideTranslatorPB translator =
-        (GetUserMappingsProtocolClientSideTranslatorPB)
+    GetUserMappingsProtocolPBClientImpl translator =
+        (GetUserMappingsProtocolPBClientImpl)
         NameNodeProxies.createNonHAProxy(conf, nnAddress,
             GetUserMappingsProtocol.class, UserGroupInformation.getCurrentUser(),
             true).getProxy();
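
For reference, a minimal usage sketch of the relocated client (plain Java, not part of the patch): it resolves a user's groups over RPC using the constructor and the RPC.getProtocolVersion(GetUserMappingsProtocolPB.class) call shown in the diff above. The NameNode host/port and the user name below are illustrative assumptions only.

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.tools.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.impl.pb.client.GetUserMappingsProtocolPBClientImpl;

public class GetGroupsForUserExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed NameNode RPC endpoint; substitute the address of a real cluster.
    InetSocketAddress nnAddr = new InetSocketAddress("nn.example.com", 8020);
    // Build a PB client against the common-side protocol moved by this patch.
    GetUserMappingsProtocolPBClientImpl client =
        new GetUserMappingsProtocolPBClientImpl(
            RPC.getProtocolVersion(GetUserMappingsProtocolPB.class), nnAddr, conf);
    try {
      // "alice" is an illustrative user name.
      for (String group : client.getGroupsForUser("alice")) {
        System.out.println(group);
      }
    } finally {
      client.close();
    }
  }
}

On the server side, the same proto service is exposed by wrapping the concrete GetUserMappingsProtocol implementation in GetUserMappingsProtocolPBServiceImpl, as NameNodeRpcServer does in the hunk above.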