Merge r1329319 for HADOOP-8285 for the changes outside common.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1337433 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2012-05-12 02:19:58 +00:00
parent e1013c3d29
commit a5da2f6d68
15 changed files with 17 additions and 27 deletions

View File

@@ -318,6 +318,8 @@ Release 2.0.0 - UNRELEASED
HDFS-3211. Add fence(..) and replace NamenodeRegistration with JournalInfo
and epoch in JournalProtocol. (suresh via szetszwo)
HADOOP-8285 HDFS changes for Use ProtoBuf for RpcPayLoadHeader (sanjay radia)
OPTIMIZATIONS
HDFS-2477. Optimize computing the diff between a block report and the
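
In short, HADOOP-8285 switched the RPC payload header over to protobuf and, as part of that, moved the RpcKind enum so that it is now nested in RPC as RPC.RpcKind. Every HDFS and MapReduce change in this commit is the mechanical follow-up to that move. A minimal before/after sketch of the call-site update, drawn from the addPBProtocol hunk in the next file (conf, protocol, service and server are whatever the caller already has in scope):

    // Before this commit: RpcKind was nested in RpcPayloadHeader
    import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
    server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);

    // After this commit: the enum is referenced through RPC
    import org.apache.hadoop.ipc.RPC;
    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);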

View File

@@ -58,8 +58,6 @@
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.security.UserGroupInformation;
@@ -989,7 +987,7 @@ public static URI createUri(String scheme, InetSocketAddress address) {
public static void addPBProtocol(Configuration conf, Class<?> protocol,
BlockingService service, RPC.Server server) throws IOException {
RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine.class);
server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
}
/**
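
As a rough usage sketch of the addPBProtocol helper above (in the HDFS tree this helper lives in DFSUtil; FooProtocolPB, FooProtocolService and serverSideTranslator below are hypothetical placeholders, not code touched by this commit), a protobuf-generated blocking service is registered on an existing RPC.Server roughly like so:

    // Wrap a server-side translator in the protobuf-generated blocking service.
    // FooProtocolService stands in for a generated service class and
    // FooProtocolPB for its blocking PB interface.
    BlockingService service =
        FooProtocolService.newReflectiveBlockingService(serverSideTranslator);
    // Sets ProtobufRpcEngine as the protocol engine and adds the protocol to
    // the server under RPC.RpcKind.RPC_PROTOCOL_BUFFER, per the method body above.
    DFSUtil.addPBProtocol(conf, FooProtocolPB.class, service, rpcServer);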

View File

@@ -46,7 +46,6 @@
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
@@ -193,7 +192,7 @@ public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock block,
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy,
ClientDatanodeProtocolPB.class, RpcKind.RPC_PROTOCOL_BUFFER,
ClientDatanodeProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(ClientDatanodeProtocolPB.class), methodName);
}
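
The same one-line substitution, RpcKind.RPC_PROTOCOL_BUFFER becoming RPC.RpcKind.RPC_PROTOCOL_BUFFER, repeats in each of the client-side ...TranslatorPB classes in the files that follow. Schematically, the shared capability check after this commit looks like the sketch below, with XxxProtocolPB standing in for whichever PB interface a given translator wraps:

    // Queries the server's protocol metadata to see whether it supports
    // methodName on XxxProtocolPB at the client's protocol version.
    return RpcClientUtil.isMethodSupported(rpcProxy,
        XxxProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
        RPC.getProtocolVersion(XxxProtocolPB.class), methodName);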

View File

@@ -109,7 +109,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.Token;
@@ -812,7 +811,7 @@ public void setBalancerBandwidth(long bandwidth) throws IOException {
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy,
ClientNamenodeProtocolPB.class, RpcKind.RPC_PROTOCOL_BUFFER,
ClientNamenodeProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(ClientNamenodeProtocolPB.class), methodName);
}

View File

@@ -69,7 +69,6 @@
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -308,7 +307,7 @@ public void commitBlockSynchronization(ExtendedBlock block,
public boolean isMethodSupported(String methodName)
throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy, DatanodeProtocolPB.class,
RpcKind.RPC_PROTOCOL_BUFFER,
RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(DatanodeProtocolPB.class), methodName);
}
}

View File

@@ -26,7 +26,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
import com.google.protobuf.RpcController;
@@ -65,7 +64,7 @@ public String[] getGroupsForUser(String user) throws IOException {
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy,
GetUserMappingsProtocolPB.class, RpcKind.RPC_PROTOCOL_BUFFER,
GetUserMappingsProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(GetUserMappingsProtocolPB.class), methodName);
}
}

View File

@@ -39,7 +39,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.security.UserGroupInformation;
import com.google.protobuf.RpcController;
@@ -119,7 +118,7 @@ public String updateReplicaUnderRecovery(ExtendedBlock oldBlock,
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy,
InterDatanodeProtocolPB.class, RpcKind.RPC_PROTOCOL_BUFFER,
InterDatanodeProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(InterDatanodeProtocolPB.class), methodName);
}
}

View File

@@ -33,7 +33,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -109,7 +108,7 @@ public FenceResponse fence(JournalInfo journalInfo, long epoch,
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy, JournalProtocolPB.class,
RpcKind.RPC_PROTOCOL_BUFFER,
RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(JournalProtocolPB.class), methodName);
}
}

View File

@@ -47,7 +47,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -209,7 +208,7 @@ public RemoteEditLogManifest getEditLogManifest(long sinceTxId)
@Override
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy, NamenodeProtocolPB.class,
RpcKind.RPC_PROTOCOL_BUFFER,
RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(NamenodeProtocolPB.class), methodName);
}
}

View File

@@ -26,7 +26,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import com.google.protobuf.RpcController;
@@ -64,7 +63,7 @@ public void refreshServiceAcl() throws IOException {
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil.isMethodSupported(rpcProxy,
RefreshAuthorizationPolicyProtocolPB.class,
RpcKind.RPC_PROTOCOL_BUFFER,
RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class),
methodName);
}

View File

@@ -27,7 +27,6 @@
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import com.google.protobuf.RpcController;
@@ -76,7 +75,7 @@ public void refreshSuperUserGroupsConfiguration() throws IOException {
public boolean isMethodSupported(String methodName) throws IOException {
return RpcClientUtil
.isMethodSupported(rpcProxy, RefreshUserMappingsProtocolPB.class,
RpcKind.RPC_PROTOCOL_BUFFER,
RPC.RpcKind.RPC_PROTOCOL_BUFFER,
RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class),
methodName);
}

View File

@@ -62,7 +62,6 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.net.NetUtils;
import org.mockito.internal.stubbing.answers.ThrowsException;
import org.mockito.invocation.InvocationOnMock;
@@ -98,7 +97,7 @@ public TestServer(int handlerCount, boolean sleep,
}
@Override
public Writable call(RpcKind rpcKind, String protocol, Writable param, long receiveTime)
public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param, long receiveTime)
throws IOException {
if (sleep) {
// sleep a bit

View File

@@ -52,7 +52,6 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.NetUtils;
import org.junit.Assert;
@@ -86,7 +85,7 @@ public TestServer(int handlerCount, boolean sleep,
}
@Override
public Writable call(RpcKind rpcKind, String protocol, Writable param, long receiveTime)
public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param, long receiveTime)
throws IOException {
if (sleep) {
// sleep a bit

View File

@@ -60,6 +60,8 @@ Release 2.0.0 - UNRELEASED
MAPREDUCE-4205. retrofit all JVM shutdown hooks to use ShutdownHookManager
(tucu)
HADOOP-8285 MR changes for Use ProtoBuf for RpcPayLoadHeader (sanjay radia)
OPTIMIZATIONS
BUG FIXES

View File

@@ -30,7 +30,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.token.SecretManager;
@@ -171,7 +170,7 @@ private Server createServer(Class<?> pbProtocol, InetSocketAddress addr, Configu
addr.getHostName(), addr.getPort(), numHandlers, false, conf,
secretManager, portRangeConfig);
LOG.info("Adding protocol "+pbProtocol.getCanonicalName()+" to the server");
server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, pbProtocol, blockingService);
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, pbProtocol, blockingService);
return server;
}
}