svn merge -c 1241253 from trunk for MAPREDUCE-3818.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23-PB@1241625 13f79535-47bb-0310-9956-ffa450edef68
Author: Tsz-wo Sze
Date:   2012-02-07 21:04:23 +00:00
parent e4b02079af
commit 50335922f7
2 changed files with 9 additions and 9 deletions

CHANGES.txt

@@ -21,6 +21,11 @@ Release 0.23-PB - Unreleased
     MAPREDUCE-3740. Fixed broken mapreduce compilation after the patch for
     HADOOP-7965. (Devaraj K via vinodkv)
 
+  BUG FIXES
+
+    MAPREDUCE-3818. Fixed broken compilation in TestSubmitJob after the patch
+    for HDFS-2895. (Suresh Srinivas via vinodkv)
+
 Release 0.23.1 - Unreleased
 
   NEW FEATURES

TestSubmitJob.java

@@ -33,6 +33,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
@@ -148,15 +149,9 @@ static ClientProtocol getJobSubmitClient(JobConf conf,
   }
 
   static org.apache.hadoop.hdfs.protocol.ClientProtocol getDFSClient(
-      Configuration conf, UserGroupInformation ugi)
-      throws IOException {
-    return (org.apache.hadoop.hdfs.protocol.ClientProtocol)
-      RPC.getProxy(org.apache.hadoop.hdfs.protocol.ClientProtocol.class,
-          org.apache.hadoop.hdfs.protocol.ClientProtocol.versionID,
-          NameNode.getAddress(conf), ugi,
-          conf,
-          NetUtils.getSocketFactory(conf,
-              org.apache.hadoop.hdfs.protocol.ClientProtocol.class));
+      Configuration conf, UserGroupInformation ugi) throws IOException {
+    return new ClientNamenodeProtocolTranslatorPB(NameNode.getAddress(conf),
+        conf, ugi);
   }
 
   /**
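
For reference, this is roughly what getDFSClient looks like once the hunk above is
applied. It is only a sketch of the relevant fragment of TestSubmitJob: the
enclosing class name SubmitJobDfsClientSketch is made up for illustration, and only
the imports this one method needs are shown.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
    import org.apache.hadoop.hdfs.server.namenode.NameNode;
    import org.apache.hadoop.security.UserGroupInformation;

    class SubmitJobDfsClientSketch {
      // The return type stays fully qualified because the real test also uses the
      // MapReduce ClientProtocol (see getJobSubmitClient in the hunk header above),
      // so the HDFS interface cannot be imported under its simple name.
      static org.apache.hadoop.hdfs.protocol.ClientProtocol getDFSClient(
          Configuration conf, UserGroupInformation ugi) throws IOException {
        // ClientNamenodeProtocolTranslatorPB wraps the protobuf-based RPC client
        // introduced by HDFS-2895 and exposes the ClientProtocol interface,
        // replacing the old RPC.getProxy(ClientProtocol.class, versionID, ...)
        // call that no longer compiles.
        return new ClientNamenodeProtocolTranslatorPB(
            NameNode.getAddress(conf), conf, ugi);
      }
    }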