MAPREDUCE-3701. Delete HadoopYarnRPC from 0.23 branch. (mahadev)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1234161 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 2012-01-20 21:47:59 +00:00
parent f80dc919ad
commit 6d331aee03
2 changed files with 3 additions and 80 deletions


@@ -475,6 +475,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3689. RM web UI doesn't handle newline in job name.
(Thomas Graves via mahadev)
MAPREDUCE-3701. Delete HadoopYarnRPC from 0.23 branch.
(mahadev)
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES


@@ -1,80 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.ipc;

import java.io.IOException;
import java.net.InetSocketAddress;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.AvroSpecificRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.YarnException;
/**
 * This uses Hadoop RPC, tunnelling AvroSpecificRpcEngine over a Hadoop
 * connection. This does not give cross-language wire compatibility, since
 * the Hadoop RPC wire format is non-standard, but it does permit use of
 * Avro's protocol versioning features for inter-Java RPCs.
 */
public class HadoopYarnRPC extends YarnRPC {

  private static final Log LOG = LogFactory.getLog(HadoopYarnRPC.class);

  @Override
  public Object getProxy(Class protocol, InetSocketAddress addr,
      Configuration conf) {
    LOG.debug("Creating a HadoopYarnRpc proxy for protocol " + protocol);
    RPC.setProtocolEngine(conf, protocol, AvroSpecificRpcEngine.class);
    try {
      return RPC.getProxy(protocol, 1, addr, conf);
    } catch (IOException e) {
      throw new YarnException(e);
    }
  }

  @Override
  public void stopProxy(Object proxy, Configuration conf) {
    RPC.stopProxy(proxy);
  }

  @Override
  public Server getServer(Class protocol, Object instance,
      InetSocketAddress addr, Configuration conf,
      SecretManager<? extends TokenIdentifier> secretManager,
      int numHandlers) {
    LOG.debug("Creating a HadoopYarnRpc server for protocol " + protocol +
        " with " + numHandlers + " handlers");
    RPC.setProtocolEngine(conf, protocol, AvroSpecificRpcEngine.class);
    final RPC.Server hadoopServer;
    try {
      hadoopServer = RPC.getServer(protocol, instance, addr.getHostName(),
          addr.getPort(), numHandlers, false, conf, secretManager);
    } catch (IOException e) {
      throw new YarnException(e);
    }
    return hadoopServer;
  }
}
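
For context, a minimal sketch (not part of this commit) of how a YarnRPC implementation such as the removed HadoopYarnRPC was typically obtained and used. The MyProtocol interface, host, and port below are hypothetical placeholders; YarnRPC.create(conf) simply returns whichever YarnRPC implementation the configuration selects, and callers cast the returned proxy to their protocol interface.

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.ipc.YarnRPC;

public class YarnRpcUsageSketch {

  /** Hypothetical protocol interface, for illustration only. */
  interface MyProtocol {
    String ping(String msg);
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Pick the configured YarnRPC implementation (HadoopYarnRPC before this
    // commit; other implementations remain on the branch after it).
    YarnRPC rpc = YarnRPC.create(conf);

    // Placeholder address; real clients read the server address from config.
    InetSocketAddress addr = new InetSocketAddress("localhost", 8030);

    // getProxy returns Object; the caller casts to the protocol interface.
    MyProtocol proxy = (MyProtocol) rpc.getProxy(MyProtocol.class, addr, conf);

    // ... invoke RPC methods on the proxy ...

    rpc.stopProxy(proxy, conf);
  }
}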