MAPREDUCE-3698. Client cannot talk to the history server in secure mode. (mahadev)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1234120 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 2012-01-20 20:44:17 +00:00
parent 959d1d0181
commit c6923061d0
8 changed files with 70 additions and 7 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -517,6 +517,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3582. Move successfully passing MR1 tests to MR2 maven tree.
(ahmed via tucu)
+MAPREDUCE-3698. Client cannot talk to the history server in secure mode.
+(mahadev)
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES

ClientHSPolicyProvider.java (new file)

@@ -0,0 +1,45 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.app.security.authorize;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.yarn.proto.HSClientProtocol;
/**
* {@link PolicyProvider} for YARN MapReduce protocols.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class ClientHSPolicyProvider extends PolicyProvider {

  private static final Service[] mrHSServices =
      new Service[] {
        new Service(
            JHAdminConfig.MR_HS_SECURITY_SERVICE_AUTHORIZATION,
            HSClientProtocolService.BlockingInterface.class)
      };

  @Override
  public Service[] getServices() {
    return mrHSServices;
  }
}
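
For context only (this example is not part of the commit): a minimal sketch of how the new provider's service-level ACL entries can be listed. The class name ShowHsAcls is hypothetical; Service.getServiceKey() and Service.getProtocol() are existing Hadoop accessors. With this patch it prints the single entry mapping security.mrhs.client.protocol.acl to the HSClientProtocolService blocking interface.

import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;
import org.apache.hadoop.security.authorize.Service;

public class ShowHsAcls {
  public static void main(String[] args) {
    // Print each ACL configuration key and the protocol interface it guards.
    for (Service s : new ClientHSPolicyProvider().getServices()) {
      System.out.println(s.getServiceKey() + " -> " + s.getProtocol().getName());
    }
  }
}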

HSClientProtocolPBClientImpl.java

@@ -22,13 +22,20 @@ import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.mapreduce.v2.api.HSClientProtocol;
+import org.apache.hadoop.yarn.ipc.ProtoOverHadoopRpcEngine;
+import org.apache.hadoop.yarn.proto.HSClientProtocol.HSClientProtocolService;
public class HSClientProtocolPBClientImpl extends MRClientProtocolPBClientImpl
    implements HSClientProtocol {

  public HSClientProtocolPBClientImpl(long clientVersion,
      InetSocketAddress addr, Configuration conf) throws IOException {
-    super(clientVersion, addr, conf);
+    super();
+    RPC.setProtocolEngine(conf, HSClientProtocolService.BlockingInterface.class,
+        ProtoOverHadoopRpcEngine.class);
+    proxy = (HSClientProtocolService.BlockingInterface)RPC.getProxy(
+        HSClientProtocolService.BlockingInterface.class, clientVersion, addr, conf);
  }
}
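
For illustration only (not in the patch): a client-side sketch of obtaining an HSClientProtocol proxy through YarnRPC, which typically ends up in the constructor above; after this change the proxy is built against HSClientProtocolService, so secure-mode negotiation targets the history server's own protocol rather than the generic MRClientProtocolService. The class name HsProxyExample and the address are placeholders; the real address comes from mapreduce.jobhistory.address.

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.HSClientProtocol;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.ipc.YarnRPC;

public class HsProxyExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Placeholder history server IPC address.
    InetSocketAddress hsAddress =
        NetUtils.createSocketAddr("historyserver.example.com:10020");
    YarnRPC rpc = YarnRPC.create(conf);
    HSClientProtocol hsProxy =
        (HSClientProtocol) rpc.getProxy(HSClientProtocol.class, hsAddress, conf);
    System.out.println("History server proxy: " + hsProxy);
  }
}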

MRClientProtocolPBClientImpl.java

@@ -93,7 +93,9 @@ import com.google.protobuf.ServiceException;
public class MRClientProtocolPBClientImpl implements MRClientProtocol {

-  private MRClientProtocolService.BlockingInterface proxy;
+  protected MRClientProtocolService.BlockingInterface proxy;

+  public MRClientProtocolPBClientImpl() {};

  public MRClientProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException {
    RPC.setProtocolEngine(conf, MRClientProtocolService.BlockingInterface.class, ProtoOverHadoopRpcEngine.class);

JHAdminConfig.java

@@ -111,4 +111,9 @@ public class JHAdminConfig {
  public static final int DEFAULT_MR_HISTORY_WEBAPP_PORT = 19888;
  public static final String DEFAULT_MR_HISTORY_WEBAPP_ADDRESS =
      "0.0.0.0:" + DEFAULT_MR_HISTORY_WEBAPP_PORT;
+  /*
+   * HS Service Authorization
+   */
+  public static final String MR_HS_SECURITY_SERVICE_AUTHORIZATION =
+      "security.mrhs.client.protocol.acl";
}
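
For context (not part of the diff): the new key is what administrators set, typically in hadoop-policy.xml, to the ACL of users and groups allowed to use HSClientProtocol. A minimal programmatic sketch under that assumption; SecureHsConf is a hypothetical helper and the "*" value (allow everyone) is only a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;

public class SecureHsConf {
  public static Configuration create() {
    Configuration conf = new Configuration();
    // Service-level authorization must be enabled (normally in core-site.xml)
    // before the ACL below is enforced.
    conf.setBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
    // "*" allows every user; a real deployment would list users and groups
    // (normally in hadoop-policy.xml).
    conf.set(JHAdminConfig.MR_HS_SECURITY_SERVICE_AUTHORIZATION, "*");
    return conf;
  }
}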

ClientHSSecurityInfo.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.v2.security.client;
import java.lang.annotation.Annotation;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.security.KerberosInfo;

HistoryClientService.java

@@ -66,7 +66,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
-import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
+import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;
import org.apache.hadoop.mapreduce.v2.hs.webapp.HsWebApp;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.net.NetUtils;
@@ -136,7 +136,7 @@ public class HistoryClientService extends AbstractService {
    if (conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
        false)) {
-      server.refreshServiceAcl(conf, new MRAMPolicyProvider());
+      server.refreshServiceAcl(conf, new ClientHSPolicyProvider());
    }
server.start();

ProtoOverHadoopRpcEngine.java

@@ -68,7 +68,6 @@ public class ProtoOverHadoopRpcEngine implements RpcEngine {
  public <T> ProtocolProxy<T> getProxy(Class<T> protocol, long clientVersion,
      InetSocketAddress addr, UserGroupInformation ticket, Configuration conf,
      SocketFactory factory, int rpcTimeout) throws IOException {
    return new ProtocolProxy<T>(protocol, (T) Proxy.newProxyInstance(protocol
        .getClassLoader(), new Class[] { protocol }, new Invoker(protocol,
        addr, ticket, conf, factory, rpcTimeout)), false);