MAPREDUCE-6162. mapred hsadmin fails on a secure cluster. Contributed by Jason Lowe

Jason Lowe 2014-11-17 22:59:59 +00:00
parent 48d62fad80
commit 09b3dee122
15 changed files with 81 additions and 55 deletions

View File

@@ -239,8 +239,10 @@ Release 2.7.0 - UNRELEASED
BUG FIXES
MAPREDUCE-5918. LineRecordReader can return the same decompressor to CodecPool
multiple times (Sergey Murylev via raviprak)
MAPREDUCE-5918. LineRecordReader can return the same decompressor to
CodecPool multiple times (Sergey Murylev via raviprak)
MAPREDUCE-6162. mapred hsadmin fails on a secure cluster (jlowe)
Release 2.6.0 - 2014-11-18

View File

@@ -19,13 +19,17 @@ package org.apache.hadoop.mapreduce.v2.app.security.authorize;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSClientProtocolPB;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
/**
* {@link PolicyProvider} for YARN MapReduce protocols.
* {@link PolicyProvider} for MapReduce history server protocols.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
@@ -35,7 +39,16 @@ public class ClientHSPolicyProvider extends PolicyProvider {
new Service[] {
new Service(
JHAdminConfig.MR_HS_SECURITY_SERVICE_AUTHORIZATION,
HSClientProtocolPB.class)
HSClientProtocolPB.class),
new Service(
CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_GET_USER_MAPPINGS,
GetUserMappingsProtocol.class),
new Service(
CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_USER_MAPPINGS,
RefreshUserMappingsProtocol.class),
new Service(
JHAdminConfig.MR_HS_SECURITY_SERVICE_AUTHORIZATION_ADMIN_REFRESH,
HSAdminRefreshProtocol.class)
};
@Override
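
This is the crux of the client-protocol change: ClientHSPolicyProvider previously listed only HSClientProtocolPB, so with service-level authorization enabled the history server had no ACL entries for the user-mapping and admin-refresh protocols and hsadmin calls were refused. The three added Service entries pair each protocol with the configuration key that names its ACL. An illustrative sketch, not part of the patch, that prints this mapping; it assumes the standard PolicyProvider/Service accessors getServices(), getServiceKey(), and getProtocol():

    import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;
    import org.apache.hadoop.security.authorize.PolicyProvider;
    import org.apache.hadoop.security.authorize.Service;

    public class DumpHsPolicy {
      public static void main(String[] args) {
        PolicyProvider provider = new ClientHSPolicyProvider();
        for (Service s : provider.getServices()) {
          // Each entry pairs an ACL configuration key with the protocol it guards.
          System.out.println(s.getServiceKey() + " -> " + s.getProtocol().getName());
        }
      }
    }

Each printed key can then be given a "users groups" ACL value in hadoop-policy.xml on the history server.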

View File

@@ -74,6 +74,7 @@
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>HSAdminRefreshProtocol.proto</include>
<include>mr_protos.proto</include>
<include>mr_service_protos.proto</include>
<include>MRClientProtocol.proto</include>

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.protocol;
package org.apache.hadoop.mapreduce.v2.api;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability;
@@ -25,10 +25,6 @@ import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.tools.GetUserMappingsProtocol;
/**
* Protocol use
*
*/
@KerberosInfo(serverPrincipal = CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
@Private
@InterfaceStability.Evolving

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.protocol;
package org.apache.hadoop.mapreduce.v2.api;
import java.io.IOException;
@@ -25,10 +25,6 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.KerberosInfo;
/**
* Protocol use
*
*/
@KerberosInfo(serverPrincipal = CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
@Private
@InterfaceStability.Evolving

View File

@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.protocolPB;
package org.apache.hadoop.mapreduce.v2.api;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability;
@@ -25,7 +25,7 @@ import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.HSAd
import org.apache.hadoop.security.KerberosInfo;
@KerberosInfo(serverPrincipal = CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
@ProtocolInfo(protocolName = "org.apache.hadoop.mapreduce.v2.hs.protocol.HSAdminRefreshProtocol", protocolVersion = 1)
@ProtocolInfo(protocolName = "org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol", protocolVersion = 1)
@Private
@InterfaceStability.Evolving
public interface HSAdminRefreshProtocolPB extends

View File

@@ -167,6 +167,8 @@ public class JHAdminConfig {
*/
public static final String MR_HS_SECURITY_SERVICE_AUTHORIZATION =
"security.mrhs.client.protocol.acl";
public static final String MR_HS_SECURITY_SERVICE_AUTHORIZATION_ADMIN_REFRESH =
"security.mrhs.admin.refresh.protocol.acl";
/**
* The HistoryStorage class to use to cache history data.
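
The new key names the ACL that guards HSAdminRefreshProtocol, separate from the existing client-protocol ACL. A hedged sketch of how such an ACL string is evaluated, using the standard Hadoop AccessControlList "users groups" format; the user and group names below are placeholders:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authorize.AccessControlList;

    public class AdminRefreshAclCheck {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical ACL: users "mapred,ops" plus members of group "hadoopadmins".
        conf.set(JHAdminConfig.MR_HS_SECURITY_SERVICE_AUTHORIZATION_ADMIN_REFRESH,
            "mapred,ops hadoopadmins");
        AccessControlList acl = new AccessControlList(
            conf.get(JHAdminConfig.MR_HS_SECURITY_SERVICE_AUTHORIZATION_ADMIN_REFRESH));
        UserGroupInformation caller = UserGroupInformation.getCurrentUser();
        System.out.println("allowed: " + acl.isUserAllowed(caller));
      }
    }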

View File

@@ -77,36 +77,6 @@
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
<param>
${basedir}/../../../../hadoop-common-project/hadoop-common/src/main/proto
</param>
<param>${basedir}/src/main/proto</param>
</imports>
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>HSAdminRefreshProtocol.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -27,9 +27,9 @@ import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.mapreduce.v2.hs.protocol.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB;
import org.apache.hadoop.mapreduce.v2.hs.protocolPB.HSAdminRefreshProtocolClientSideTranslatorPB;
import org.apache.hadoop.mapreduce.v2.hs.protocolPB.HSAdminRefreshProtocolPB;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.UserGroupInformation;

View File

@@ -25,9 +25,10 @@ import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.hs.HSProxies;
import org.apache.hadoop.mapreduce.v2.hs.protocol.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.UserGroupInformation;
@@ -46,6 +47,21 @@ public class HSAdmin extends Configured implements Tool {
super(conf);
}
@Override
public void setConf(Configuration conf) {
if (conf != null) {
conf = addSecurityConfiguration(conf);
}
super.setConf(conf);
}
private Configuration addSecurityConfiguration(Configuration conf) {
conf = new JobConf(conf);
conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
conf.get(JHAdminConfig.MR_HISTORY_PRINCIPAL, ""));
return conf;
}
/**
* Displays format of commands.
*
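
This is the client half of the fix: the @KerberosInfo annotations on the history server protocols resolve the server principal from hadoop.security.service.user.name.key, so hsadmin must copy the configured history server principal into that key before its RPC proxies are built. Overriding setConf() makes that happen for every configuration the tool receives, including the one injected by ToolRunner. An illustrative sketch of the equivalent manual setup, not part of the patch; the principal value is a placeholder and on a real cluster comes from mapreduce.jobhistory.principal in mapred-site.xml:

    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;

    public class HsAdminConfSetup {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Placeholder principal for illustration only.
        conf.setIfUnset(JHAdminConfig.MR_HISTORY_PRINCIPAL, "jhs/_HOST@EXAMPLE.COM");
        // What addSecurityConfiguration() now does on every HSAdmin invocation:
        // copy the history server principal into the key that the @KerberosInfo
        // annotations on the HS admin protocols use to resolve the server principal.
        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
            conf.get(JHAdminConfig.MR_HISTORY_PRINCIPAL, ""));
        System.out.println("service user name = "
            + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY));
      }
    }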

View File

@@ -26,11 +26,12 @@ import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshAdminAclsRequestProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLoadedJobCacheRequestProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshJobRetentionSettingsRequestProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLogRetentionSettingsRequestProto;
import org.apache.hadoop.mapreduce.v2.hs.protocol.HSAdminRefreshProtocol;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

View File

@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.v2.hs.protocolPB;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshAdminAclsResponseProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshAdminAclsRequestProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLoadedJobCacheRequestProto;
@@ -29,7 +31,6 @@ import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.Refr
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshJobRetentionSettingsResponseProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLogRetentionSettingsRequestProto;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLogRetentionSettingsResponseProto;
import org.apache.hadoop.mapreduce.v2.hs.protocol.HSAdminRefreshProtocol;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

View File

@@ -25,6 +25,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.WritableRpcEngine;
@@ -34,7 +35,6 @@ import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService;
import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
import org.apache.hadoop.security.protocolPB.RefreshUserMappingsProtocolPB;
@@ -43,12 +43,13 @@ import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
import org.apache.hadoop.mapreduce.v2.api.HSAdminProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB;
import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;
import org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger;
import org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger.AuditConstants;
import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.HSAdminRefreshProtocolService;
import org.apache.hadoop.mapreduce.v2.hs.protocol.HSAdminProtocol;
import org.apache.hadoop.mapreduce.v2.hs.protocolPB.HSAdminRefreshProtocolPB;
import org.apache.hadoop.mapreduce.v2.hs.protocolPB.HSAdminRefreshProtocolServerSideTranslatorPB;
import com.google.protobuf.BlockingService;
@@ -110,6 +111,13 @@ public class HSAdminServer extends AbstractService implements HSAdminProtocol {
addProtocol(conf, HSAdminRefreshProtocolPB.class,
refreshHSAdminProtocolService);
// Enable service authorization?
if (conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
clientRpcServer.refreshServiceAcl(conf, new ClientHSPolicyProvider());
}
adminAcl = new AccessControlList(conf.get(JHAdminConfig.JHS_ADMIN_ACL,
JHAdminConfig.DEFAULT_JHS_ADMIN_ACL));
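
This is the server half: once hadoop.security.authorization is true, the admin RPC server must be handed the expanded ClientHSPolicyProvider, otherwise every protocol it serves is denied by default. A small sketch of the same enable-then-refresh pattern as a standalone helper; RPC.Server.refreshServiceAcl(Configuration, PolicyProvider) is the standard Hadoop IPC call, and the helper class name is made up:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.ipc.RPC;
    import org.apache.hadoop.mapreduce.v2.app.security.authorize.ClientHSPolicyProvider;

    public final class ServiceAclRefresher {
      private ServiceAclRefresher() {}

      // Mirrors what HSAdminServer.serviceInit() now does for its clientRpcServer:
      // only when authorization is enabled, load the policy provider's ACLs.
      public static void refreshIfEnabled(RPC.Server server, Configuration conf) {
        if (conf.getBoolean(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
          server.refreshServiceAcl(conf, new ClientHSPolicyProvider());
        }
      }
    }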

View File

@@ -23,10 +23,12 @@ import static org.junit.Assert.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
@@ -39,6 +41,10 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
@@ -46,7 +52,9 @@ import static org.mockito.Mockito.verify;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService;
@RunWith(Parameterized.class)
public class TestHSAdminServer {
private boolean securityEnabled = true;
private HSAdminServer hsAdminServer = null;
private HSAdmin hsAdminClient = null;
JobConf conf = null;
@@ -80,6 +88,15 @@ public class TestHSAdminServer {
}
}
@Parameters
public static Collection<Object[]> testParameters() {
return Arrays.asList(new Object[][] { { false }, { true } });
}
public TestHSAdminServer(boolean enableSecurity) {
securityEnabled = enableSecurity;
}
@Before
public void init() throws HadoopIllegalArgumentException, IOException {
conf = new JobConf();
@@ -87,6 +104,9 @@ public class TestHSAdminServer {
conf.setClass("hadoop.security.group.mapping", MockUnixGroupsMapping.class,
GroupMappingServiceProvider.class);
conf.setLong("hadoop.security.groups.cache.secs", groupRefreshTimeoutSec);
conf.setBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
securityEnabled);
Groups.getUserToGroupsMappingService(conf);
jobHistoryService = mock(JobHistory.class);
alds = mock(AggregatedLogDeletionService.class);
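
The test class now runs every case twice, with hadoop.security.authorization off and on, via the JUnit 4 Parameterized runner. A minimal, self-contained skeleton of that pattern; the class, method, and assertion here are invented for illustration:

    import java.util.Arrays;
    import java.util.Collection;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;
    import static org.junit.Assert.assertNotNull;

    // The runner instantiates the class once per parameter row, so every @Test
    // method runs with authorization both disabled and enabled.
    @RunWith(Parameterized.class)
    public class SecurityToggleExample {
      private final boolean securityEnabled;

      @Parameters
      public static Collection<Object[]> params() {
        return Arrays.asList(new Object[][] { { false }, { true } });
      }

      public SecurityToggleExample(boolean securityEnabled) {
        this.securityEnabled = securityEnabled;
      }

      @Test
      public void runsUnderBothSettings() {
        // Placeholder check; a real test would exercise the code path that
        // behaves differently depending on securityEnabled.
        assertNotNull(Boolean.valueOf(securityEnabled));
      }
    }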