HADOOP-9848. Create a MiniKDC for use with security testing. (ywskycn via tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1513308 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2013-08-13 00:23:10 +00:00
parent 19b36f0900
commit 1d1ab587e4
12 changed files with 1114 additions and 0 deletions


@@ -309,6 +309,9 @@ Release 2.3.0 - UNRELEASED
    HADOOP-9758. Provide configuration option for FileSystem/FileContext
    symlink resolution. (Andrew Wang via Colin Patrick McCabe)

    HADOOP-9848. Create a MiniKDC for use with security testing.
    (ywskycn via tucu)

  OPTIMIZATIONS

    HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)


@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
    <version>3.0.0-SNAPSHOT</version>
    <relativePath>../../hadoop-project</relativePath>
  </parent>
  <modelVersion>4.0.0</modelVersion>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-minikdc</artifactId>
  <version>3.0.0-SNAPSHOT</version>
  <description>Apache Hadoop MiniKDC</description>
  <name>Apache Hadoop MiniKDC</name>
  <packaging>jar</packaging>

  <dependencies>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.directory.server</groupId>
      <artifactId>apacheds-all</artifactId>
      <version>2.0.0-M14</version>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <scope>compile</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>compile</scope>
    </dependency>
  </dependencies>
</project>


@@ -0,0 +1,42 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.directory.server.kerberos.shared.keytab;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
//This is a hack for ApacheDS 2.0.0-M14 to be able to create
//keytab files with more than one principal.
//It needs to be in this package because the KeytabEncoder class is package
//private.
//This class can be removed once DIRSERVER-1882
//(https://issues.apache.org/jira/browse/DIRSERVER-1882) is solved.
public class HackedKeytab extends Keytab {

  private byte[] keytabVersion = VERSION_52;

  public void write( File file, int principalCount ) throws IOException
  {
    HackedKeytabEncoder writer = new HackedKeytabEncoder();
    ByteBuffer buffer = writer.write( keytabVersion, getEntries(),
        principalCount );
    writeFile( buffer, file );
  }
}
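A minimal sketch of how this workaround is meant to be driven (the helper name, entry list and keytab path below are hypothetical; MiniKdc.createPrincipal later in this commit is the real caller):

import org.apache.directory.server.kerberos.shared.keytab.HackedKeytab;
import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;

import java.io.File;
import java.io.IOException;
import java.util.List;

public class HackedKeytabSketch {
  // Persist previously built entries; the extra principalCount argument lets
  // HackedKeytabEncoder size its buffer, which is what the stock 2.0.0-M14
  // encoder gets wrong when a keytab holds more than one principal.
  static void writeKeytab(List<KeytabEntry> entries, int principalCount,
      File file) throws IOException {
    HackedKeytab keytab = new HackedKeytab();
    keytab.setEntries(entries);
    keytab.write(file, principalCount);
  }
}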


@@ -0,0 +1,121 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.directory.server.kerberos.shared.keytab;
import org.apache.directory.shared.kerberos.components.EncryptionKey;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;
//This is a hack for ApacheDS 2.0.0-M14 to be able to create
//keytab files with more than one principal.
//It needs to be in this package because the KeytabEncoder class is package
//private.
//This class can be removed once DIRSERVER-1882
//(https://issues.apache.org/jira/browse/DIRSERVER-1882) is solved.
class HackedKeytabEncoder extends KeytabEncoder {

  ByteBuffer write( byte[] keytabVersion, List<KeytabEntry> entries,
      int principalCount )
  {
    ByteBuffer buffer = ByteBuffer.allocate( 512 * principalCount );
    putKeytabVersion( buffer, keytabVersion );
    putKeytabEntries( buffer, entries );
    buffer.flip();
    return buffer;
  }

  private void putKeytabVersion( ByteBuffer buffer, byte[] version )
  {
    buffer.put( version );
  }

  private void putKeytabEntries( ByteBuffer buffer, List<KeytabEntry> entries )
  {
    Iterator<KeytabEntry> iterator = entries.iterator();
    while ( iterator.hasNext() )
    {
      ByteBuffer entryBuffer = putKeytabEntry( iterator.next() );
      int size = entryBuffer.position();
      entryBuffer.flip();
      buffer.putInt( size );
      buffer.put( entryBuffer );
    }
  }

  private ByteBuffer putKeytabEntry( KeytabEntry entry )
  {
    ByteBuffer buffer = ByteBuffer.allocate( 100 );
    putPrincipalName( buffer, entry.getPrincipalName() );
    buffer.putInt( ( int ) entry.getPrincipalType() );
    buffer.putInt( ( int ) ( entry.getTimeStamp().getTime() / 1000 ) );
    buffer.put( entry.getKeyVersion() );
    putKeyBlock( buffer, entry.getKey() );
    return buffer;
  }

  private void putPrincipalName( ByteBuffer buffer, String principalName )
  {
    String[] split = principalName.split("@");
    String nameComponent = split[0];
    String realm = split[1];
    String[] nameComponents = nameComponent.split( "/" );
    // increment for v1
    buffer.putShort( ( short ) nameComponents.length );
    putCountedString( buffer, realm );
    // write components
    for ( int ii = 0; ii < nameComponents.length; ii++ )
    {
      putCountedString( buffer, nameComponents[ii] );
    }
  }

  private void putKeyBlock( ByteBuffer buffer, EncryptionKey key )
  {
    buffer.putShort( ( short ) key.getKeyType().getValue() );
    putCountedBytes( buffer, key.getKeyValue() );
  }

  private void putCountedString( ByteBuffer buffer, String string )
  {
    byte[] data = string.getBytes();
    buffer.putShort( ( short ) data.length );
    buffer.put( data );
  }

  private void putCountedBytes( ByteBuffer buffer, byte[] data )
  {
    buffer.putShort( ( short ) data.length );
    buffer.put( data );
  }
}


@@ -0,0 +1,86 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.minikdc;
import org.junit.After;
import org.junit.Before;
import java.io.File;
import java.util.Properties;
/**
 * KerberosSecurityTestcase provides a base class for using MiniKdc with other
 * testcases. KerberosSecurityTestcase starts the MiniKdc (@Before) before
 * running tests, and stops the MiniKdc (@After) after the testcases, using
 * default settings (working dir and kdc configurations).
 * <p>
 * Users can directly inherit this class and implement their own test
 * functions using the default settings, or override createTestDir() and
 * createMiniKdcConf() to provide new settings.
 */
public class KerberosSecurityTestcase {
  private MiniKdc kdc;
  private File workDir;
  private Properties conf;

  @Before
  public void startMiniKdc() throws Exception {
    createTestDir();
    createMiniKdcConf();

    kdc = new MiniKdc(conf, workDir);
    kdc.start();
  }

  /**
   * Create a working directory; it should be the build directory. Under
   * this directory an ApacheDS working directory will be created, and it
   * will be deleted when the MiniKdc stops.
   */
  public void createTestDir() {
    workDir = new File(System.getProperty("test.dir", "target"));
  }

  /**
   * Create a Kdc configuration.
   */
  public void createMiniKdcConf() {
    conf = MiniKdc.createConf();
  }

  @After
  public void stopMiniKdc() {
    if (kdc != null) {
      kdc.stop();
    }
  }

  public MiniKdc getKdc() {
    return kdc;
  }

  public File getWorkDir() {
    return workDir;
  }

  public Properties getConf() {
    return conf;
  }
}
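As a usage sketch (the test class and principal name here are hypothetical), a test inherits the base class and picks up the KDC that the @Before hook has already started:

import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
import org.apache.hadoop.minikdc.MiniKdc;
import org.junit.Assert;
import org.junit.Test;

import java.io.File;

public class MyKerberosTest extends KerberosSecurityTestcase {
  @Test
  public void testWithRunningKdc() throws Exception {
    MiniKdc kdc = getKdc();   // started by the inherited @Before hook
    File keytab = new File(getWorkDir(), "my.keytab");
    kdc.createPrincipal(keytab, "client/localhost");
    Assert.assertTrue(keytab.exists());
  }
}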


@@ -0,0 +1,534 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.minikdc;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.text.StrSubstitutor;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
import org.apache.directory.server.constants.ServerDNConstants;
import org.apache.directory.server.core.DefaultDirectoryService;
import org.apache.directory.server.core.api.CacheService;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.InstanceLayout;
import org.apache.directory.server.core.api.schema.SchemaPartition;
import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
import org.apache.directory.server.core.partition.ldif.LdifPartition;
import org.apache.directory.server.kerberos.kdc.KdcServer;
import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
import org.apache.directory.server.kerberos.shared.keytab.HackedKeytab;
import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.directory.server.protocol.shared.transport.UdpTransport;
import org.apache.directory.shared.kerberos.KerberosTime;
import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
import org.apache.directory.shared.kerberos.components.EncryptionKey;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.ldif.LdifEntry;
import org.apache.directory.api.ldap.model.ldif.LdifReader;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
/**
* Mini KDC based on Apache Directory Server that can be embedded in testcases
* or used from command line as a standalone KDC.
* <p/>
* <b>From within testcases:</b>
* <p/>
* MiniKdc sets 2 System properties when started and un-sets them when stopped:
* <ul>
* <li>java.security.krb5.conf: set to the MiniKdc realm/host/port</li>
* <li>sun.security.krb5.debug: set to the debug value provided in the
* configuration</li>
* </ul>
* Because of this, multiple MiniKdc instances cannot be started in parallel
* (for example, by testcases running in parallel that each start a KDC).
* Instead, a single MiniKdc should be shared by all testcases running in
* parallel.
* <p/>
* MiniKdc default configuration values are:
* <ul>
* <li>org.name=EXAMPLE (used to create the REALM)</li>
* <li>org.domain=COM (used to create the REALM)</li>
* <li>kdc.bind.address=localhost</li>
* <li>kdc.port=0 (ephemeral port)</li>
* <li>instance=DefaultKrbServer</li>
* <li>max.ticket.lifetime=86400000 (1 day)</li>
* <li>max.renewable.lifetime=604800000 (7 days)</li>
* <li>transport=TCP</li>
* <li>debug=false</li>
* </ul>
* The generated krb5.conf forces TCP connections.
* <p/>
*/
public class MiniKdc {
public static void main(String[] args) throws Exception {
if (args.length < 4) {
System.out.println("Arguments: <WORKDIR> <MINIKDCPROPERTIES> " +
"<KEYTABFILE> [<PRINCIPALS>]+");
System.exit(1);
}
File workDir = new File(args[0]);
if (!workDir.exists()) {
throw new RuntimeException("Specified work directory does not exists: "
+ workDir.getAbsolutePath());
}
Properties conf = createConf();
File file = new File(args[1]);
if (!file.exists()) {
throw new RuntimeException("Specified configuration does not exists: "
+ file.getAbsolutePath());
}
Properties userConf = new Properties();
userConf.load(new FileReader(file));
for (Map.Entry entry : userConf.entrySet()) {
conf.put(entry.getKey(), entry.getValue());
}
final MiniKdc miniKdc = new MiniKdc(conf, workDir);
miniKdc.start();
File krb5conf = new File(workDir, "krb5.conf");
if (miniKdc.getKrb5conf().renameTo(krb5conf)) {
File keytabFile = new File(args[2]).getAbsoluteFile();
String[] principals = new String[args.length - 3];
System.arraycopy(args, 3, principals, 0, args.length - 3);
miniKdc.createPrincipal(keytabFile, principals);
System.out.println();
System.out.println("Standalone MiniKdc Running");
System.out.println("---------------------------------------------------");
System.out.println(" Realm : " + miniKdc.getRealm());
System.out.println(" Running at : " + miniKdc.getHost() + ":" +
miniKdc.getHost());
System.out.println(" krb5conf : " + krb5conf);
System.out.println();
System.out.println(" created keytab : " + keytabFile);
System.out.println(" with principals : " + Arrays.asList(principals));
System.out.println();
System.out.println(" Do <CTRL-C> or kill <PID> to stop it");
System.out.println("---------------------------------------------------");
System.out.println();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
miniKdc.stop();
}
});
} else {
throw new RuntimeException("Cannot rename KDC's krb5conf to "
+ krb5conf.getAbsolutePath());
}
}
private static final Logger LOG = LoggerFactory.getLogger(MiniKdc.class);
public static final String ORG_NAME = "org.name";
public static final String ORG_DOMAIN = "org.domain";
public static final String KDC_BIND_ADDRESS = "kdc.bind.address";
public static final String KDC_PORT = "kdc.port";
public static final String INSTANCE = "instance";
public static final String MAX_TICKET_LIFETIME = "max.ticket.lifetime";
public static final String MAX_RENEWABLE_LIFETIME = "max.renewable.lifetime";
public static final String TRANSPORT = "transport";
public static final String DEBUG = "debug";
private static final Set<String> PROPERTIES = new HashSet<String>();
private static final Properties DEFAULT_CONFIG = new Properties();
static {
PROPERTIES.add(ORG_NAME);
PROPERTIES.add(ORG_DOMAIN);
PROPERTIES.add(KDC_BIND_ADDRESS);
PROPERTIES.add(KDC_PORT);
PROPERTIES.add(INSTANCE);
PROPERTIES.add(TRANSPORT);
PROPERTIES.add(MAX_TICKET_LIFETIME);
PROPERTIES.add(MAX_RENEWABLE_LIFETIME);
DEFAULT_CONFIG.setProperty(KDC_BIND_ADDRESS, "localhost");
DEFAULT_CONFIG.setProperty(KDC_PORT, "0");
DEFAULT_CONFIG.setProperty(INSTANCE, "DefaultKrbServer");
DEFAULT_CONFIG.setProperty(ORG_NAME, "EXAMPLE");
DEFAULT_CONFIG.setProperty(ORG_DOMAIN, "COM");
DEFAULT_CONFIG.setProperty(TRANSPORT, "TCP");
DEFAULT_CONFIG.setProperty(MAX_TICKET_LIFETIME, "86400000");
DEFAULT_CONFIG.setProperty(MAX_RENEWABLE_LIFETIME, "604800000");
DEFAULT_CONFIG.setProperty(DEBUG, "false");
}
/**
* Convenience method that returns MiniKdc default configuration.
* <p/>
* The returned configuration is a copy; it can be customized before using
* it to create a MiniKdc.
* @return a MiniKdc default configuration.
*/
public static Properties createConf() {
return (Properties) DEFAULT_CONFIG.clone();
}
private Properties conf;
private DirectoryService ds;
private KdcServer kdc;
private int port;
private String realm;
private File workDir;
private File krb5conf;
/**
* Creates a MiniKdc.
*
* @param conf MiniKdc configuration.
* @param workDir working directory; it should be the build directory. Under
* this directory an ApacheDS working directory will be created, and it
* will be deleted when the MiniKdc stops.
* @throws Exception thrown if the MiniKdc could not be created.
*/
public MiniKdc(Properties conf, File workDir) throws Exception {
if (!conf.keySet().containsAll(PROPERTIES)) {
Set<String> missingProperties = new HashSet<String>(PROPERTIES);
missingProperties.removeAll(conf.keySet());
throw new IllegalArgumentException("Missing configuration properties: "
+ missingProperties);
}
this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
if (! workDir.exists()
&& ! workDir.mkdirs()) {
throw new RuntimeException("Cannot create directory " + workDir);
}
LOG.info("Configuration:");
LOG.info("---------------------------------------------------------------");
for (Map.Entry entry : conf.entrySet()) {
LOG.info(" {}: {}", entry.getKey(), entry.getValue());
}
LOG.info("---------------------------------------------------------------");
this.conf = conf;
port = Integer.parseInt(conf.getProperty(KDC_PORT));
if (port == 0) {
ServerSocket ss = new ServerSocket(0, 1, InetAddress.getByName
(conf.getProperty(KDC_BIND_ADDRESS)));
port = ss.getLocalPort();
ss.close();
}
String orgName= conf.getProperty(ORG_NAME);
String orgDomain = conf.getProperty(ORG_DOMAIN);
realm = orgName.toUpperCase() + "." + orgDomain.toUpperCase();
}
/**
* Returns the port of the MiniKdc.
*
* @return the port of the MiniKdc.
*/
public int getPort() {
return port;
}
/**
* Returns the host of the MiniKdc.
*
* @return the host of the MiniKdc.
*/
public String getHost() {
return conf.getProperty(KDC_BIND_ADDRESS);
}
/**
* Returns the realm of the MiniKdc.
*
* @return the realm of the MiniKdc.
*/
public String getRealm() {
return realm;
}
public File getKrb5conf() {
return krb5conf;
}
/**
* Starts the MiniKdc.
*
* @throws Exception thrown if the MiniKdc could not be started.
*/
public synchronized void start() throws Exception {
if (kdc != null) {
throw new RuntimeException("Already started");
}
initDirectoryService();
initKDCServer();
}
@SuppressWarnings("unchecked")
private void initDirectoryService() throws Exception {
ds = new DefaultDirectoryService();
ds.setInstanceLayout(new InstanceLayout(workDir));
CacheService cacheService = new CacheService();
ds.setCacheService(cacheService);
// first load the schema
InstanceLayout instanceLayout = ds.getInstanceLayout();
File schemaPartitionDirectory = new File(
instanceLayout.getPartitionsDirectory(), "schema");
SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor(
instanceLayout.getPartitionsDirectory());
extractor.extractOrCopy();
SchemaLoader loader = new LdifSchemaLoader(schemaPartitionDirectory);
SchemaManager schemaManager = new DefaultSchemaManager(loader);
schemaManager.loadAllEnabled();
ds.setSchemaManager(schemaManager);
// Init the LdifPartition with schema
LdifPartition schemaLdifPartition = new LdifPartition(schemaManager);
schemaLdifPartition.setPartitionPath(schemaPartitionDirectory.toURI());
// The schema partition
SchemaPartition schemaPartition = new SchemaPartition(schemaManager);
schemaPartition.setWrappedPartition(schemaLdifPartition);
ds.setSchemaPartition(schemaPartition);
JdbmPartition systemPartition = new JdbmPartition(ds.getSchemaManager());
systemPartition.setId("system");
systemPartition.setPartitionPath(new File(
ds.getInstanceLayout().getPartitionsDirectory(),
systemPartition.getId()).toURI());
systemPartition.setSuffixDn(new Dn(ServerDNConstants.SYSTEM_DN));
systemPartition.setSchemaManager(ds.getSchemaManager());
ds.setSystemPartition(systemPartition);
ds.getChangeLog().setEnabled(false);
ds.setDenormalizeOpAttrsEnabled(true);
ds.addLast(new KeyDerivationInterceptor());
// create one partition
String orgName= conf.getProperty(ORG_NAME).toLowerCase();
String orgDomain = conf.getProperty(ORG_DOMAIN).toLowerCase();
JdbmPartition partition = new JdbmPartition(ds.getSchemaManager());
partition.setId(orgName);
partition.setPartitionPath(new File(
ds.getInstanceLayout().getPartitionsDirectory(), orgName).toURI());
partition.setSuffixDn(new Dn("dc=" + orgName + ",dc=" + orgDomain));
ds.addPartition(partition);
// indexes
Set indexedAttributes = new HashSet();
indexedAttributes.add(new JdbmIndex<String, Entry>("objectClass", false));
indexedAttributes.add(new JdbmIndex<String, Entry>("dc", false));
indexedAttributes.add(new JdbmIndex<String, Entry>("ou", false));
partition.setIndexedAttributes(indexedAttributes);
// And start the ds
ds.setInstanceId(conf.getProperty(INSTANCE));
ds.startup();
// context entry, after ds.startup()
Dn dn = new Dn("dc=" + orgName + ",dc=" + orgDomain);
Entry entry = ds.newEntry(dn);
entry.add("objectClass", "top", "domain");
entry.add("dc", orgName);
ds.getAdminSession().add(entry);
}
private void initKDCServer() throws Exception {
String orgName= conf.getProperty(ORG_NAME);
String orgDomain = conf.getProperty(ORG_DOMAIN);
String bindAddress = conf.getProperty(KDC_BIND_ADDRESS);
final Map<String, String> map = new HashMap<String, String>();
map.put("0", orgName.toLowerCase());
map.put("1", orgDomain.toLowerCase());
map.put("2", orgName.toUpperCase());
map.put("3", orgDomain.toUpperCase());
map.put("4", bindAddress);
ClassLoader cl = Thread.currentThread().getContextClassLoader();
InputStream is = cl.getResourceAsStream("minikdc.ldiff");
SchemaManager schemaManager = ds.getSchemaManager();
final String content = StrSubstitutor.replace(IOUtils.toString(is), map);
LdifReader reader = new LdifReader(new StringReader(content));
for (LdifEntry ldifEntry : reader) {
ds.getAdminSession().add(new DefaultEntry(schemaManager,
ldifEntry.getEntry()));
}
kdc = new KdcServer();
kdc.setDirectoryService(ds);
// transport
String transport = conf.getProperty(TRANSPORT);
if (transport.trim().equals("TCP")) {
kdc.addTransports(new TcpTransport(bindAddress, port, 3, 50));
} else if (transport.trim().equals("UDP")) {
kdc.addTransports(new UdpTransport(port));
} else {
throw new IllegalArgumentException("Invalid transport: " + transport);
}
kdc.setServiceName(conf.getProperty(INSTANCE));
kdc.getConfig().setMaximumRenewableLifetime(
Long.parseLong(conf.getProperty(MAX_RENEWABLE_LIFETIME)));
kdc.getConfig().setMaximumTicketLifetime(
Long.parseLong(conf.getProperty(MAX_TICKET_LIFETIME)));
kdc.getConfig().setPaEncTimestampRequired(false);
kdc.start();
StringBuilder sb = new StringBuilder();
is = cl.getResourceAsStream("minikdc-krb5.conf");
BufferedReader r = new BufferedReader(new InputStreamReader(is));
String line = r.readLine();
while (line != null) {
sb.append(line).append("{3}");
line = r.readLine();
}
r.close();
krb5conf = new File(workDir, "krb5.conf").getAbsoluteFile();
FileUtils.writeStringToFile(krb5conf,
MessageFormat.format(sb.toString(), getRealm(), getHost(),
Integer.toString(getPort()), System.getProperty("line.separator")));
System.setProperty("java.security.krb5.conf", krb5conf.getAbsolutePath());
System.setProperty("sun.security.krb5.debug", conf.getProperty(DEBUG,
"false"));
LOG.info("MiniKdc listening at port: {}", getPort());
LOG.info("MiniKdc setting JVM krb5.conf to: {}",
krb5conf.getAbsolutePath());
}
/**
* Stops the MiniKdc.
*/
public synchronized void stop() {
if (kdc != null) {
System.getProperties().remove("java.security.krb5.conf");
System.getProperties().remove("sun.security.krb5.debug");
kdc.stop();
try {
ds.shutdown();
} catch (Exception ex) {
LOG.error("Could not shutdown ApacheDS properly: {}", ex.toString(),
ex);
}
}
delete(workDir);
}
private void delete(File f) {
if (f.isFile()) {
if (! f.delete()) {
LOG.warn("WARNING: cannot delete file " + f.getAbsolutePath());
}
} else {
for (File c: f.listFiles()) {
delete(c);
}
if (! f.delete()) {
LOG.warn("WARNING: cannot delete directory " + f.getAbsolutePath());
}
}
}
/**
* Creates a principal in the KDC with the specified user and password.
*
* @param principal principal name, do not include the domain.
* @param password password.
* @throws Exception thrown if the principal could not be created.
*/
public synchronized void createPrincipal(String principal, String password)
throws Exception {
String orgName= conf.getProperty(ORG_NAME);
String orgDomain = conf.getProperty(ORG_DOMAIN);
String baseDn = "ou=users,dc=" + orgName.toLowerCase() + ",dc=" +
orgDomain.toLowerCase();
String content = "dn: uid=" + principal + "," + baseDn + "\n" +
"objectClass: top\n" +
"objectClass: person\n" +
"objectClass: inetOrgPerson\n" +
"objectClass: krb5principal\n" +
"objectClass: krb5kdcentry\n" +
"cn: " + principal + "\n" +
"sn: " + principal + "\n" +
"uid: " + principal + "\n" +
"userPassword: " + password + "\n" +
"krb5PrincipalName: " + principal + "@" + getRealm() + "\n" +
"krb5KeyVersionNumber: 0";
for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) {
ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(),
ldifEntry.getEntry()));
}
}
/**
* Creates multiple principals in the KDC and adds them to a keytab file.
*
* @param keytabFile keytab file to add the created principals to.
* @param principals principals to add to the KDC, do not include the domain.
* @throws Exception thrown if the principals or the keytab file could not be
* created.
*/
public void createPrincipal(File keytabFile, String ... principals)
throws Exception {
String generatedPassword = UUID.randomUUID().toString();
HackedKeytab keytab = new HackedKeytab();
List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
for (String principal : principals) {
createPrincipal(principal, generatedPassword);
principal = principal + "@" + getRealm();
KerberosTime timestamp = new KerberosTime();
for (Map.Entry<EncryptionType, EncryptionKey> entry : KerberosKeyFactory
.getKerberosKeys(principal, generatedPassword).entrySet()) {
EncryptionKey ekey = entry.getValue();
byte keyVersion = (byte) ekey.getKeyVersion();
entries.add(new KeytabEntry(principal, 1L, timestamp, keyVersion,
ekey));
}
}
keytab.setEntries(entries);
keytab.write(keytabFile, principals.length);
}
}
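For embedding MiniKdc without the JUnit base class, a minimal sketch (the work directory, debug setting and principal names are illustrative; the API calls are the ones defined above):

import org.apache.hadoop.minikdc.MiniKdc;

import java.io.File;
import java.util.Properties;

public class EmbeddedMiniKdcSketch {
  public static void main(String[] args) throws Exception {
    Properties conf = MiniKdc.createConf();    // copy of the defaults listed above
    conf.setProperty(MiniKdc.DEBUG, "true");   // optional krb5 debug output
    MiniKdc kdc = new MiniKdc(conf, new File("target"));
    kdc.start();                               // sets java.security.krb5.conf
    try {
      kdc.createPrincipal(new File("target", "test.keytab"), "user1", "user2");
      System.out.println("Realm " + kdc.getRealm() + " listening on "
          + kdc.getHost() + ":" + kdc.getPort());
    } finally {
      kdc.stop();                              // un-sets the system properties
    }
  }
}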


@@ -0,0 +1,31 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# STDOUT Appender
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.err
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n
log4j.rootLogger=INFO, stdout
# Switching off most of ApacheDS logging, which is QUITE verbose
log4j.logger.org.apache.directory=OFF
log4j.logger.org.apache.directory.server.kerberos=INFO, stdout
log4j.additivity.org.apache.directory=false
log4j.logger.net.sf.ehcache=INFO, stdout


@@ -0,0 +1,25 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
[libdefaults]
  default_realm = {0}
  udp_preference_limit = 1

[realms]
  {0} = '{'
    kdc = {1}:{2}
  '}'
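The {0}-{3} tokens and the quoted '{' '}' literals are java.text.MessageFormat syntax: initKDCServer appends "{3}" (the line separator) to every line of this template and then formats it. A sketch of the expansion with example values (the port number is made up):

import java.text.MessageFormat;

public class Krb5ConfTemplateSketch {
  public static void main(String[] args) {
    // In a MessageFormat pattern a literal brace must be quoted as '{' / '}',
    // which is why the [realms] stanza braces look odd in the resource file.
    String template = "[libdefaults]{3}"
        + "  default_realm = {0}{3}"
        + "  udp_preference_limit = 1{3}"
        + "[realms]{3}"
        + "  {0} = '{'{3}"
        + "    kdc = {1}:{2}{3}"
        + "  '}'{3}";
    System.out.println(MessageFormat.format(template,
        "EXAMPLE.COM", "localhost", "60088",    // {0} realm, {1} host, {2} port
        System.getProperty("line.separator"))); // {3} newline
  }
}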


@@ -0,0 +1,47 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
dn: ou=users,dc=${0},dc=${1}
objectClass: organizationalUnit
objectClass: top
ou: users

dn: uid=krbtgt,ou=users,dc=${0},dc=${1}
objectClass: top
objectClass: person
objectClass: inetOrgPerson
objectClass: krb5principal
objectClass: krb5kdcentry
cn: KDC Service
sn: Service
uid: krbtgt
userPassword: secret
krb5PrincipalName: krbtgt/${2}.${3}@${2}.${3}
krb5KeyVersionNumber: 0

dn: uid=ldap,ou=users,dc=${0},dc=${1}
objectClass: top
objectClass: person
objectClass: inetOrgPerson
objectClass: krb5principal
objectClass: krb5kdcentry
cn: LDAP
sn: Service
uid: ldap
userPassword: secret
krb5PrincipalName: ldap/${4}@${2}.${3}
krb5KeyVersionNumber: 0
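Unlike the krb5.conf template above, the ${0}-${4} tokens here are commons-lang StrSubstitutor keys; initKDCServer builds the value map before loading this file. A sketch of the substitution using the default configuration values:

import org.apache.commons.lang.text.StrSubstitutor;

import java.util.HashMap;
import java.util.Map;

public class LdiffTemplateSketch {
  public static void main(String[] args) {
    // Same keys MiniKdc#initKDCServer puts in the map for minikdc.ldiff.
    Map<String, String> map = new HashMap<String, String>();
    map.put("0", "example");   // org.name, lower case
    map.put("1", "com");       // org.domain, lower case
    map.put("2", "EXAMPLE");   // org.name, upper case
    map.put("3", "COM");       // org.domain, upper case
    map.put("4", "localhost"); // kdc.bind.address
    String line = "krb5PrincipalName: krbtgt/${2}.${3}@${2}.${3}";
    // prints: krb5PrincipalName: krbtgt/EXAMPLE.COM@EXAMPLE.COM
    System.out.println(StrSubstitutor.replace(line, map));
  }
}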


@@ -0,0 +1,163 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.minikdc;
import org.apache.directory.server.kerberos.shared.keytab.Keytab;
import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
import org.junit.Assert;
import org.junit.Test;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import java.io.File;
import java.security.Principal;
import java.util.*;
public class TestMiniKdc extends KerberosSecurityTestcase {
@Test
public void testMiniKdcStart() {
MiniKdc kdc = getKdc();
Assert.assertTrue(kdc.getPort() > 0);
}
@Test
public void testKeytabGen() throws Exception {
MiniKdc kdc = getKdc();
File workDir = getWorkDir();
kdc.createPrincipal(new File(workDir, "keytab"), "foo/bar", "bar/foo");
Keytab kt = Keytab.read(new File(workDir, "keytab"));
Set<String> principals = new HashSet<String>();
for (KeytabEntry entry : kt.getEntries()) {
principals.add(entry.getPrincipalName());
}
//here principals use \ instead of /
//because org.apache.directory.server.kerberos.shared.keytab.KeytabDecoder
//.getPrincipalName(IoBuffer buffer) uses \\ when it generates the principal
Assert.assertEquals(new HashSet<String>(Arrays.asList(
"foo\\bar@" + kdc.getRealm(), "bar\\foo@" + kdc.getRealm())),
principals);
}
private static class KerberosConfiguration extends Configuration {
private String principal;
private String keytab;
private boolean isInitiator;
private KerberosConfiguration(String principal, File keytab,
boolean client) {
this.principal = principal;
this.keytab = keytab.getAbsolutePath();
this.isInitiator = client;
}
public static Configuration createClientConfig(String principal,
File keytab) {
return new KerberosConfiguration(principal, keytab, true);
}
public static Configuration createServerConfig(String principal,
File keytab) {
return new KerberosConfiguration(principal, keytab, false);
}
private static String getKrb5LoginModuleName() {
return System.getProperty("java.vendor").contains("IBM")
? "com.ibm.security.auth.module.Krb5LoginModule"
: "com.sun.security.auth.module.Krb5LoginModule";
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, String> options = new HashMap<String, String>();
options.put("keyTab", keytab);
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("doNotPrompt", "true");
options.put("useTicketCache", "true");
options.put("renewTGT", "true");
options.put("refreshKrb5Config", "true");
options.put("isInitiator", Boolean.toString(isInitiator));
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
options.put("ticketCache", ticketCache);
}
options.put("debug", "true");
return new AppConfigurationEntry[]{
new AppConfigurationEntry(getKrb5LoginModuleName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options)};
}
}
@Test
public void testKerberosLogin() throws Exception {
MiniKdc kdc = getKdc();
File workDir = getWorkDir();
LoginContext loginContext = null;
try {
String principal = "foo";
File keytab = new File(workDir, "foo.keytab");
kdc.createPrincipal(keytab, principal);
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(principal));
//client login
Subject subject = new Subject(false, principals, new HashSet<Object>(),
new HashSet<Object>());
loginContext = new LoginContext("", subject, null,
KerberosConfiguration.createClientConfig(principal, keytab));
loginContext.login();
subject = loginContext.getSubject();
Assert.assertEquals(1, subject.getPrincipals().size());
Assert.assertEquals(KerberosPrincipal.class,
subject.getPrincipals().iterator().next().getClass());
Assert.assertEquals(principal + "@" + kdc.getRealm(),
subject.getPrincipals().iterator().next().getName());
loginContext.logout();
//server login
subject = new Subject(false, principals, new HashSet<Object>(),
new HashSet<Object>());
loginContext = new LoginContext("", subject, null,
KerberosConfiguration.createServerConfig(principal, keytab));
loginContext.login();
subject = loginContext.getSubject();
Assert.assertEquals(1, subject.getPrincipals().size());
Assert.assertEquals(KerberosPrincipal.class,
subject.getPrincipals().iterator().next().getClass());
Assert.assertEquals(principal + "@" + kdc.getRealm(),
subject.getPrincipals().iterator().next().getName());
loginContext.logout();
} finally {
if (loginContext != null) {
loginContext.logout();
}
}
}
}


@@ -36,6 +36,7 @@
    <module>hadoop-common</module>
    <module>hadoop-annotations</module>
    <module>hadoop-nfs</module>
    <module>hadoop-minikdc</module>
  </modules>

  <build>


@@ -293,6 +293,12 @@
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minikdc</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>