HADOOP-12911. Upgrade Hadoop MiniKDC with Kerby. Contributed by Jiajia Li
parent 34cc21f6d1
commit 916140604f
@@ -118,29 +118,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-kerberos-codec</artifactId>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-asn1-ber</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-i18n</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-model</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>net.sf.ehcache</groupId>
-          <artifactId>ehcache-core</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
@@ -154,6 +131,11 @@
       <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
   </dependencies>

   <build>
@@ -33,8 +33,8 @@ import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;

-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.ietf.jgss.GSSException;
 import org.ietf.jgss.Oid;

@@ -200,14 +200,14 @@ public class KerberosUtil {
    * If keytab entries cannot be read from the file.
    */
   static final String[] getPrincipalNames(String keytabFileName) throws IOException {
-    Keytab keytab = Keytab.read(new File(keytabFileName));
+    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
     Set<String> principals = new HashSet<String>();
-    List<KeytabEntry> entries = keytab.getEntries();
-    for (KeytabEntry entry: entries){
-      principals.add(entry.getPrincipalName().replace("\\", "/"));
+    List<PrincipalName> entries = keytab.getPrincipals();
+    for (PrincipalName entry : entries) {
+      principals.add(entry.getName().replace("\\", "/"));
     }
     return principals.toArray(new String[0]);
   }

   /**
    * Get all the unique principals from keytabfile which matches a pattern.
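Note on the new API: Kerby's Keytab is keyed by principal rather than exposing a flat entry list, so principal names now come straight from getPrincipals(). A minimal standalone sketch of the same pattern, using only the Kerby calls visible in this hunk; the keytab path is an invented example value:

import java.io.File;
import java.io.IOException;
import org.apache.kerby.kerberos.kerb.keytab.Keytab;
import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

public class ListKeytabPrincipals {
  public static void main(String[] args) throws IOException {
    // Load the keytab the same way the patched getPrincipalNames() does.
    Keytab keytab = Keytab.loadKeytab(new File("/tmp/test.keytab"));
    // Kerby returns one PrincipalName per principal stored in the file.
    for (PrincipalName principal : keytab.getPrincipals()) {
      System.out.println(principal.getName());
    }
  }
}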
@@ -18,7 +18,6 @@ import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
@@ -25,11 +25,12 @@ import java.util.List;
 import java.util.Locale;
 import java.util.regex.Pattern;

-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -102,8 +103,9 @@ public class TestKerberosUtil {
     try {
       KerberosUtil.getPrincipalNames(testKeytab);
       Assert.fail("Exception should have been thrown");
-    } catch (IOException e) {
+    } catch (IllegalArgumentException e) {
       //expects exception
+    } catch (IOException e) {
     }
   }

@@ -166,14 +168,14 @@ public class TestKerberosUtil {
       // duplicate principals
       for (int kvno=1; kvno <= 3; kvno++) {
         EncryptionKey key = new EncryptionKey(
-            EncryptionType.UNKNOWN, "samplekey1".getBytes(), kvno);
+            EncryptionType.NONE, "samplekey1".getBytes(), kvno);
         KeytabEntry keytabEntry = new KeytabEntry(
-            principal, 1 , new KerberosTime(), (byte) 1, key);
+            new PrincipalName(principal), new KerberosTime(), (byte) 1, key);
         lstEntries.add(keytabEntry);
       }
     }
-    Keytab keytab = Keytab.getInstance();
-    keytab.setEntries(lstEntries);
-    keytab.write(new File(testKeytab));
+    Keytab keytab = new Keytab();
+    keytab.addKeytabEntries(lstEntries);
+    keytab.store(new File(testKeytab));
   }
 }
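For the write side, the test now builds entries with Kerby types and stores the file directly. A self-contained sketch of that pattern; the principal name, key bytes and output path are invented example values:

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.kerby.kerberos.kerb.keytab.Keytab;
import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
import org.apache.kerby.kerberos.kerb.type.KerberosTime;
import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

public class WriteSampleKeytab {
  public static void main(String[] args) throws IOException {
    List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
    // One entry per key version, mirroring the loop in the test above.
    for (int kvno = 1; kvno <= 3; kvno++) {
      EncryptionKey key = new EncryptionKey(
          EncryptionType.NONE, "samplekey1".getBytes(), kvno);
      entries.add(new KeytabEntry(
          new PrincipalName("HTTP/localhost@EXAMPLE.COM"),
          new KerberosTime(), (byte) 1, key));
    }
    // Keytab is instantiated directly instead of Keytab.getInstance(),
    // and store() replaces write().
    Keytab keytab = new Keytab();
    keytab.addKeytabEntries(entries);
    keytab.store(new File("/tmp/sample.keytab"));
  }
}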
@@ -295,6 +295,11 @@
       <artifactId>bcprov-jdk16</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
   </dependencies>

   <build>
@@ -19,9 +19,6 @@
 package org.apache.hadoop.security;

 import org.apache.commons.io.IOUtils;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -33,6 +30,10 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -544,16 +545,25 @@ public class KDiag extends Configured implements Tool, Closeable {
     title("Examining keytab %s", keytabFile);
     File kt = keytabFile.getCanonicalFile();
     verifyFileIsValid(kt, CAT_KERBEROS, "keytab");
-    List<KeytabEntry> entries = Keytab.read(kt).getEntries();
-    println("keytab entry count: %d", entries.size());
-    for (KeytabEntry entry : entries) {
-      EncryptionKey key = entry.getKey();
-      println("  %s: version=%d expires=%s encryption=%s",
-          entry.getPrincipalName(),
-          entry.getKeyVersion(),
-          entry.getTimeStamp(),
-          key.getKeyType());
+    Keytab loadKeytab = Keytab.loadKeytab(kt);
+    List<PrincipalName> principals = loadKeytab.getPrincipals();
+    println("keytab princial count: %d", principals.size());
+    int entrySize = 0;
+    for (PrincipalName princ : principals) {
+      List<KeytabEntry> entries = loadKeytab.getKeytabEntries(princ);
+      entrySize = entrySize + entries.size();
+      for (KeytabEntry entry : entries) {
+        EncryptionKey key = entry.getKey();
+        println("  %s: version=%d expires=%s encryption=%s",
+            entry.getPrincipal(),
+            entry.getKvno(),
+            entry.getTimestamp(),
+            key.getKeyType());
+      }
     }
+    println("keytab entry count: %d", entrySize);

     endln();
   }
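Because Kerby has no flat getEntries() view, per-entry details (kvno, timestamp, encryption type) are now reached per principal through getKeytabEntries(), as the rewritten loop does. The same traversal in isolation, again using only calls shown in the hunk; the keytab path is illustrative only:

import java.io.File;
import java.io.IOException;
import org.apache.kerby.kerberos.kerb.keytab.Keytab;
import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

public class DumpKeytabEntries {
  public static void main(String[] args) throws IOException {
    Keytab keytab = Keytab.loadKeytab(new File("/etc/krb5.keytab"));
    int entryCount = 0;
    for (PrincipalName principal : keytab.getPrincipals()) {
      // Each principal can carry several entries (one per kvno/enctype).
      for (KeytabEntry entry : keytab.getKeytabEntries(principal)) {
        entryCount++;
        System.out.printf("%s: version=%d timestamp=%s encryption=%s%n",
            entry.getPrincipal(), entry.getKvno(),
            entry.getTimestamp(), entry.getKey().getKeyType());
      }
    }
    System.out.println("total entries: " + entryCount);
  }
}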
@@ -17,20 +17,21 @@
 #

 [libdefaults]
   default_realm = EXAMPLE.COM
   allow_weak_crypto = true
-  default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
-  default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+  kdc_realm = _REALM_
+  udp_preference_limit = _UDP_LIMIT_
+  #_KDC_TCP_PORT_
+  #_KDC_UDP_PORT_

 [realms]
-  EXAMPLE.COM = {
-    kdc = localhost:60088
+  _REALM_ = {
+    kdc = localhost:_KDC_PORT_
   }

 [domain_realm]
-  .example.com = EXAMPLE.COM
-  example.com = EXAMPLE.COM
+  .example.com = _REALM_
+  example.com = _REALM_
 [login]
   krb4_convert = true
   krb4_get_tickets = false
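The fixed DES enctype lists are dropped and the realm, KDC port and UDP limit become placeholders that get substituted when the test KDC is set up. Purely as an illustration, with hypothetical values (realm EXAMPLE.COM, KDC port 12345, UDP limit 1) the substituted sections would read roughly:

[libdefaults]
  default_realm = EXAMPLE.COM
  allow_weak_crypto = true
  kdc_realm = EXAMPLE.COM
  udp_preference_limit = 1

[realms]
  EXAMPLE.COM = {
    kdc = localhost:12345
  }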
@@ -1572,7 +1572,6 @@ public class TestKMS {
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
         conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
         final URI uri = createKMSUri(getKMSUrl());

         doAs("client", new PrivilegedExceptionAction<Void>() {
@@ -1698,7 +1697,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         final URI uri = createKMSUri(getKMSUrl());
         final Credentials credentials = new Credentials();
         final UserGroupInformation nonKerberosUgi =
@@ -1882,7 +1881,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         final URI uri = createKMSUri(getKMSUrl());

         UserGroupInformation proxyUgi = null;
@@ -1987,7 +1986,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         final URI uri = createKMSUri(getKMSUrl());

         UserGroupInformation proxyUgi = null;
@@ -36,110 +36,9 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-core-api</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-interceptor-kerberos</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-shared</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-kerberos</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-ldif-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-mavibot-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.api</groupId>
-      <artifactId>api-all</artifactId>
-      <version>1.0.0-M20</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>xml-apis</groupId>
-          <artifactId>xml-apis</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>xpp3</groupId>
-          <artifactId>xpp3</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>dom4j</groupId>
-          <artifactId>dom4j</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-jdbm-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-ldap</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
@@ -18,65 +18,25 @@

 package org.apache.hadoop.minikdc;

 import org.apache.commons.io.Charsets;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.text.StrSubstitutor;
-import org.apache.directory.api.ldap.model.schema.SchemaManager;
-import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
-import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
-import org.apache.directory.server.constants.ServerDNConstants;
-import org.apache.directory.server.core.DefaultDirectoryService;
-import org.apache.directory.server.core.api.CacheService;
-import org.apache.directory.server.core.api.DirectoryService;
-import org.apache.directory.server.core.api.InstanceLayout;
-import org.apache.directory.server.core.api.schema.SchemaPartition;
-import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor;
-import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
-import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
-import org.apache.directory.server.core.partition.ldif.LdifPartition;
-import org.apache.directory.server.kerberos.KerberosConfig;
-import org.apache.directory.server.kerberos.kdc.KdcServer;
-import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.server.protocol.shared.transport.AbstractTransport;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.directory.server.protocol.shared.transport.UdpTransport;
-import org.apache.directory.server.xdbm.Index;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
-import org.apache.directory.api.ldap.model.entry.DefaultEntry;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.ldif.LdifEntry;
-import org.apache.directory.api.ldap.model.ldif.LdifReader;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.apache.kerby.util.IOUtil;
+import org.apache.kerby.util.NetworkUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
-import java.io.StringReader;
-import java.lang.reflect.Method;
-import java.net.InetSocketAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
-import java.util.UUID;

 /**
  * Mini KDC based on Apache Directory Server that can be embedded in testcases
@@ -84,9 +44,8 @@ import java.util.UUID;
  * <p>
  * <b>From within testcases:</b>
  * <p>
- * MiniKdc sets 2 System properties when started and un-sets them when stopped:
+ * MiniKdc sets one System property when started and un-set when stopped:
  * <ul>
- * <li>java.security.krb5.conf: set to the MiniKDC real/host/port</li>
  * <li>sun.security.krb5.debug: set to the debug value provided in the
  * configuration</li>
  * </ul>
@@ -116,7 +75,7 @@ public class MiniKdc {
   public static final String SUN_SECURITY_KRB5_DEBUG =
       "sun.security.krb5.debug";

   public static void main(String[] args) throws Exception {
     if (args.length < 4) {
       System.out.println("Arguments: <WORKDIR> <MINIKDCPROPERTIES> " +
               "<KEYTABFILE> [<PRINCIPALS>]+");
@@ -229,13 +188,17 @@ public class MiniKdc {
   }

   private Properties conf;
-  private DirectoryService ds;
-  private KdcServer kdc;
+  private SimpleKdcServer simpleKdc;
   private int port;
   private String realm;
   private File workDir;
   private File krb5conf;
+  private String transport;
+  private boolean krb5Debug;
+
+  public void setTransport(String transport) {
+    this.transport = transport;
+  }

   /**
    * Creates a MiniKdc.
    *
|
@ -253,9 +216,9 @@ public class MiniKdc {
|
||||||
+ missingProperties);
|
+ missingProperties);
|
||||||
}
|
}
|
||||||
this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
|
this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
|
||||||
if (! workDir.exists()
|
if (!this.workDir.exists()
|
||||||
&& ! workDir.mkdirs()) {
|
&& !this.workDir.mkdirs()) {
|
||||||
throw new RuntimeException("Cannot create directory " + workDir);
|
throw new RuntimeException("Cannot create directory " + this.workDir);
|
||||||
}
|
}
|
||||||
LOG.info("Configuration:");
|
LOG.info("Configuration:");
|
||||||
LOG.info("---------------------------------------------------------------");
|
LOG.info("---------------------------------------------------------------");
|
||||||
|
@@ -299,6 +262,7 @@ public class MiniKdc {
   }

   public File getKrb5conf() {
+    krb5conf = new File(System.getProperty(JAVA_SECURITY_KRB5_CONF));
     return krb5conf;
   }

@@ -308,226 +272,81 @@ public class MiniKdc {
    * @throws Exception thrown if the MiniKdc could not be started.
    */
   public synchronized void start() throws Exception {
-    if (kdc != null) {
+    if (simpleKdc != null) {
       throw new RuntimeException("Already started");
     }
-    initDirectoryService();
-    initKDCServer();
+    simpleKdc = new SimpleKdcServer();
+    prepareKdcServer();
+    simpleKdc.init();
+    resetDefaultRealm();
+    simpleKdc.start();
+    LOG.info("MiniKdc stated.");
   }

-  private void initDirectoryService() throws Exception {
-    ds = new DefaultDirectoryService();
-    ds.setInstanceLayout(new InstanceLayout(workDir));
-
-    CacheService cacheService = new CacheService();
-    ds.setCacheService(cacheService);
-
-    // first load the schema
-    InstanceLayout instanceLayout = ds.getInstanceLayout();
-    File schemaPartitionDirectory = new File(
-            instanceLayout.getPartitionsDirectory(), "schema");
-    SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor(
-            instanceLayout.getPartitionsDirectory());
-    extractor.extractOrCopy();
-
-    SchemaLoader loader = new LdifSchemaLoader(schemaPartitionDirectory);
-    SchemaManager schemaManager = new DefaultSchemaManager(loader);
-    schemaManager.loadAllEnabled();
-    ds.setSchemaManager(schemaManager);
-    // Init the LdifPartition with schema
-    LdifPartition schemaLdifPartition = new LdifPartition(schemaManager);
-    schemaLdifPartition.setPartitionPath(schemaPartitionDirectory.toURI());
-
-    // The schema partition
-    SchemaPartition schemaPartition = new SchemaPartition(schemaManager);
-    schemaPartition.setWrappedPartition(schemaLdifPartition);
-    ds.setSchemaPartition(schemaPartition);
-
-    JdbmPartition systemPartition = new JdbmPartition(ds.getSchemaManager());
-    systemPartition.setId("system");
-    systemPartition.setPartitionPath(new File(
-            ds.getInstanceLayout().getPartitionsDirectory(),
-            systemPartition.getId()).toURI());
-    systemPartition.setSuffixDn(new Dn(ServerDNConstants.SYSTEM_DN));
-    systemPartition.setSchemaManager(ds.getSchemaManager());
-    ds.setSystemPartition(systemPartition);
-
-    ds.getChangeLog().setEnabled(false);
-    ds.setDenormalizeOpAttrsEnabled(true);
-    ds.addLast(new KeyDerivationInterceptor());
-
-    // create one partition
-    String orgName= conf.getProperty(ORG_NAME).toLowerCase(Locale.ENGLISH);
-    String orgDomain = conf.getProperty(ORG_DOMAIN).toLowerCase(Locale.ENGLISH);
-
-    JdbmPartition partition = new JdbmPartition(ds.getSchemaManager());
-    partition.setId(orgName);
-    partition.setPartitionPath(new File(
-            ds.getInstanceLayout().getPartitionsDirectory(), orgName).toURI());
-    partition.setSuffixDn(new Dn("dc=" + orgName + ",dc=" + orgDomain));
-    ds.addPartition(partition);
-    // indexes
-    Set<Index<?, ?, String>> indexedAttributes = new HashSet<Index<?, ?, String>>();
-    indexedAttributes.add(new JdbmIndex<String, Entry>("objectClass", false));
-    indexedAttributes.add(new JdbmIndex<String, Entry>("dc", false));
-    indexedAttributes.add(new JdbmIndex<String, Entry>("ou", false));
-    partition.setIndexedAttributes(indexedAttributes);
-
-    // And start the ds
-    ds.setInstanceId(conf.getProperty(INSTANCE));
-    ds.startup();
-    // context entry, after ds.startup()
-    Dn dn = new Dn("dc=" + orgName + ",dc=" + orgDomain);
-    Entry entry = ds.newEntry(dn);
-    entry.add("objectClass", "top", "domain");
-    entry.add("dc", orgName);
-    ds.getAdminSession().add(entry);
-  }
-
-  /**
-   * Convenience method that returns a resource as inputstream from the
-   * classpath.
-   * <p>
-   * It first attempts to use the Thread's context classloader and if not
-   * set it uses the class' classloader.
-   *
-   * @param resourceName resource to retrieve.
-   *
-   * @throws IOException thrown if resource cannot be loaded
-   * @return inputstream with the resource.
-   */
-  public static InputStream getResourceAsStream(String resourceName)
-      throws IOException {
-    ClassLoader cl = Thread.currentThread().getContextClassLoader();
-    if (cl == null) {
-      cl = MiniKdc.class.getClassLoader();
-    }
-    InputStream is = cl.getResourceAsStream(resourceName);
-    if (is == null) {
-      throw new IOException("Can not read resource file '" +
-          resourceName + "'");
-    }
-    return is;
-  }
-
-  private void initKDCServer() throws Exception {
-    String orgName= conf.getProperty(ORG_NAME);
-    String orgDomain = conf.getProperty(ORG_DOMAIN);
-    String bindAddress = conf.getProperty(KDC_BIND_ADDRESS);
-    final Map<String, String> map = new HashMap<String, String>();
-    map.put("0", orgName.toLowerCase(Locale.ENGLISH));
-    map.put("1", orgDomain.toLowerCase(Locale.ENGLISH));
-    map.put("2", orgName.toUpperCase(Locale.ENGLISH));
-    map.put("3", orgDomain.toUpperCase(Locale.ENGLISH));
-    map.put("4", bindAddress);
-
-    InputStream is1 = getResourceAsStream("minikdc.ldiff");
-
-    SchemaManager schemaManager = ds.getSchemaManager();
-    LdifReader reader = null;
-
-    try {
-      final String content = StrSubstitutor.replace(IOUtils.toString(is1), map);
-      reader = new LdifReader(new StringReader(content));
-
-      for (LdifEntry ldifEntry : reader) {
-        ds.getAdminSession().add(new DefaultEntry(schemaManager,
-                ldifEntry.getEntry()));
-      }
-    } finally {
-      IOUtils.closeQuietly(reader);
-      IOUtils.closeQuietly(is1);
-    }
-
-    KerberosConfig kerberosConfig = new KerberosConfig();
-    kerberosConfig.setMaximumRenewableLifetime(Long.parseLong(conf
-            .getProperty(MAX_RENEWABLE_LIFETIME)));
-    kerberosConfig.setMaximumTicketLifetime(Long.parseLong(conf
-            .getProperty(MAX_TICKET_LIFETIME)));
-    kerberosConfig.setSearchBaseDn(String.format("dc=%s,dc=%s", orgName,
-            orgDomain));
-    kerberosConfig.setPaEncTimestampRequired(false);
-    kdc = new KdcServer(kerberosConfig);
-    kdc.setDirectoryService(ds);
-
+  private void resetDefaultRealm() throws IOException {
+    InputStream templateResource = new FileInputStream(
+        getKrb5conf().getAbsolutePath());
+    String content = IOUtil.readInput(templateResource);
+    content = content.replaceAll("default_realm = .*\n",
+        "default_realm = " + getRealm() + "\n");
+    IOUtil.writeFile(content, getKrb5conf());
+  }
+
+  private void prepareKdcServer() throws Exception {
     // transport
-    String transport = conf.getProperty(TRANSPORT);
-    AbstractTransport absTransport;
-    if (transport.trim().equals("TCP")) {
-      absTransport = new TcpTransport(bindAddress, port, 3, 50);
-    } else if (transport.trim().equals("UDP")) {
-      absTransport = new UdpTransport(port);
-    } else {
-      throw new IllegalArgumentException("Invalid transport: " + transport);
+    simpleKdc.setWorkDir(workDir);
+    simpleKdc.setKdcHost(getHost());
+    simpleKdc.setKdcRealm(realm);
+    if (transport == null) {
+      transport = conf.getProperty(TRANSPORT);
     }
-    kdc.addTransports(absTransport);
-    kdc.setServiceName(conf.getProperty(INSTANCE));
-    kdc.start();
-    // if using ephemeral port, update port number for binding
     if (port == 0) {
-      InetSocketAddress addr =
-              (InetSocketAddress)absTransport.getAcceptor().getLocalAddress();
-      port = addr.getPort();
+      port = NetworkUtil.getServerPort();
     }
-
-    StringBuilder sb = new StringBuilder();
-    InputStream is2 = getResourceAsStream("minikdc-krb5.conf");
-
-    BufferedReader r = null;
-
-    try {
-      r = new BufferedReader(new InputStreamReader(is2, Charsets.UTF_8));
-      String line = r.readLine();
-
-      while (line != null) {
-        sb.append(line).append("{3}");
-        line = r.readLine();
-      }
-    } finally {
-      IOUtils.closeQuietly(r);
-      IOUtils.closeQuietly(is2);
-    }
-
-    krb5conf = new File(workDir, "krb5.conf").getAbsoluteFile();
-    FileUtils.writeStringToFile(krb5conf,
-        MessageFormat.format(sb.toString(), getRealm(), getHost(),
-            Integer.toString(getPort()), System.getProperty("line.separator")));
-    System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5conf.getAbsolutePath());
-
-    System.setProperty(SUN_SECURITY_KRB5_DEBUG, conf.getProperty(DEBUG,
-            "false"));
-
-    // refresh the config
-    Class<?> classRef;
-    if (System.getProperty("java.vendor").contains("IBM")) {
-      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
+    if (transport != null) {
+      if (transport.trim().equals("TCP")) {
+        simpleKdc.setKdcTcpPort(port);
+        simpleKdc.setAllowUdp(false);
+      } else if (transport.trim().equals("UDP")) {
+        simpleKdc.setKdcUdpPort(port);
+        simpleKdc.setAllowTcp(false);
+      } else {
+        throw new IllegalArgumentException("Invalid transport: " + transport);
+      }
     } else {
-      classRef = Class.forName("sun.security.krb5.Config");
+      throw new IllegalArgumentException("Need to set transport!");
     }
-    Method refreshMethod = classRef.getMethod("refresh", new Class[0]);
-    refreshMethod.invoke(classRef, new Object[0]);
-
-    LOG.info("MiniKdc listening at port: {}", getPort());
-    LOG.info("MiniKdc setting JVM krb5.conf to: {}",
-            krb5conf.getAbsolutePath());
+    simpleKdc.getKdcConfig().setString(KdcConfigKey.KDC_SERVICE_NAME,
+        conf.getProperty(INSTANCE));
+    if (conf.getProperty(DEBUG) != null) {
+      krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG, conf.getProperty(DEBUG));
+    }
   }

   /**
    * Stops the MiniKdc
    */
   public synchronized void stop() {
-    if (kdc != null) {
-      System.getProperties().remove(JAVA_SECURITY_KRB5_CONF);
-      System.getProperties().remove(SUN_SECURITY_KRB5_DEBUG);
-      kdc.stop();
+    if (simpleKdc != null) {
       try {
-        ds.shutdown();
-      } catch (Exception ex) {
-        LOG.error("Could not shutdown ApacheDS properly: {}", ex.toString(),
-            ex);
+        simpleKdc.stop();
+      } catch (KrbException e) {
+        e.printStackTrace();
+      } finally {
+        if(conf.getProperty(DEBUG) != null) {
+          System.setProperty(SUN_SECURITY_KRB5_DEBUG,
+              Boolean.toString(krb5Debug));
+        }
       }
     }
     delete(workDir);
+    try {
+      // Will be fixed in next Kerby version.
+      Thread.sleep(1000);
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    }
+    LOG.info("MiniKdc stopped.");
   }

   private void delete(File f) {
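The new lifecycle is SimpleKdcServer init/start/stop instead of an embedded ApacheDS directory service plus KdcServer. A rough standalone sketch built only from the Kerby calls MiniKdc now makes; the work directory, realm and principal values are invented for the example:

import java.io.File;
import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
import org.apache.kerby.util.NetworkUtil;

public class EmbeddedKdcSketch {
  public static void main(String[] args) throws Exception {
    SimpleKdcServer kdc = new SimpleKdcServer();
    File workDir = new File("/tmp/minikdc-work");   // invented path
    workDir.mkdirs();
    kdc.setWorkDir(workDir);
    kdc.setKdcHost("localhost");
    kdc.setKdcRealm("EXAMPLE.COM");                 // invented realm
    kdc.setKdcTcpPort(NetworkUtil.getServerPort()); // pick a free port, as prepareKdcServer() does
    kdc.setAllowUdp(false);                         // TCP-only, matching TRANSPORT=TCP
    kdc.init();                                     // init() comes before start(), mirroring MiniKdc.start()
    kdc.start();
    try {
      kdc.createPrincipal("client", "secret");      // invented principal/password
    } finally {
      kdc.stop();
    }
  }
}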
@@ -554,55 +373,39 @@ public class MiniKdc {
    */
   public synchronized void createPrincipal(String principal, String password)
       throws Exception {
-    String orgName= conf.getProperty(ORG_NAME);
-    String orgDomain = conf.getProperty(ORG_DOMAIN);
-    String baseDn = "ou=users,dc=" + orgName.toLowerCase(Locale.ENGLISH)
-        + ",dc=" + orgDomain.toLowerCase(Locale.ENGLISH);
-    String content = "dn: uid=" + principal + "," + baseDn + "\n" +
-        "objectClass: top\n" +
-        "objectClass: person\n" +
-        "objectClass: inetOrgPerson\n" +
-        "objectClass: krb5principal\n" +
-        "objectClass: krb5kdcentry\n" +
-        "cn: " + principal + "\n" +
-        "sn: " + principal + "\n" +
-        "uid: " + principal + "\n" +
-        "userPassword: " + password + "\n" +
-        "krb5PrincipalName: " + principal + "@" + getRealm() + "\n" +
-        "krb5KeyVersionNumber: 0";
-
-    for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) {
-      ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(),
-          ldifEntry.getEntry()));
-    }
+    simpleKdc.createPrincipal(principal, password);
   }

   /**
    * Creates multiple principals in the KDC and adds them to a keytab file.
    *
-   * @param keytabFile keytab file to add the created principal.s
+   * @param keytabFile keytab file to add the created principals.
    * @param principals principals to add to the KDC, do not include the domain.
    * @throws Exception thrown if the principals or the keytab file could not be
    * created.
    */
-  public void createPrincipal(File keytabFile, String ... principals)
+  public synchronized void createPrincipal(File keytabFile,
+                                           String ... principals)
       throws Exception {
-    String generatedPassword = UUID.randomUUID().toString();
-    Keytab keytab = new Keytab();
-    List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
-    for (String principal : principals) {
-      createPrincipal(principal, generatedPassword);
-      principal = principal + "@" + getRealm();
-      KerberosTime timestamp = new KerberosTime();
-      for (Map.Entry<EncryptionType, EncryptionKey> entry : KerberosKeyFactory
-          .getKerberosKeys(principal, generatedPassword).entrySet()) {
-        EncryptionKey ekey = entry.getValue();
-        byte keyVersion = (byte) ekey.getKeyVersion();
-        entries.add(new KeytabEntry(principal, 1L, timestamp, keyVersion,
-            ekey));
-      }
+    simpleKdc.createPrincipals(principals);
+    if (keytabFile.exists() && !keytabFile.delete()) {
+      LOG.error("Failed to delete keytab file: " + keytabFile);
     }
-    keytab.setEntries(entries);
-    keytab.write(keytabFile);
+    for (String principal : principals) {
+      simpleKdc.getKadmin().exportKeytab(keytabFile, principal);
+    }
+  }
+
+  /**
+   * Set the System property; return the old value for caching.
+   *
+   * @param sysprop property
+   * @param debug true or false
+   * @return the previous value
+   */
+  private boolean getAndSet(String sysprop, String debug) {
+    boolean old = Boolean.getBoolean(sysprop);
+    System.setProperty(sysprop, debug);
+    return old;
   }
 }
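The MiniKdc surface that tests call stays the same; only the keytab export underneath changes to createPrincipals() plus Kadmin.exportKeytab() per principal. A rough usage sketch, assuming MiniKdc's default configuration from its createConf() helper (which is expected to include a transport setting) and an invented work directory:

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcUsageSketch {
  public static void main(String[] args) throws Exception {
    File workDir = new File("/tmp/minikdc-test");  // invented location
    Properties conf = MiniKdc.createConf();        // default MiniKdc properties (assumed helper)
    MiniKdc kdc = new MiniKdc(conf, workDir);
    kdc.start();
    try {
      // Creates the principals in the Kerby KDC and exports them into one keytab,
      // via SimpleKdcServer.createPrincipals() + Kadmin.exportKeytab() as above.
      File keytab = new File(workDir, "keytab");
      kdc.createPrincipal(keytab, "foo/bar", "bar/foo");
      System.out.println("Realm: " + kdc.getRealm() + ", port: " + kdc.getPort());
    } finally {
      kdc.stop();
    }
  }
}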
@@ -1,25 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-[libdefaults]
-    default_realm = {0}
-    udp_preference_limit = 1
-
-[realms]
-    {0} = '{'
-        kdc = {1}:{2}
-    '}'
@@ -1,47 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-dn: ou=users,dc=${0},dc=${1}
-objectClass: organizationalUnit
-objectClass: top
-ou: users
-
-dn: uid=krbtgt,ou=users,dc=${0},dc=${1}
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: KDC Service
-sn: Service
-uid: krbtgt
-userPassword: secret
-krb5PrincipalName: krbtgt/${2}.${3}@${2}.${3}
-krb5KeyVersionNumber: 0
-
-dn: uid=ldap,ou=users,dc=${0},dc=${1}
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: LDAP
-sn: Service
-uid: ldap
-userPassword: secret
-krb5PrincipalName: ldap/${4}@${2}.${3}
-krb5KeyVersionNumber: 0
@@ -18,8 +18,8 @@

 package org.apache.hadoop.minikdc;

-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.junit.Assert;
 import org.junit.Test;

@@ -30,6 +30,7 @@ import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import java.io.File;
 import java.security.Principal;
+import java.util.List;
 import java.util.Set;
 import java.util.Map;
 import java.util.HashSet;
@@ -51,16 +52,16 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
     File workDir = getWorkDir();

     kdc.createPrincipal(new File(workDir, "keytab"), "foo/bar", "bar/foo");
-    Keytab kt = Keytab.read(new File(workDir, "keytab"));
+    List<PrincipalName> principalNameList =
+        Keytab.loadKeytab(new File(workDir, "keytab")).getPrincipals();
+
     Set<String> principals = new HashSet<String>();
-    for (KeytabEntry entry : kt.getEntries()) {
-      principals.add(entry.getPrincipalName());
+    for (PrincipalName principalName : principalNameList) {
+      principals.add(principalName.getName());
     }
-    //here principals use \ instead of /
-    //because org.apache.directory.server.kerberos.shared.keytab.KeytabDecoder
-    // .getPrincipalName(IoBuffer buffer) use \\ when generates principal
     Assert.assertEquals(new HashSet<String>(Arrays.asList(
-            "foo\\bar@" + kdc.getRealm(), "bar\\foo@" + kdc.getRealm())),
+            "foo/bar@" + kdc.getRealm(), "bar/foo@" + kdc.getRealm())),
             principals);
   }

@@ -17,21 +17,22 @@
 #

 [libdefaults]
   default_realm = EXAMPLE.COM
   allow_weak_crypto = true
-  default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
-  default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+  kdc_realm = _REALM_
+  udp_preference_limit = _UDP_LIMIT_
+  #_KDC_TCP_PORT_
+  #_KDC_UDP_PORT_

 [realms]
-  EXAMPLE.COM = {
-    kdc = localhost:60088
+  _REALM_ = {
+    kdc = localhost:_KDC_PORT_
   }

 [domain_realm]
-  .example.com = EXAMPLE.COM
-  example.com = EXAMPLE.COM
+  .example.com = _REALM_
+  example.com = _REALM_

 [login]
   krb4_convert = true
   krb4_get_tickets = false
@@ -953,12 +953,6 @@
         <version>1.8</version>
       </dependency>

-      <dependency>
-        <groupId>org.apache.directory.server</groupId>
-        <artifactId>apacheds-kerberos-codec</artifactId>
-        <version>2.0.0-M15</version>
-      </dependency>
-
       <dependency>
         <groupId>com.microsoft.azure</groupId>
         <artifactId>azure-storage</artifactId>
@@ -15,14 +15,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #

 [libdefaults]
   default_realm = APACHE.ORG
-  udp_preference_limit = 1
   extra_addresses = 127.0.0.1
+  kdc_realm = _REALM_
+  udp_preference_limit = _UDP_LIMIT_
+  #_KDC_TCP_PORT_
+  #_KDC_UDP_PORT_

 [realms]
-  APACHE.ORG = {
-    admin_server = localhost:88
-    kdc = localhost:88
+  _REALM_ = {
+    admin_server = localhost:_KDC_PORT_
+    kdc = localhost:_KDC_PORT_
   }
 [domain_realm]
-  localhost = APACHE.ORG
+  localhost = _REALM_