From 916140604ffef59466ba30832478311d3e6249bd Mon Sep 17 00:00:00 2001 From: Kai Zheng Date: Sat, 28 May 2016 14:23:39 +0800 Subject: [PATCH] HADOOP-12911. Upgrade Hadoop MiniKDC with Kerby. Contributed by Jiajia Li --- hadoop-common-project/hadoop-auth/pom.xml | 28 +- .../authentication/util/KerberosUtil.java | 18 +- .../TestKerberosAuthenticationHandler.java | 1 - .../authentication/util/TestKerberosUtil.java | 26 +- hadoop-common-project/hadoop-common/pom.xml | 5 + .../org/apache/hadoop/security/KDiag.java | 34 +- .../src/test/resources/krb5.conf | 23 +- .../hadoop/crypto/key/kms/server/TestKMS.java | 7 +- hadoop-common-project/hadoop-minikdc/pom.xml | 107 +---- .../org/apache/hadoop/minikdc/MiniKdc.java | 383 +++++------------- .../src/main/resources/minikdc-krb5.conf | 25 -- .../src/main/resources/minikdc.ldiff | 47 --- .../apache/hadoop/minikdc/TestMiniKdc.java | 19 +- .../hadoop-hdfs/src/test/resources/krb5.conf | 19 +- hadoop-project/pom.xml | 6 - .../src/test/resources/krb5.conf | 23 +- 16 files changed, 200 insertions(+), 571 deletions(-) delete mode 100644 hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf delete mode 100644 hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml index 172431daddf..27e4547f063 100644 --- a/hadoop-common-project/hadoop-auth/pom.xml +++ b/hadoop-common-project/hadoop-auth/pom.xml @@ -118,29 +118,6 @@ - - org.apache.directory.server - apacheds-kerberos-codec - compile - - - org.apache.directory.api - api-asn1-ber - - - org.apache.directory.api - api-i18n - - - org.apache.directory.api - api-ldap-model - - - net.sf.ehcache - ehcache-core - - - org.apache.zookeeper zookeeper @@ -154,6 +131,11 @@ curator-test test + + org.apache.kerby + kerb-simplekdc + 1.0.0-RC2 + diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java index fd257fccd96..6d33c2d86ee 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java @@ -33,8 +33,8 @@ import java.util.Locale; import java.util.Set; import java.util.regex.Pattern; -import org.apache.directory.server.kerberos.shared.keytab.Keytab; -import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry; +import org.apache.kerby.kerberos.kerb.keytab.Keytab; +import org.apache.kerby.kerberos.kerb.type.base.PrincipalName; import org.ietf.jgss.GSSException; import org.ietf.jgss.Oid; @@ -200,14 +200,14 @@ public class KerberosUtil { * If keytab entries cannot be read from the file. 
   */
  static final String[] getPrincipalNames(String keytabFileName)
      throws IOException {
-    Keytab keytab = Keytab.read(new File(keytabFileName));
-    Set<String> principals = new HashSet<String>();
-    List<KeytabEntry> entries = keytab.getEntries();
-    for (KeytabEntry entry: entries){
-      principals.add(entry.getPrincipalName().replace("\\", "/"));
-    }
-    return principals.toArray(new String[0]);
+    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
+    Set<String> principals = new HashSet<String>();
+    List<PrincipalName> entries = keytab.getPrincipals();
+    for (PrincipalName entry : entries) {
+      principals.add(entry.getName().replace("\\", "/"));
     }
+    return principals.toArray(new String[0]);
+  }
 
   /**
    * Get all the unique principals from keytabfile which matches a pattern.
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
index e3444ef4b92..e6723912492 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
@@ -18,7 +18,6 @@ import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
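For reference, a minimal, self-contained sketch (not part of this patch) of the
Kerby keytab API that the new getPrincipalNames() above relies on; the keytab
path used here is a made-up example:

    import java.io.File;
    import java.io.IOException;
    import org.apache.kerby.kerberos.kerb.keytab.Keytab;
    import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

    public class ListKeytabPrincipals {
      public static void main(String[] args) throws IOException {
        // Keytab.loadKeytab() is the Kerby replacement for the old
        // ApacheDS Keytab.read(); it returns one Keytab object whose
        // principals can then be enumerated.
        Keytab keytab = Keytab.loadKeytab(new File("test.keytab"));
        for (PrincipalName principal : keytab.getPrincipals()) {
          System.out.println(principal.getName());
        }
      }
    }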
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
index a0ae0258c4f..63df9eae947 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
@@ -25,11 +25,12 @@ import java.util.List;
 import java.util.Locale;
 import java.util.regex.Pattern;
 
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -96,14 +97,15 @@ public class TestKerberosUtil {
         KerberosUtil.getServicePrincipal(
             service, testHost.toLowerCase(Locale.US)));
   }
-
+
   @Test
   public void testGetPrincipalNamesMissingKeytab() {
     try {
       KerberosUtil.getPrincipalNames(testKeytab);
       Assert.fail("Exception should have been thrown");
-    } catch (IOException e) {
+    } catch (IllegalArgumentException e) {
       //expects exception
+    } catch (IOException e) {
     }
   }
 
@@ -166,14 +168,14 @@ public class TestKerberosUtil {
       // duplicate principals
       for (int kvno=1; kvno <= 3; kvno++) {
         EncryptionKey key = new EncryptionKey(
-            EncryptionType.UNKNOWN, "samplekey1".getBytes(), kvno);
+            EncryptionType.NONE, "samplekey1".getBytes(), kvno);
         KeytabEntry keytabEntry = new KeytabEntry(
-            principal, 1 , new KerberosTime(), (byte) 1, key);
+            new PrincipalName(principal), new KerberosTime(), (byte) 1, key);
         lstEntries.add(keytabEntry);
       }
     }
-    Keytab keytab = Keytab.getInstance();
-    keytab.setEntries(lstEntries);
-    keytab.write(new File(testKeytab));
+    Keytab keytab = new Keytab();
+    keytab.addKeytabEntries(lstEntries);
+    keytab.store(new File(testKeytab));
   }
 }
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index aa25af0edfa..8bf052c109a 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -295,6 +295,11 @@
       <artifactId>bcprov-jdk16</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
   </dependencies>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
index 6cef9627482..266bba0dd0f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
@@ -19,9 +19,6 @@
 package org.apache.hadoop.security;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -33,6 +30,10 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -544,16 +545,25 @@ public class KDiag extends Configured implements Tool, Closeable {
     title("Examining keytab %s", keytabFile);
     File kt = keytabFile.getCanonicalFile();
     verifyFileIsValid(kt, CAT_KERBEROS, "keytab");
-    List<KeytabEntry> entries = Keytab.read(kt).getEntries();
-    println("keytab entry count: %d", entries.size());
-    for (KeytabEntry entry : entries) {
-      EncryptionKey key = entry.getKey();
-      println("  %s: version=%d expires=%s encryption=%s",
-          entry.getPrincipalName(),
-          entry.getKeyVersion(),
-          entry.getTimeStamp(),
-          key.getKeyType());
+
+    Keytab loadKeytab = Keytab.loadKeytab(kt);
+    List<PrincipalName> principals = loadKeytab.getPrincipals();
+    println("keytab principal count: %d", principals.size());
+    int entrySize = 0;
+    for (PrincipalName princ : principals) {
+      List<KeytabEntry> entries = loadKeytab.getKeytabEntries(princ);
+      entrySize = entrySize + entries.size();
+      for (KeytabEntry entry : entries) {
+        EncryptionKey key = entry.getKey();
+        println("  %s: version=%d expires=%s encryption=%s",
+            entry.getPrincipal(),
+            entry.getKvno(),
+            entry.getTimestamp(),
+            key.getKeyType());
+
} } + println("keytab entry count: %d", entrySize); + endln(); } diff --git a/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf b/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf index 3182436d487..62a9cde0d0e 100644 --- a/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf +++ b/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf @@ -17,20 +17,21 @@ # [libdefaults] - default_realm = EXAMPLE.COM - allow_weak_crypto = true - default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1 - default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1 + default_realm = EXAMPLE.COM + allow_weak_crypto = true + kdc_realm = _REALM_ + udp_preference_limit = _UDP_LIMIT_ + #_KDC_TCP_PORT_ + #_KDC_UDP_PORT_ [realms] - EXAMPLE.COM = { - kdc = localhost:60088 - } + _REALM_ = { + kdc = localhost:_KDC_PORT_ + } [domain_realm] - .example.com = EXAMPLE.COM - example.com = EXAMPLE.COM + .example.com = _REALM_ + example.com = _REALM_ [login] krb4_convert = true - krb4_get_tickets = false - + krb4_get_tickets = false \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java index 8094ae2e6d2..a452a80a26b 100644 --- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java @@ -1572,7 +1572,6 @@ public class TestKMS { public Void call() throws Exception { final Configuration conf = new Configuration(); conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128); - conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64); final URI uri = createKMSUri(getKMSUrl()); doAs("client", new PrivilegedExceptionAction() { @@ -1698,7 +1697,7 @@ public class TestKMS { @Override public Void call() throws Exception { final Configuration conf = new Configuration(); - conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64); + conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128); final URI uri = createKMSUri(getKMSUrl()); final Credentials credentials = new Credentials(); final UserGroupInformation nonKerberosUgi = @@ -1882,7 +1881,7 @@ public class TestKMS { @Override public Void call() throws Exception { final Configuration conf = new Configuration(); - conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64); + conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128); final URI uri = createKMSUri(getKMSUrl()); UserGroupInformation proxyUgi = null; @@ -1987,7 +1986,7 @@ public class TestKMS { @Override public Void call() throws Exception { final Configuration conf = new Configuration(); - conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64); + conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128); final URI uri = createKMSUri(getKMSUrl()); UserGroupInformation proxyUgi = null; diff --git a/hadoop-common-project/hadoop-minikdc/pom.xml b/hadoop-common-project/hadoop-minikdc/pom.xml index 9811db91a3b..2e22ad05193 100644 --- a/hadoop-common-project/hadoop-minikdc/pom.xml +++ b/hadoop-common-project/hadoop-minikdc/pom.xml @@ -36,110 +36,9 @@ compile - org.apache.directory.server - apacheds-core-api - 2.0.0-M15 - compile - - - org.apache.directory.api - api-ldap-schema-data - - - - - org.apache.directory.server - apacheds-interceptor-kerberos - 2.0.0-M15 - compile - - - org.apache.directory.api - api-ldap-schema-data - - - - - org.apache.directory.server - 
apacheds-protocol-shared - 2.0.0-M15 - compile - - - org.apache.directory.server - apacheds-protocol-kerberos - 2.0.0-M15 - compile - - - - - org.apache.directory.server - apacheds-ldif-partition - 2.0.0-M15 - compile - - - org.apache.directory.api - api-ldap-schema-data - - - - - org.apache.directory.server - apacheds-mavibot-partition - 2.0.0-M15 - compile - - - org.apache.directory.api - api-ldap-schema-data - - - - - org.apache.directory.api - api-all - 1.0.0-M20 - compile - - - xml-apis - xml-apis - - - xpp3 - xpp3 - - - dom4j - dom4j - - - - - org.apache.directory.server - apacheds-jdbm-partition - 2.0.0-M15 - compile - - - org.apache.directory.api - api-ldap-schema-data - - - - - org.apache.directory.server - apacheds-protocol-ldap - 2.0.0-M15 - compile - - - org.apache.directory.api - api-ldap-schema-data - - + org.apache.kerby + kerb-simplekdc + 1.0.0-RC2 org.slf4j diff --git a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java index b089e0ef37d..92786422ae5 100644 --- a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java +++ b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java @@ -18,65 +18,25 @@ package org.apache.hadoop.minikdc; import org.apache.commons.io.Charsets; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang.text.StrSubstitutor; -import org.apache.directory.api.ldap.model.schema.SchemaManager; -import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor; -import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor; -import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader; -import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager; -import org.apache.directory.server.constants.ServerDNConstants; -import org.apache.directory.server.core.DefaultDirectoryService; -import org.apache.directory.server.core.api.CacheService; -import org.apache.directory.server.core.api.DirectoryService; -import org.apache.directory.server.core.api.InstanceLayout; -import org.apache.directory.server.core.api.schema.SchemaPartition; -import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor; -import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex; -import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition; -import org.apache.directory.server.core.partition.ldif.LdifPartition; -import org.apache.directory.server.kerberos.KerberosConfig; -import org.apache.directory.server.kerberos.kdc.KdcServer; -import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory; -import org.apache.directory.server.kerberos.shared.keytab.Keytab; -import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry; -import org.apache.directory.server.protocol.shared.transport.AbstractTransport; -import org.apache.directory.server.protocol.shared.transport.TcpTransport; -import org.apache.directory.server.protocol.shared.transport.UdpTransport; -import org.apache.directory.server.xdbm.Index; -import org.apache.directory.shared.kerberos.KerberosTime; -import org.apache.directory.shared.kerberos.codec.types.EncryptionType; -import org.apache.directory.shared.kerberos.components.EncryptionKey; -import org.apache.directory.api.ldap.model.entry.DefaultEntry; -import 
org.apache.directory.api.ldap.model.entry.Entry; -import org.apache.directory.api.ldap.model.ldif.LdifEntry; -import org.apache.directory.api.ldap.model.ldif.LdifReader; -import org.apache.directory.api.ldap.model.name.Dn; -import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader; +import org.apache.kerby.kerberos.kerb.KrbException; +import org.apache.kerby.kerberos.kerb.server.KdcConfigKey; +import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer; +import org.apache.kerby.util.IOUtil; +import org.apache.kerby.util.NetworkUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.IOException; -import java.io.StringReader; -import java.lang.reflect.Method; -import java.net.InetSocketAddress; -import java.text.MessageFormat; -import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; -import java.util.UUID; /** * Mini KDC based on Apache Directory Server that can be embedded in testcases @@ -84,9 +44,8 @@ import java.util.UUID; *

 * From within testcases:
 * <p>
- * MiniKdc sets 2 System properties when started and un-sets them when stopped:
+ * MiniKdc sets one System property when started and un-sets it when stopped:
 * <ul>
- *   <li>java.security.krb5.conf: set to the MiniKDC real/host/port</li>
 *   <li>sun.security.krb5.debug: set to the debug value provided in the
 *   configuration</li>
 * </ul>
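As a hedged illustration (not part of this patch) of the test-case flow that
this javadoc describes; the directory, file and principal names below are
invented:

    // Typical MiniKdc usage from a testcase.
    Properties conf = MiniKdc.createConf();
    MiniKdc kdc = new MiniKdc(conf, new File("target/minikdc-work"));
    kdc.start();  // points java.security.krb5.conf at the generated file
    File keytab = new File("target/minikdc-work", "keytab");
    kdc.createPrincipal(keytab, "foo/localhost");
    try {
      // ... Kerberos-dependent test code runs here ...
    } finally {
      kdc.stop();  // restores sun.security.krb5.debug if it was overridden
    }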
@@ -116,7 +75,7 @@ public class MiniKdc {
   public static final String SUN_SECURITY_KRB5_DEBUG =
       "sun.security.krb5.debug";
 
-  public static void main(String[] args) throws Exception {
+  public static void main(String[] args) throws Exception {
     if (args.length < 4) {
       System.out.println("Arguments: <WORKDIR> <MINIKDCPROPERTIES> " +
           "<KEYTABFILE> [<PRINCIPALS>]+");
@@ -229,13 +188,17 @@ public class MiniKdc {
   }
 
   private Properties conf;
-  private DirectoryService ds;
-  private KdcServer kdc;
+  private SimpleKdcServer simpleKdc;
   private int port;
   private String realm;
   private File workDir;
   private File krb5conf;
+  private String transport;
+  private boolean krb5Debug;
 
+  public void setTransport(String transport) {
+    this.transport = transport;
+  }
   /**
    * Creates a MiniKdc.
    *
@@ -253,9 +216,9 @@ public class MiniKdc {
           + missingProperties);
     }
     this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
-    if (! workDir.exists()
-        && ! workDir.mkdirs()) {
-      throw new RuntimeException("Cannot create directory " + workDir);
+    if (!this.workDir.exists()
+        && !this.workDir.mkdirs()) {
+      throw new RuntimeException("Cannot create directory " + this.workDir);
     }
     LOG.info("Configuration:");
     LOG.info("---------------------------------------------------------------");
@@ -299,6 +262,7 @@
   }
 
   public File getKrb5conf() {
+    krb5conf = new File(System.getProperty(JAVA_SECURITY_KRB5_CONF));
     return krb5conf;
   }
 
@@ -308,226 +272,81 @@
    * @throws Exception thrown if the MiniKdc could not be started.
    */
   public synchronized void start() throws Exception {
-    if (kdc != null) {
+    if (simpleKdc != null) {
       throw new RuntimeException("Already started");
     }
-    initDirectoryService();
-    initKDCServer();
+    simpleKdc = new SimpleKdcServer();
+    prepareKdcServer();
+    simpleKdc.init();
+    resetDefaultRealm();
+    simpleKdc.start();
+    LOG.info("MiniKdc started.");
   }
 
-  private void initDirectoryService() throws Exception {
-    ds = new DefaultDirectoryService();
-    ds.setInstanceLayout(new InstanceLayout(workDir));
-
-    CacheService cacheService = new CacheService();
-    ds.setCacheService(cacheService);
-
-    // first load the schema
-    InstanceLayout instanceLayout = ds.getInstanceLayout();
-    File schemaPartitionDirectory = new File(
-        instanceLayout.getPartitionsDirectory(), "schema");
-    SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor(
-        instanceLayout.getPartitionsDirectory());
-    extractor.extractOrCopy();
-
-    SchemaLoader loader = new LdifSchemaLoader(schemaPartitionDirectory);
-    SchemaManager schemaManager = new DefaultSchemaManager(loader);
-    schemaManager.loadAllEnabled();
-    ds.setSchemaManager(schemaManager);
-    // Init the LdifPartition with schema
-    LdifPartition schemaLdifPartition = new LdifPartition(schemaManager);
-    schemaLdifPartition.setPartitionPath(schemaPartitionDirectory.toURI());
-
-    // The schema partition
-    SchemaPartition schemaPartition = new SchemaPartition(schemaManager);
-    schemaPartition.setWrappedPartition(schemaLdifPartition);
-    ds.setSchemaPartition(schemaPartition);
-
-    JdbmPartition systemPartition = new JdbmPartition(ds.getSchemaManager());
-    systemPartition.setId("system");
-    systemPartition.setPartitionPath(new File(
-        ds.getInstanceLayout().getPartitionsDirectory(),
-        systemPartition.getId()).toURI());
-    systemPartition.setSuffixDn(new Dn(ServerDNConstants.SYSTEM_DN));
-    systemPartition.setSchemaManager(ds.getSchemaManager());
-    ds.setSystemPartition(systemPartition);
-
-    ds.getChangeLog().setEnabled(false);
-    ds.setDenormalizeOpAttrsEnabled(true);
-    ds.addLast(new KeyDerivationInterceptor());
- - // create one partition - String orgName= conf.getProperty(ORG_NAME).toLowerCase(Locale.ENGLISH); - String orgDomain = conf.getProperty(ORG_DOMAIN).toLowerCase(Locale.ENGLISH); - - JdbmPartition partition = new JdbmPartition(ds.getSchemaManager()); - partition.setId(orgName); - partition.setPartitionPath(new File( - ds.getInstanceLayout().getPartitionsDirectory(), orgName).toURI()); - partition.setSuffixDn(new Dn("dc=" + orgName + ",dc=" + orgDomain)); - ds.addPartition(partition); - // indexes - Set> indexedAttributes = new HashSet>(); - indexedAttributes.add(new JdbmIndex("objectClass", false)); - indexedAttributes.add(new JdbmIndex("dc", false)); - indexedAttributes.add(new JdbmIndex("ou", false)); - partition.setIndexedAttributes(indexedAttributes); - - // And start the ds - ds.setInstanceId(conf.getProperty(INSTANCE)); - ds.startup(); - // context entry, after ds.startup() - Dn dn = new Dn("dc=" + orgName + ",dc=" + orgDomain); - Entry entry = ds.newEntry(dn); - entry.add("objectClass", "top", "domain"); - entry.add("dc", orgName); - ds.getAdminSession().add(entry); + private void resetDefaultRealm() throws IOException { + InputStream templateResource = new FileInputStream( + getKrb5conf().getAbsolutePath()); + String content = IOUtil.readInput(templateResource); + content = content.replaceAll("default_realm = .*\n", + "default_realm = " + getRealm() + "\n"); + IOUtil.writeFile(content, getKrb5conf()); } - /** - * Convenience method that returns a resource as inputstream from the - * classpath. - *

- * It first attempts to use the Thread's context classloader and if not - * set it uses the class' classloader. - * - * @param resourceName resource to retrieve. - * - * @throws IOException thrown if resource cannot be loaded - * @return inputstream with the resource. - */ - public static InputStream getResourceAsStream(String resourceName) - throws IOException { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); - if (cl == null) { - cl = MiniKdc.class.getClassLoader(); - } - InputStream is = cl.getResourceAsStream(resourceName); - if (is == null) { - throw new IOException("Can not read resource file '" + - resourceName + "'"); - } - return is; - } - - private void initKDCServer() throws Exception { - String orgName= conf.getProperty(ORG_NAME); - String orgDomain = conf.getProperty(ORG_DOMAIN); - String bindAddress = conf.getProperty(KDC_BIND_ADDRESS); - final Map map = new HashMap(); - map.put("0", orgName.toLowerCase(Locale.ENGLISH)); - map.put("1", orgDomain.toLowerCase(Locale.ENGLISH)); - map.put("2", orgName.toUpperCase(Locale.ENGLISH)); - map.put("3", orgDomain.toUpperCase(Locale.ENGLISH)); - map.put("4", bindAddress); - - InputStream is1 = getResourceAsStream("minikdc.ldiff"); - - SchemaManager schemaManager = ds.getSchemaManager(); - LdifReader reader = null; - - try { - final String content = StrSubstitutor.replace(IOUtils.toString(is1), map); - reader = new LdifReader(new StringReader(content)); - - for (LdifEntry ldifEntry : reader) { - ds.getAdminSession().add(new DefaultEntry(schemaManager, - ldifEntry.getEntry())); - } - } finally { - IOUtils.closeQuietly(reader); - IOUtils.closeQuietly(is1); - } - - KerberosConfig kerberosConfig = new KerberosConfig(); - kerberosConfig.setMaximumRenewableLifetime(Long.parseLong(conf - .getProperty(MAX_RENEWABLE_LIFETIME))); - kerberosConfig.setMaximumTicketLifetime(Long.parseLong(conf - .getProperty(MAX_TICKET_LIFETIME))); - kerberosConfig.setSearchBaseDn(String.format("dc=%s,dc=%s", orgName, - orgDomain)); - kerberosConfig.setPaEncTimestampRequired(false); - kdc = new KdcServer(kerberosConfig); - kdc.setDirectoryService(ds); - + private void prepareKdcServer() throws Exception { // transport - String transport = conf.getProperty(TRANSPORT); - AbstractTransport absTransport; - if (transport.trim().equals("TCP")) { - absTransport = new TcpTransport(bindAddress, port, 3, 50); - } else if (transport.trim().equals("UDP")) { - absTransport = new UdpTransport(port); - } else { - throw new IllegalArgumentException("Invalid transport: " + transport); + simpleKdc.setWorkDir(workDir); + simpleKdc.setKdcHost(getHost()); + simpleKdc.setKdcRealm(realm); + if (transport == null) { + transport = conf.getProperty(TRANSPORT); } - kdc.addTransports(absTransport); - kdc.setServiceName(conf.getProperty(INSTANCE)); - kdc.start(); - // if using ephemeral port, update port number for binding if (port == 0) { - InetSocketAddress addr = - (InetSocketAddress)absTransport.getAcceptor().getLocalAddress(); - port = addr.getPort(); + port = NetworkUtil.getServerPort(); } - - StringBuilder sb = new StringBuilder(); - InputStream is2 = getResourceAsStream("minikdc-krb5.conf"); - - BufferedReader r = null; - - try { - r = new BufferedReader(new InputStreamReader(is2, Charsets.UTF_8)); - String line = r.readLine(); - - while (line != null) { - sb.append(line).append("{3}"); - line = r.readLine(); + if (transport != null) { + if (transport.trim().equals("TCP")) { + simpleKdc.setKdcTcpPort(port); + simpleKdc.setAllowUdp(false); + } else if 
(transport.trim().equals("UDP")) { + simpleKdc.setKdcUdpPort(port); + simpleKdc.setAllowTcp(false); + } else { + throw new IllegalArgumentException("Invalid transport: " + transport); } - } finally { - IOUtils.closeQuietly(r); - IOUtils.closeQuietly(is2); - } - - krb5conf = new File(workDir, "krb5.conf").getAbsoluteFile(); - FileUtils.writeStringToFile(krb5conf, - MessageFormat.format(sb.toString(), getRealm(), getHost(), - Integer.toString(getPort()), System.getProperty("line.separator"))); - System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5conf.getAbsolutePath()); - - System.setProperty(SUN_SECURITY_KRB5_DEBUG, conf.getProperty(DEBUG, - "false")); - - // refresh the config - Class classRef; - if (System.getProperty("java.vendor").contains("IBM")) { - classRef = Class.forName("com.ibm.security.krb5.internal.Config"); } else { - classRef = Class.forName("sun.security.krb5.Config"); + throw new IllegalArgumentException("Need to set transport!"); + } + simpleKdc.getKdcConfig().setString(KdcConfigKey.KDC_SERVICE_NAME, + conf.getProperty(INSTANCE)); + if (conf.getProperty(DEBUG) != null) { + krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG, conf.getProperty(DEBUG)); } - Method refreshMethod = classRef.getMethod("refresh", new Class[0]); - refreshMethod.invoke(classRef, new Object[0]); - - LOG.info("MiniKdc listening at port: {}", getPort()); - LOG.info("MiniKdc setting JVM krb5.conf to: {}", - krb5conf.getAbsolutePath()); } /** * Stops the MiniKdc */ public synchronized void stop() { - if (kdc != null) { - System.getProperties().remove(JAVA_SECURITY_KRB5_CONF); - System.getProperties().remove(SUN_SECURITY_KRB5_DEBUG); - kdc.stop(); + if (simpleKdc != null) { try { - ds.shutdown(); - } catch (Exception ex) { - LOG.error("Could not shutdown ApacheDS properly: {}", ex.toString(), - ex); + simpleKdc.stop(); + } catch (KrbException e) { + e.printStackTrace(); + } finally { + if(conf.getProperty(DEBUG) != null) { + System.setProperty(SUN_SECURITY_KRB5_DEBUG, + Boolean.toString(krb5Debug)); + } } } delete(workDir); + try { + // Will be fixed in next Kerby version. + Thread.sleep(1000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + LOG.info("MiniKdc stopped."); } private void delete(File f) { @@ -554,55 +373,39 @@ public class MiniKdc { */ public synchronized void createPrincipal(String principal, String password) throws Exception { - String orgName= conf.getProperty(ORG_NAME); - String orgDomain = conf.getProperty(ORG_DOMAIN); - String baseDn = "ou=users,dc=" + orgName.toLowerCase(Locale.ENGLISH) - + ",dc=" + orgDomain.toLowerCase(Locale.ENGLISH); - String content = "dn: uid=" + principal + "," + baseDn + "\n" + - "objectClass: top\n" + - "objectClass: person\n" + - "objectClass: inetOrgPerson\n" + - "objectClass: krb5principal\n" + - "objectClass: krb5kdcentry\n" + - "cn: " + principal + "\n" + - "sn: " + principal + "\n" + - "uid: " + principal + "\n" + - "userPassword: " + password + "\n" + - "krb5PrincipalName: " + principal + "@" + getRealm() + "\n" + - "krb5KeyVersionNumber: 0"; - - for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) { - ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(), - ldifEntry.getEntry())); - } + simpleKdc.createPrincipal(principal, password); } /** - * Creates multiple principals in the KDC and adds them to a keytab file. + * Creates multiple principals in the KDC and adds them to a keytab file. 
* - * @param keytabFile keytab file to add the created principal.s + * @param keytabFile keytab file to add the created principals. * @param principals principals to add to the KDC, do not include the domain. * @throws Exception thrown if the principals or the keytab file could not be * created. */ - public void createPrincipal(File keytabFile, String ... principals) + public synchronized void createPrincipal(File keytabFile, + String ... principals) throws Exception { - String generatedPassword = UUID.randomUUID().toString(); - Keytab keytab = new Keytab(); - List entries = new ArrayList(); - for (String principal : principals) { - createPrincipal(principal, generatedPassword); - principal = principal + "@" + getRealm(); - KerberosTime timestamp = new KerberosTime(); - for (Map.Entry entry : KerberosKeyFactory - .getKerberosKeys(principal, generatedPassword).entrySet()) { - EncryptionKey ekey = entry.getValue(); - byte keyVersion = (byte) ekey.getKeyVersion(); - entries.add(new KeytabEntry(principal, 1L, timestamp, keyVersion, - ekey)); - } + simpleKdc.createPrincipals(principals); + if (keytabFile.exists() && !keytabFile.delete()) { + LOG.error("Failed to delete keytab file: " + keytabFile); } - keytab.setEntries(entries); - keytab.write(keytabFile); + for (String principal : principals) { + simpleKdc.getKadmin().exportKeytab(keytabFile, principal); + } + } + + /** + * Set the System property; return the old value for caching. + * + * @param sysprop property + * @param debug true or false + * @return the previous value + */ + private boolean getAndSet(String sysprop, String debug) { + boolean old = Boolean.getBoolean(sysprop); + System.setProperty(sysprop, debug); + return old; } } diff --git a/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf b/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf deleted file mode 100644 index d118dd15fab..00000000000 --- a/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf +++ /dev/null @@ -1,25 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[libdefaults] - default_realm = {0} - udp_preference_limit = 1 - -[realms] - {0} = '{' - kdc = {1}:{2} - '}' \ No newline at end of file diff --git a/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff b/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff deleted file mode 100644 index 603ccb5fd94..00000000000 --- a/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff +++ /dev/null @@ -1,47 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-dn: ou=users,dc=${0},dc=${1}
-objectClass: organizationalUnit
-objectClass: top
-ou: users
-
-dn: uid=krbtgt,ou=users,dc=${0},dc=${1}
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: KDC Service
-sn: Service
-uid: krbtgt
-userPassword: secret
-krb5PrincipalName: krbtgt/${2}.${3}@${2}.${3}
-krb5KeyVersionNumber: 0
-
-dn: uid=ldap,ou=users,dc=${0},dc=${1}
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: LDAP
-sn: Service
-uid: ldap
-userPassword: secret
-krb5PrincipalName: ldap/${4}@${2}.${3}
-krb5KeyVersionNumber: 0
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
index fac7f0fbd0d..dafa1c1ea62 100644
--- a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
+++ b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.minikdc;
 
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -30,6 +30,7 @@ import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import java.io.File;
 import java.security.Principal;
+import java.util.List;
 import java.util.Set;
 import java.util.Map;
 import java.util.HashSet;
@@ -51,16 +52,16 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
     File workDir = getWorkDir();
 
     kdc.createPrincipal(new File(workDir, "keytab"), "foo/bar", "bar/foo");
-    Keytab kt = Keytab.read(new File(workDir, "keytab"));
+    List<PrincipalName> principalNameList =
+        Keytab.loadKeytab(new File(workDir, "keytab")).getPrincipals();
+
     Set<String> principals = new HashSet<String>();
-    for (KeytabEntry entry : kt.getEntries()) {
-      principals.add(entry.getPrincipalName());
+    for (PrincipalName principalName : principalNameList) {
+      principals.add(principalName.getName());
     }
-    //here principals use \ instead of /
-    //because org.apache.directory.server.kerberos.shared.keytab.KeytabDecoder
-    // .getPrincipalName(IoBuffer buffer) use \\ when generates principal
+
     Assert.assertEquals(new HashSet<String>(Arrays.asList(
-        "foo\\bar@" + kdc.getRealm(), "bar\\foo@" + kdc.getRealm())),
+        "foo/bar@" + kdc.getRealm(), "bar/foo@" + kdc.getRealm())),
         principals);
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
index 20205d19083..240ef40e796 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
+++
b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf @@ -17,21 +17,22 @@ # [libdefaults] - default_realm = EXAMPLE.COM - allow_weak_crypto = true - default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1 - default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1 + default_realm = EXAMPLE.COM + allow_weak_crypto = true + kdc_realm = _REALM_ + udp_preference_limit = _UDP_LIMIT_ + #_KDC_TCP_PORT_ + #_KDC_UDP_PORT_ [realms] - EXAMPLE.COM = { - kdc = localhost:60088 + _REALM_ = { + kdc = localhost:_KDC_PORT_ } [domain_realm] - .example.com = EXAMPLE.COM - example.com = EXAMPLE.COM + .example.com = _REALM_ + example.com = _REALM_ [login] krb4_convert = true krb4_get_tickets = false - diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index bee2e5810c4..aa47f6cd51e 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -953,12 +953,6 @@ 1.8 - - org.apache.directory.server - apacheds-kerberos-codec - 2.0.0-M15 - - com.microsoft.azure azure-storage diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf index 121ac6d9b98..6cdd3d6923f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf @@ -14,15 +14,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# +# + [libdefaults] - default_realm = APACHE.ORG - udp_preference_limit = 1 - extra_addresses = 127.0.0.1 + default_realm = APACHE.ORG + extra_addresses = 127.0.0.1 + kdc_realm = _REALM_ + udp_preference_limit = _UDP_LIMIT_ + #_KDC_TCP_PORT_ + #_KDC_UDP_PORT_ + [realms] - APACHE.ORG = { - admin_server = localhost:88 - kdc = localhost:88 - } + _REALM_ = { + admin_server = localhost:_KDC_PORT_ + kdc = localhost:_KDC_PORT_ + } [domain_realm] - localhost = APACHE.ORG + localhost = _REALM_
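A closing note on the krb5.conf templates touched above: _REALM_, _KDC_PORT_
and _UDP_LIMIT_ are placeholders that the test setup is expected to fill in
before pointing the JVM at the resulting file. A hedged sketch of how a
harness might do that; the class name and paths are illustrative, not part of
this patch:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class Krb5ConfTemplate {
      // Fill the placeholders and point the JVM at the resulting file.
      static void materialize(Path template, Path out, String realm, int port)
          throws IOException {
        String conf =
            new String(Files.readAllBytes(template), StandardCharsets.UTF_8)
                .replace("_REALM_", realm)
                .replace("_KDC_PORT_", Integer.toString(port))
                .replace("_UDP_LIMIT_", "1");  // favor TCP, as before
        Files.write(out, conf.getBytes(StandardCharsets.UTF_8));
        System.setProperty("java.security.krb5.conf", out.toString());
      }

      public static void main(String[] args) throws IOException {
        materialize(Paths.get("src/test/resources/krb5.conf"),
            Paths.get("target/krb5.conf"), "EXAMPLE.COM", 60088);
      }
    }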