From 17d4fbbf0af9b93de9502afb274e91533140d1c6 Mon Sep 17 00:00:00 2001 From: Alejandro Abdelnur Date: Mon, 5 May 2014 21:43:14 +0000 Subject: [PATCH] HADOOP-10433. Key Management Server based on KeyProvider API. (tucu) git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1592637 13f79535-47bb-0310-9956-ffa450edef68 --- .gitignore | 1 + .../resources/assemblies/hadoop-kms-dist.xml | 52 ++ .../hadoop-common/CHANGES.txt | 2 + .../dev-support/findbugsExcludeFile.xml | 6 + .../crypto/key/kms/KMSClientProvider.java | 519 +++++++++++ .../crypto/key/kms/KMSRESTConstants.java | 53 ++ ...pache.hadoop.crypto.key.KeyProviderFactory | 1 + .../dev-support/findbugsExcludeFile.xml | 41 + hadoop-common-project/hadoop-kms/pom.xml | 408 +++++++++ .../hadoop-kms/src/main/conf/kms-acls.xml | 82 ++ .../hadoop-kms/src/main/conf/kms-env.sh | 45 + .../src/main/conf/kms-log4j.properties | 38 + .../hadoop-kms/src/main/conf/kms-site.xml | 71 ++ .../hadoop/crypto/key/kms/server/KMS.java | 305 +++++++ .../hadoop/crypto/key/kms/server/KMSACLs.java | 133 +++ .../crypto/key/kms/server/KMSAudit.java | 62 ++ .../kms/server/KMSAuthenticationFilter.java | 123 +++ .../key/kms/server/KMSCacheKeyProvider.java | 180 ++++ .../key/kms/server/KMSConfiguration.java | 94 ++ .../key/kms/server/KMSExceptionsProvider.java | 113 +++ .../crypto/key/kms/server/KMSJSONReader.java | 54 ++ .../crypto/key/kms/server/KMSJSONWriter.java | 70 ++ .../crypto/key/kms/server/KMSMDCFilter.java | 92 ++ .../key/kms/server/KMSServerJSONUtils.java | 80 ++ .../crypto/key/kms/server/KMSWebApp.java | 214 +++++ .../hadoop-kms/src/main/libexec/kms-config.sh | 181 ++++ .../hadoop-kms/src/main/sbin/kms.sh | 60 ++ .../src/main/tomcat/ROOT/WEB-INF/web.xml | 16 + .../src/main/tomcat/ROOT/index.html | 27 + .../src/main/tomcat/logging.properties | 67 ++ .../hadoop-kms/src/main/tomcat/server.xml | 153 ++++ .../hadoop-kms/src/main/tomcat/ssl-server.xml | 135 +++ .../src/main/webapp/WEB-INF/web.xml | 78 ++ 
.../hadoop-kms/src/site/apt/index.apt.vm | 487 +++++++++++ .../src/site/resources/css/site.css | 29 + .../hadoop-kms/src/site/site.xml | 29 + .../hadoop/crypto/key/kms/server/TestKMS.java | 806 ++++++++++++++++++ .../crypto/key/kms/server/TestKMSACLs.java | 47 + .../kms/server/TestKMSCacheKeyProvider.java | 120 +++ .../src/test/resources/log4j.properties | 31 + hadoop-common-project/pom.xml | 1 + hadoop-dist/pom.xml | 1 + hadoop-project/pom.xml | 8 +- hadoop-project/src/site/site.xml | 1 + 44 files changed, 5115 insertions(+), 1 deletion(-) create mode 100644 hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java create mode 100644 hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml create mode 100644 hadoop-common-project/hadoop-kms/pom.xml create mode 100644 hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml create mode 100644 hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh create mode 100644 hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties create mode 100644 hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSCacheKeyProvider.java create mode 100644 
hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSExceptionsProvider.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSMDCFilter.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh create mode 100644 hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh create mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml create mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html create mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties create mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml create mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml create mode 100644 hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml create mode 100644 hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm create mode 100644 hadoop-common-project/hadoop-kms/src/site/resources/css/site.css create mode 100644 hadoop-common-project/hadoop-kms/src/site/site.xml create mode 100644 hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java create mode 100644 
hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSACLs.java create mode 100644 hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSCacheKeyProvider.java create mode 100644 hadoop-common-project/hadoop-kms/src/test/resources/log4j.properties diff --git a/.gitignore b/.gitignore index 93e755ce9ce..ac8222ad8de 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,6 @@ .project .settings target +hadoop-common-project/hadoop-kms/downloads/ hadoop-hdfs-project/hadoop-hdfs/downloads hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml new file mode 100644 index 00000000000..9bbd0eb76ae --- /dev/null +++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml @@ -0,0 +1,52 @@ + + + + hadoop-kms-dist + + dir + + false + + + + ${basedir}/src/main/conf + /etc/hadoop + + * + + + + ${basedir}/src/main/sbin + /sbin + + * + + 0755 + + + ${basedir}/src/main/libexec + /libexec + + * + + 0755 + + + + ${project.build.directory}/site + /share/doc/hadoop/kms + + + diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 6837f57dcfc..75ef0bbef58 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -8,6 +8,8 @@ Trunk (Unreleased) FSDataOutputStream.sync() and Syncable.sync(). (szetszwo) NEW FEATURES + + HADOOP-10433. Key Management Server based on KeyProvider API. 
(tucu) IMPROVEMENTS diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml index 66da5424ce9..e8f80dd3967 100644 --- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml +++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml @@ -357,4 +357,10 @@ + + + + + + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java new file mode 100644 index 00000000000..1bbbf9d8760 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -0,0 +1,519 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms; + +import com.google.common.annotations.VisibleForTesting; +import org.apache.commons.codec.binary.Base64; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.crypto.key.KeyProvider; +import org.apache.hadoop.crypto.key.KeyProviderFactory; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.authentication.client.AuthenticatedURL; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.client.PseudoAuthenticator; +import org.apache.hadoop.security.ssl.SSLFactory; +import org.apache.http.client.utils.URIBuilder; +import org.codehaus.jackson.map.ObjectMapper; + +import javax.net.ssl.HttpsURLConnection; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.lang.reflect.Constructor; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLEncoder; +import java.security.GeneralSecurityException; +import java.security.NoSuchAlgorithmException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * KMS client KeyProvider implementation. 
+ */ +@InterfaceAudience.Private +public class KMSClientProvider extends KeyProvider { + + public static final String SCHEME_NAME = "kms"; + + private static final String UTF8 = "UTF-8"; + + private static final String CONTENT_TYPE = "Content-Type"; + private static final String APPLICATION_JSON_MIME = "application/json"; + + private static final String HTTP_GET = "GET"; + private static final String HTTP_POST = "POST"; + private static final String HTTP_PUT = "PUT"; + private static final String HTTP_DELETE = "DELETE"; + + private static KeyVersion parseJSONKeyVersion(Map valueMap) { + KeyVersion keyVersion = null; + if (!valueMap.isEmpty()) { + byte[] material = (valueMap.containsKey(KMSRESTConstants.MATERIAL_FIELD)) + ? Base64.decodeBase64((String) valueMap.get(KMSRESTConstants.MATERIAL_FIELD)) + : null; + keyVersion = new KMSKeyVersion((String) + valueMap.get(KMSRESTConstants.VERSION_NAME_FIELD), material); + } + return keyVersion; + } + + private static Metadata parseJSONMetadata(Map valueMap) { + Metadata metadata = null; + if (!valueMap.isEmpty()) { + metadata = new KMSMetadata( + (String) valueMap.get(KMSRESTConstants.CIPHER_FIELD), + (Integer) valueMap.get(KMSRESTConstants.LENGTH_FIELD), + (String) valueMap.get(KMSRESTConstants.DESCRIPTION_FIELD), + new Date((Long) valueMap.get(KMSRESTConstants.CREATED_FIELD)), + (Integer) valueMap.get(KMSRESTConstants.VERSIONS_FIELD)); + } + return metadata; + } + + private static void writeJson(Map map, OutputStream os) throws IOException { + Writer writer = new OutputStreamWriter(os); + ObjectMapper jsonMapper = new ObjectMapper(); + jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map); + } + + /** + * The factory to create KMSClientProvider, which is used by the + * ServiceLoader. 
+ */ + public static class Factory extends KeyProviderFactory { + + @Override + public KeyProvider createProvider(URI providerName, Configuration conf) + throws IOException { + if (SCHEME_NAME.equals(providerName.getScheme())) { + return new KMSClientProvider(providerName, conf); + } + return null; + } + } + + public static T checkNotNull(T o, String name) + throws IllegalArgumentException { + if (o == null) { + throw new IllegalArgumentException("Parameter '" + name + + "' cannot be null"); + } + return o; + } + + + public static String checkNotEmpty(String s, String name) + throws IllegalArgumentException { + checkNotNull(s, name); + if (s.isEmpty()) { + throw new IllegalArgumentException("Parameter '" + name + + "' cannot be empty"); + } + return s; + } + + private String kmsUrl; + private SSLFactory sslFactory; + + public KMSClientProvider(URI uri, Configuration conf) throws IOException { + Path path = unnestUri(uri); + URL url = path.toUri().toURL(); + kmsUrl = createServiceURL(url); + if ("https".equalsIgnoreCase(url.getProtocol())) { + sslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, conf); + try { + sslFactory.init(); + } catch (GeneralSecurityException ex) { + throw new IOException(ex); + } + } + } + + private String createServiceURL(URL url) throws IOException { + String str = url.toExternalForm(); + if (str.endsWith("/")) { + str = str.substring(0, str.length() - 1); + } + return new URL(str + KMSRESTConstants.SERVICE_VERSION + "/"). 
+ toExternalForm(); + } + + private URL createURL(String collection, String resource, String subResource, + Map parameters) throws IOException { + try { + StringBuilder sb = new StringBuilder(); + sb.append(kmsUrl); + sb.append(collection); + if (resource != null) { + sb.append("/").append(URLEncoder.encode(resource, UTF8)); + } + if (subResource != null) { + sb.append("/").append(subResource); + } + URIBuilder uriBuilder = new URIBuilder(sb.toString()); + if (parameters != null) { + for (Map.Entry param : parameters.entrySet()) { + Object value = param.getValue(); + if (value instanceof String) { + uriBuilder.addParameter(param.getKey(), (String) value); + } else { + for (String s : (String[]) value) { + uriBuilder.addParameter(param.getKey(), s); + } + } + } + } + return uriBuilder.build().toURL(); + } catch (URISyntaxException ex) { + throw new IOException(ex); + } + } + + private HttpURLConnection configureConnection(HttpURLConnection conn) + throws IOException { + if (sslFactory != null) { + HttpsURLConnection httpsConn = (HttpsURLConnection) conn; + try { + httpsConn.setSSLSocketFactory(sslFactory.createSSLSocketFactory()); + } catch (GeneralSecurityException ex) { + throw new IOException(ex); + } + httpsConn.setHostnameVerifier(sslFactory.getHostnameVerifier()); + } + return conn; + } + + private HttpURLConnection createConnection(URL url, String method) + throws IOException { + HttpURLConnection conn; + try { + AuthenticatedURL authUrl = new AuthenticatedURL(new PseudoAuthenticator(), + sslFactory); + conn = authUrl.openConnection(url, new AuthenticatedURL.Token()); + } catch (AuthenticationException ex) { + throw new IOException(ex); + } + conn.setUseCaches(false); + conn.setRequestMethod(method); + if (method.equals(HTTP_POST) || method.equals(HTTP_PUT)) { + conn.setDoOutput(true); + } + conn = configureConnection(conn); + return conn; + } + + // trick, riding on generics to throw an undeclared exception + + private static void throwEx(Throwable ex) { + 
KMSClientProvider.throwException(ex); + } + + @SuppressWarnings("unchecked") + private static void throwException(Throwable ex) + throws E { + throw (E) ex; + } + + @SuppressWarnings("unchecked") + private static void validateResponse(HttpURLConnection conn, int expected) + throws IOException { + int status = conn.getResponseCode(); + if (status != expected) { + InputStream es = null; + try { + es = conn.getErrorStream(); + ObjectMapper mapper = new ObjectMapper(); + Map json = mapper.readValue(es, Map.class); + String exClass = (String) json.get( + KMSRESTConstants.ERROR_EXCEPTION_JSON); + String exMsg = (String) + json.get(KMSRESTConstants.ERROR_MESSAGE_JSON); + Exception toThrow; + try { + ClassLoader cl = KMSClientProvider.class.getClassLoader(); + Class klass = cl.loadClass(exClass); + Constructor constr = klass.getConstructor(String.class); + toThrow = (Exception) constr.newInstance(exMsg); + } catch (Exception ex) { + toThrow = new IOException(MessageFormat.format( + "HTTP status [{0}], {1}", status, conn.getResponseMessage())); + } + throwEx(toThrow); + } finally { + if (es != null) { + es.close(); + } + } + } + } + + private static T call(HttpURLConnection conn, Map jsonOutput, + int expectedResponse, Class klass) + throws IOException { + T ret = null; + try { + if (jsonOutput != null) { + writeJson(jsonOutput, conn.getOutputStream()); + } + } catch (IOException ex) { + conn.getInputStream().close(); + throw ex; + } + validateResponse(conn, expectedResponse); + if (APPLICATION_JSON_MIME.equalsIgnoreCase(conn.getContentType()) + && klass != null) { + ObjectMapper mapper = new ObjectMapper(); + InputStream is = null; + try { + is = conn.getInputStream(); + ret = mapper.readValue(is, klass); + } catch (IOException ex) { + if (is != null) { + is.close(); + } + throw ex; + } finally { + if (is != null) { + is.close(); + } + } + } + return ret; + } + + public static class KMSKeyVersion extends KeyVersion { + public KMSKeyVersion(String versionName, byte[] 
material) { + super(versionName, material); + } + } + + @Override + public KeyVersion getKeyVersion(String versionName) throws IOException { + checkNotEmpty(versionName, "versionName"); + URL url = createURL(KMSRESTConstants.KEY_VERSION_RESOURCE, + versionName, null, null); + HttpURLConnection conn = createConnection(url, HTTP_GET); + Map response = call(conn, null, HttpURLConnection.HTTP_OK, Map.class); + return parseJSONKeyVersion(response); + } + + @Override + public KeyVersion getCurrentKey(String name) throws IOException { + checkNotEmpty(name, "name"); + URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name, + KMSRESTConstants.CURRENT_VERSION_SUB_RESOURCE, null); + HttpURLConnection conn = createConnection(url, HTTP_GET); + Map response = call(conn, null, HttpURLConnection.HTTP_OK, Map.class); + return parseJSONKeyVersion(response); + } + + @Override + @SuppressWarnings("unchecked") + public List getKeys() throws IOException { + URL url = createURL(KMSRESTConstants.KEYS_NAMES_RESOURCE, null, null, + null); + HttpURLConnection conn = createConnection(url, HTTP_GET); + List response = call(conn, null, HttpURLConnection.HTTP_OK, List.class); + return (List) response; + } + + public static class KMSMetadata extends Metadata { + public KMSMetadata(String cipher, int bitLength, String description, + Date created, int versions) { + super(cipher, bitLength, description, created, versions); + } + } + + // breaking keyNames into sets to keep resulting URL undler 2000 chars + private List createKeySets(String[] keyNames) { + List list = new ArrayList(); + List batch = new ArrayList(); + int batchLen = 0; + for (String name : keyNames) { + int additionalLen = KMSRESTConstants.KEY_OP.length() + 1 + name.length(); + batchLen += additionalLen; + // topping at 1500 to account for initial URL and encoded names + if (batchLen > 1500) { + list.add(batch.toArray(new String[batch.size()])); + batch = new ArrayList(); + batchLen = additionalLen; + } + batch.add(name); + } + if 
(!batch.isEmpty()) { + list.add(batch.toArray(new String[batch.size()])); + } + return list; + } + + @Override + @SuppressWarnings("unchecked") + public Metadata[] getKeysMetadata(String ... keyNames) throws IOException { + List keysMetadata = new ArrayList(); + List keySets = createKeySets(keyNames); + for (String[] keySet : keySets) { + if (keyNames.length > 0) { + Map queryStr = new HashMap(); + queryStr.put(KMSRESTConstants.KEY_OP, keySet); + URL url = createURL(KMSRESTConstants.KEYS_METADATA_RESOURCE, null, + null, queryStr); + HttpURLConnection conn = createConnection(url, HTTP_GET); + List list = call(conn, null, HttpURLConnection.HTTP_OK, List.class); + for (Map map : list) { + keysMetadata.add(parseJSONMetadata(map)); + } + } + } + return keysMetadata.toArray(new Metadata[keysMetadata.size()]); + } + + private KeyVersion createKeyInternal(String name, byte[] material, + Options options) + throws NoSuchAlgorithmException, IOException { + checkNotEmpty(name, "name"); + checkNotNull(options, "options"); + Map jsonKey = new HashMap(); + jsonKey.put(KMSRESTConstants.NAME_FIELD, name); + jsonKey.put(KMSRESTConstants.CIPHER_FIELD, options.getCipher()); + jsonKey.put(KMSRESTConstants.LENGTH_FIELD, options.getBitLength()); + if (material != null) { + jsonKey.put(KMSRESTConstants.MATERIAL_FIELD, + Base64.encodeBase64String(material)); + } + if (options.getDescription() != null) { + jsonKey.put(KMSRESTConstants.DESCRIPTION_FIELD, + options.getDescription()); + } + URL url = createURL(KMSRESTConstants.KEYS_RESOURCE, null, null, null); + HttpURLConnection conn = createConnection(url, HTTP_POST); + conn.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON_MIME); + Map response = call(conn, jsonKey, HttpURLConnection.HTTP_CREATED, + Map.class); + return parseJSONKeyVersion(response); + } + + @Override + public KeyVersion createKey(String name, Options options) + throws NoSuchAlgorithmException, IOException { + return createKeyInternal(name, null, options); + } + + @Override 
+ public KeyVersion createKey(String name, byte[] material, Options options) + throws IOException { + checkNotNull(material, "material"); + try { + return createKeyInternal(name, material, options); + } catch (NoSuchAlgorithmException ex) { + throw new RuntimeException("It should not happen", ex); + } + } + + private KeyVersion rollNewVersionInternal(String name, byte[] material) + throws NoSuchAlgorithmException, IOException { + checkNotEmpty(name, "name"); + Map jsonMaterial = new HashMap(); + if (material != null) { + jsonMaterial.put(KMSRESTConstants.MATERIAL_FIELD, + Base64.encodeBase64String(material)); + } + URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name, null, null); + HttpURLConnection conn = createConnection(url, HTTP_POST); + conn.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON_MIME); + Map response = call(conn, jsonMaterial, + HttpURLConnection.HTTP_OK, Map.class); + return parseJSONKeyVersion(response); + } + + + @Override + public KeyVersion rollNewVersion(String name) + throws NoSuchAlgorithmException, IOException { + return rollNewVersionInternal(name, null); + } + + @Override + public KeyVersion rollNewVersion(String name, byte[] material) + throws IOException { + checkNotNull(material, "material"); + try { + return rollNewVersionInternal(name, material); + } catch (NoSuchAlgorithmException ex) { + throw new RuntimeException("It should not happen", ex); + } + } + + @Override + public List getKeyVersions(String name) throws IOException { + checkNotEmpty(name, "name"); + URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name, + KMSRESTConstants.VERSIONS_SUB_RESOURCE, null); + HttpURLConnection conn = createConnection(url, HTTP_GET); + List response = call(conn, null, HttpURLConnection.HTTP_OK, List.class); + List versions = null; + if (!response.isEmpty()) { + versions = new ArrayList(); + for (Object obj : response) { + versions.add(parseJSONKeyVersion((Map) obj)); + } + } + return versions; + } + + @Override + public Metadata 
getMetadata(String name) throws IOException { + checkNotEmpty(name, "name"); + URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name, + KMSRESTConstants.METADATA_SUB_RESOURCE, null); + HttpURLConnection conn = createConnection(url, HTTP_GET); + Map response = call(conn, null, HttpURLConnection.HTTP_OK, Map.class); + return parseJSONMetadata(response); + } + + @Override + public void deleteKey(String name) throws IOException { + checkNotEmpty(name, "name"); + URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name, null, null); + HttpURLConnection conn = createConnection(url, HTTP_DELETE); + call(conn, null, HttpURLConnection.HTTP_OK, null); + } + + @Override + public void flush() throws IOException { + // NOP + // the client does not keep any local state, thus flushing is not required + // because of the client. + // the server should not keep in memory state on behalf of clients either. + } + + @VisibleForTesting + public static String buildVersionName(String name, int version) { + return KeyProvider.buildVersionName(name, version); + } + +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java new file mode 100644 index 00000000000..3d2ea349974 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms; + +import org.apache.hadoop.classification.InterfaceAudience; + +/** + * KMS REST and JSON constants and utility methods for the KMSServer. + */ +@InterfaceAudience.Private +public class KMSRESTConstants { + + public static final String SERVICE_VERSION = "/v1"; + public static final String KEY_RESOURCE = "key"; + public static final String KEYS_RESOURCE = "keys"; + public static final String KEYS_METADATA_RESOURCE = KEYS_RESOURCE + + "/metadata"; + public static final String KEYS_NAMES_RESOURCE = KEYS_RESOURCE + "/names"; + public static final String KEY_VERSION_RESOURCE = "keyversion"; + public static final String METADATA_SUB_RESOURCE = "_metadata"; + public static final String VERSIONS_SUB_RESOURCE = "_versions"; + public static final String CURRENT_VERSION_SUB_RESOURCE = "_currentversion"; + + public static final String KEY_OP = "key"; + + public static final String NAME_FIELD = "name"; + public static final String CIPHER_FIELD = "cipher"; + public static final String LENGTH_FIELD = "length"; + public static final String DESCRIPTION_FIELD = "description"; + public static final String CREATED_FIELD = "created"; + public static final String VERSIONS_FIELD = "versions"; + public static final String MATERIAL_FIELD = "material"; + public static final String VERSION_NAME_FIELD = "versionName"; + + public static final String ERROR_EXCEPTION_JSON = "exception"; + public static final String ERROR_MESSAGE_JSON = "message"; + +} diff --git 
a/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory b/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory index dbce4d5274a..80b67fb0882 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory +++ b/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory @@ -15,3 +15,4 @@ org.apache.hadoop.crypto.key.JavaKeyStoreProvider$Factory org.apache.hadoop.crypto.key.UserProvider$Factory +org.apache.hadoop.crypto.key.kms.KMSClientProvider$Factory diff --git a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml new file mode 100644 index 00000000000..bc92ed73f22 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml new file mode 100644 index 00000000000..b65e67a5bbc --- /dev/null +++ b/hadoop-common-project/hadoop-kms/pom.xml @@ -0,0 +1,408 @@ + + + + 4.0.0 + + org.apache.hadoop + hadoop-project + 3.0.0-SNAPSHOT + ../../hadoop-project + + org.apache.hadoop + hadoop-kms + 3.0.0-SNAPSHOT + war + + Apache Hadoop KMS + Apache Hadoop KMS + + + 6.0.36 + + ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/kms/tomcat + + + http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz + + + + + + org.apache.hadoop + hadoop-minikdc + test + + + junit + junit + test + + + org.mockito + mockito-all + test + + + org.apache.hadoop + hadoop-auth + compile + + + com.google.guava + guava + compile + + + com.sun.jersey + jersey-core + compile + + + com.sun.jersey + 
jersey-server + compile + + + javax.servlet + servlet-api + provided + + + org.mortbay.jetty + jetty + test + + + org.apache.hadoop + hadoop-common + compile + + + javax.xml.stream + stax-api + + + commons-httpclient + commons-httpclient + + + tomcat + jasper-compiler + + + tomcat + jasper-runtime + + + javax.servlet + servlet-api + + + javax.servlet + jsp-api + + + javax.servlet.jsp + jsp-api + + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + + + org.mortbay.jetty + jsp-api-2.1 + + + org.mortbay.jetty + servlet-api-2.5 + + + net.java.dev.jets3t + jets3t + + + org.eclipse.jdt + core + + + commons-el + commons-el + + + + + org.apache.hadoop + hadoop-common + test + test-jar + + + log4j + log4j + compile + + + org.slf4j + slf4j-api + compile + + + org.slf4j + slf4j-log4j12 + runtime + + + org.slf4j + jul-to-slf4j + compile + + + org.mortbay.jetty + jetty-util + compile + + + com.codahale.metrics + metrics-core + compile + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + always + 1 + 600 + + + listener + org.apache.hadoop.test.TimedOutTestsListener + + + + **/${test.exclude}.java + ${test.exclude.pattern} + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + create-web-xmls + generate-test-resources + + run + + + + + + + + + + + + + + + org.apache.maven.plugins + maven-war-plugin + + + default-war + package + + war + + + kms + ${project.build.directory}/kms + + + + + + + org.codehaus.mojo + findbugs-maven-plugin + + ${basedir}/dev-support/findbugsExcludeFile.xml + + + + + + + + + docs + + false + + + + + org.apache.maven.plugins + maven-site-plugin + + + docs + prepare-package + + site + + + + + + + + + + dist + + false + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + org.apache.hadoop + hadoop-assemblies + ${project.version} + + + + + dist + package + + single + + + ${project.artifactId}-${project.version} + + false + false + + hadoop-kms-dist + + + + + + + + org.apache.maven.plugins + maven-antrun-plugin + 
+ + dist + + run + + package + + + + + + + + + + cd "${project.build.directory}/tomcat.exp" + gzip -cd ../../downloads/apache-tomcat-${tomcat.version}.tar.gz | tar xf - + + + + + + + + + + + + + + + + + + + + + + + + + + tar + package + + run + + + + + + cd "${project.build.directory}" + tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz + + + + + + + + + + + + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml b/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml new file mode 100644 index 00000000000..ab6c993e72c --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml @@ -0,0 +1,82 @@ + + + + + + + + + + hadoop.kms.acl.CREATE + * + + ACL for create-key operations. + If the user does is not in the GET ACL, the key material is not returned + as part of the response. + + + + + hadoop.kms.acl.DELETE + * + + ACL for delete-key operations. + + + + + hadoop.kms.acl.ROLLOVER + * + + ACL for rollover-key operations. + If the user does is not in the GET ACL, the key material is not returned + as part of the response. + + + + + hadoop.kms.acl.GET + * + + ACL for get-key-version and get-current-key operations. + + + + + hadoop.kms.acl.GET_KEYS + * + + ACL for get-keys operation. + + + + + hadoop.kms.acl.GET_METADATA + * + + ACL for get-key-metadata an get-keys-metadata operations. + + + + + hadoop.kms.acl.SET_KEY_MATERIAL + * + + Complimentary ACL for CREATE and ROLLOVER operation to allow the client + to provide the key material when creating or rolling a key. 
+ + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh new file mode 100644 index 00000000000..1bbec7f1856 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. See accompanying LICENSE file. +# + +# Set kms specific environment variables here. + +# Settings for the Embedded Tomcat that runs KMS +# Java System properties for KMS should be specified in this variable +# +# export CATALINA_OPTS= + +# KMS logs directory +# +# export KMS_LOG=${KMS_HOME}/logs + +# KMS temporary directory +# +# export KMS_TEMP=${KMS_HOME}/temp + +# The HTTP port used by KMS +# +# export KMS_HTTP_PORT=16000 + +# The Admin port used by KMS +# +# export KMS_ADMIN_PORT=`expr ${KMS_HTTP_PORT} + 1` + +# The location of the SSL keystore if using SSL +# +# export KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore + +# The password of the SSL keystore if using SSL +# +# export KMS_SSL_KEYSTORE_PASS=password diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties new file mode 100644 index 00000000000..8e6d9099502 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties @@ -0,0 +1,38 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. See accompanying LICENSE file. +# + +# If the Java System property 'kms.log.dir' is not defined at KMS start up time +# Setup sets its value to '${kms.home}/logs' + +log4j.appender.kms=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kms.DatePattern='.'yyyy-MM-dd +log4j.appender.kms.File=${kms.log.dir}/kms.log +log4j.appender.kms.Append=true +log4j.appender.kms.layout=org.apache.log4j.PatternLayout +log4j.appender.kms.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n + +log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kms-audit.DatePattern='.'yyyy-MM-dd +log4j.appender.kms-audit.File=${kms.log.dir}/kms-audit.log +log4j.appender.kms-audit.Append=true +log4j.appender.kms-audit.layout=org.apache.log4j.PatternLayout +log4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n + +log4j.logger.kms-audit=INFO, kms-audit +log4j.additivity.kms-audit=false + +log4j.rootLogger=ALL, kms +log4j.logger.org.apache.hadoop.conf=ERROR +log4j.logger.org.apache.hadoop=INFO +log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml new file mode 100644 index 00000000000..20896fc2873 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml @@ -0,0 +1,71 @@ + + + + + + + hadoop.security.key.provider.path + jceks://file@/${user.home}/kms.keystore + + + + + + 
hadoop.security.keystore.JavaKeyStoreProvider.password + none + + + + + + + hadoop.kms.cache.timeout.ms + 10000 + + + + + + + + hadoop.kms.authentication.type + simple + + simple or kerberos + + + + + hadoop.kms.authentication.kerberos.keytab + ${user.home}/kms.keytab + + + + + + hadoop.kms.authentication.kerberos.principal + HTTP/localhost + + + + + + hadoop.kms.authentication.kerberos.name.rules + DEFAULT + + + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java new file mode 100644 index 00000000000..de8d8443d39 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java @@ -0,0 +1,305 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.commons.codec.binary.Base64; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.crypto.key.KeyProvider; +import org.apache.hadoop.crypto.key.kms.KMSRESTConstants; +import org.apache.hadoop.security.AccessControlException; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authorize.AuthorizationException; +import org.apache.hadoop.crypto.key.kms.KMSClientProvider; +import org.apache.hadoop.util.StringUtils; + +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.SecurityContext; +import java.net.URI; +import java.net.URISyntaxException; +import java.security.Principal; +import java.text.MessageFormat; +import java.util.List; +import java.util.Map; + +/** + * Class providing the REST bindings, via Jersey, for the KMS. 
+ */ +@Path(KMSRESTConstants.SERVICE_VERSION) +@InterfaceAudience.Private +public class KMS { + private static final String CREATE_KEY = "CREATE_KEY"; + private static final String DELETE_KEY = "DELETE_KEY"; + private static final String ROLL_NEW_VERSION = "ROLL_NEW_VERSION"; + private static final String GET_KEYS = "GET_KEYS"; + private static final String GET_KEYS_METADATA = "GET_KEYS_METADATA"; + private static final String GET_KEY_VERSION = "GET_KEY_VERSION"; + private static final String GET_CURRENT_KEY = "GET_CURRENT_KEY"; + private static final String GET_KEY_VERSIONS = "GET_KEY_VERSIONS"; + private static final String GET_METADATA = "GET_METADATA"; + + private KeyProvider provider; + + public KMS() throws Exception { + provider = KMSWebApp.getKeyProvider(); + } + + private static Principal getPrincipal(SecurityContext securityContext) + throws AuthenticationException{ + Principal user = securityContext.getUserPrincipal(); + if (user == null) { + throw new AuthenticationException("User must be authenticated"); + } + return user; + } + + private static void assertAccess(KMSACLs.Type aclType, Principal principal, + String operation, String key) throws AccessControlException { + if (!KMSWebApp.getACLs().hasAccess(aclType, principal.getName())) { + KMSWebApp.getUnauthorizedCallsMeter().mark(); + KMSAudit.unauthorized(principal, operation, key); + throw new AuthorizationException(MessageFormat.format( + "User:{0} not allowed to do ''{1}'' on ''{2}''", + principal.getName(), operation, key)); + } + } + + private static KeyProvider.KeyVersion removeKeyMaterial( + KeyProvider.KeyVersion keyVersion) { + return new KMSClientProvider.KMSKeyVersion(keyVersion.getVersionName(), + null); + } + + private static URI getKeyURI(String name) throws URISyntaxException { + return new URI(KMSRESTConstants.SERVICE_VERSION + "/" + + KMSRESTConstants.KEY_RESOURCE + "/" + name); + } + + @POST + @Path(KMSRESTConstants.KEYS_RESOURCE) + @Consumes(MediaType.APPLICATION_JSON) + 
@Produces(MediaType.APPLICATION_JSON) + public Response createKey(@Context SecurityContext securityContext, + Map jsonKey) throws Exception { + KMSWebApp.getAdminCallsMeter().mark(); + Principal user = getPrincipal(securityContext); + String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD); + KMSClientProvider.checkNotEmpty(name, KMSRESTConstants.NAME_FIELD); + assertAccess(KMSACLs.Type.CREATE, user, CREATE_KEY, name); + String cipher = (String) jsonKey.get(KMSRESTConstants.CIPHER_FIELD); + String material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD); + int length = (jsonKey.containsKey(KMSRESTConstants.LENGTH_FIELD)) + ? (Integer) jsonKey.get(KMSRESTConstants.LENGTH_FIELD) : 0; + String description = (String) + jsonKey.get(KMSRESTConstants.DESCRIPTION_FIELD); + + if (material != null) { + assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user, + CREATE_KEY + " with user provided material", name); + } + KeyProvider.Options options = new KeyProvider.Options( + KMSWebApp.getConfiguration()); + if (cipher != null) { + options.setCipher(cipher); + } + if (length != 0) { + options.setBitLength(length); + } + options.setDescription(description); + + KeyProvider.KeyVersion keyVersion = (material != null) + ? provider.createKey(name, Base64.decodeBase64(material), options) + : provider.createKey(name, options); + + provider.flush(); + + KMSAudit.ok(user, CREATE_KEY, name, "UserProvidedMaterial:" + + (material != null) + " Description:" + description); + + if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user.getName())) { + keyVersion = removeKeyMaterial(keyVersion); + } + Map json = KMSServerJSONUtils.toJSON(keyVersion); + String requestURL = KMSMDCFilter.getURL(); + int idx = requestURL.lastIndexOf(KMSRESTConstants.KEYS_RESOURCE); + requestURL = requestURL.substring(0, idx); + String keyURL = requestURL + KMSRESTConstants.KEY_RESOURCE + "/" + name; + return Response.created(getKeyURI(name)).type(MediaType.APPLICATION_JSON). 
+ header("Location", keyURL).entity(json).build(); + } + + @DELETE + @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}") + public Response deleteKey(@Context SecurityContext securityContext, + @PathParam("name") String name) throws Exception { + KMSWebApp.getAdminCallsMeter().mark(); + Principal user = getPrincipal(securityContext); + assertAccess(KMSACLs.Type.DELETE, user, DELETE_KEY, name); + KMSClientProvider.checkNotEmpty(name, "name"); + provider.deleteKey(name); + provider.flush(); + + KMSAudit.ok(user, DELETE_KEY, name, ""); + + return Response.ok().build(); + } + + @POST + @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}") + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response rolloverKey(@Context SecurityContext securityContext, + @PathParam("name") String name, Map jsonMaterial) + throws Exception { + KMSWebApp.getAdminCallsMeter().mark(); + Principal user = getPrincipal(securityContext); + assertAccess(KMSACLs.Type.ROLLOVER, user, ROLL_NEW_VERSION, name); + KMSClientProvider.checkNotEmpty(name, "name"); + String material = (String) + jsonMaterial.get(KMSRESTConstants.MATERIAL_FIELD); + if (material != null) { + assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user, + ROLL_NEW_VERSION + " with user provided material", name); + } + KeyProvider.KeyVersion keyVersion = (material != null) + ? 
provider.rollNewVersion(name, Base64.decodeBase64(material)) + : provider.rollNewVersion(name); + + provider.flush(); + + KMSAudit.ok(user, ROLL_NEW_VERSION, name, "UserProvidedMaterial:" + + (material != null) + " NewVersion:" + keyVersion.getVersionName()); + + if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user.getName())) { + keyVersion = removeKeyMaterial(keyVersion); + } + Map json = KMSServerJSONUtils.toJSON(keyVersion); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + + @GET + @Path(KMSRESTConstants.KEYS_METADATA_RESOURCE) + @Produces(MediaType.APPLICATION_JSON) + public Response getKeysMetadata(@Context SecurityContext securityContext, + @QueryParam(KMSRESTConstants.KEY_OP) List keyNamesList) + throws Exception { + KMSWebApp.getAdminCallsMeter().mark(); + Principal user = getPrincipal(securityContext); + String[] keyNames = keyNamesList.toArray(new String[keyNamesList.size()]); + String names = StringUtils.arrayToString(keyNames); + assertAccess(KMSACLs.Type.GET_METADATA, user, GET_KEYS_METADATA, names); + KeyProvider.Metadata[] keysMeta = provider.getKeysMetadata(keyNames); + Object json = KMSServerJSONUtils.toJSON(keyNames, keysMeta); + KMSAudit.ok(user, GET_KEYS_METADATA, names, ""); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + + @GET + @Path(KMSRESTConstants.KEYS_NAMES_RESOURCE) + @Produces(MediaType.APPLICATION_JSON) + public Response getKeyNames(@Context SecurityContext securityContext) + throws Exception { + KMSWebApp.getAdminCallsMeter().mark(); + Principal user = getPrincipal(securityContext); + assertAccess(KMSACLs.Type.GET_KEYS, user, GET_KEYS, "*"); + Object json = provider.getKeys(); + KMSAudit.ok(user, GET_KEYS, "*", ""); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + + @GET + @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}") + public Response getKey(@Context SecurityContext securityContext, + @PathParam("name") String name) 
+ throws Exception { + return getMetadata(securityContext, name); + } + + @GET + @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + + KMSRESTConstants.METADATA_SUB_RESOURCE) + @Produces(MediaType.APPLICATION_JSON) + public Response getMetadata(@Context SecurityContext securityContext, + @PathParam("name") String name) + throws Exception { + Principal user = getPrincipal(securityContext); + KMSClientProvider.checkNotEmpty(name, "name"); + KMSWebApp.getAdminCallsMeter().mark(); + assertAccess(KMSACLs.Type.GET_METADATA, user, GET_METADATA, name); + Object json = KMSServerJSONUtils.toJSON(name, provider.getMetadata(name)); + KMSAudit.ok(user, GET_METADATA, name, ""); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + + @GET + @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + + KMSRESTConstants.CURRENT_VERSION_SUB_RESOURCE) + @Produces(MediaType.APPLICATION_JSON) + public Response getCurrentVersion(@Context SecurityContext securityContext, + @PathParam("name") String name) + throws Exception { + Principal user = getPrincipal(securityContext); + KMSClientProvider.checkNotEmpty(name, "name"); + KMSWebApp.getKeyCallsMeter().mark(); + assertAccess(KMSACLs.Type.GET, user, GET_CURRENT_KEY, name); + Object json = KMSServerJSONUtils.toJSON(provider.getCurrentKey(name)); + KMSAudit.ok(user, GET_CURRENT_KEY, name, ""); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + + @GET + @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}") + @Produces(MediaType.APPLICATION_JSON) + public Response getKeyVersion(@Context SecurityContext securityContext, + @PathParam("versionName") String versionName) + throws Exception { + Principal user = getPrincipal(securityContext); + KMSClientProvider.checkNotEmpty(versionName, "versionName"); + KMSWebApp.getKeyCallsMeter().mark(); + assertAccess(KMSACLs.Type.GET, user, GET_KEY_VERSION, versionName); + Object json = 
KMSServerJSONUtils.toJSON(provider.getKeyVersion(versionName)); + KMSAudit.ok(user, GET_KEY_VERSION, versionName, ""); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + + @GET + @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + + KMSRESTConstants.VERSIONS_SUB_RESOURCE) + @Produces(MediaType.APPLICATION_JSON) + public Response getKeyVersions(@Context SecurityContext securityContext, + @PathParam("name") String name) + throws Exception { + Principal user = getPrincipal(securityContext); + KMSClientProvider.checkNotEmpty(name, "name"); + KMSWebApp.getKeyCallsMeter().mark(); + assertAccess(KMSACLs.Type.GET, user, GET_KEY_VERSIONS, name); + Object json = KMSServerJSONUtils.toJSON(provider.getKeyVersions(name)); + KMSAudit.ok(user, GET_KEY_VERSIONS, name, ""); + return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build(); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java new file mode 100644 index 00000000000..d04a7142bd0 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.AccessControlList; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +/** + * Provides access to the AccessControlLists used by KMS, + * hot-reloading them if the kms-acls.xml file where the ACLs + * are defined has been updated. + */ +public class KMSACLs implements Runnable { + private static final Logger LOG = LoggerFactory.getLogger(KMSACLs.class); + + + public enum Type { + CREATE, DELETE, ROLLOVER, GET, GET_KEYS, GET_METADATA, SET_KEY_MATERIAL; + + public String getConfigKey() { + return KMSConfiguration.CONFIG_PREFIX + "acl." 
+ this.toString(); + } + } + + public static final String ACL_DEFAULT = AccessControlList.WILDCARD_ACL_VALUE; + + public static final int RELOADER_SLEEP_MILLIS = 1000; + + Map acls; + private ReadWriteLock lock; + private ScheduledExecutorService executorService; + private long lastReload; + + KMSACLs(Configuration conf) { + lock = new ReentrantReadWriteLock(); + if (conf == null) { + conf = loadACLs(); + } + setACLs(conf); + } + + public KMSACLs() { + this(null); + } + + private void setACLs(Configuration conf) { + lock.writeLock().lock(); + try { + acls = new HashMap(); + for (Type aclType : Type.values()) { + String aclStr = conf.get(aclType.getConfigKey(), ACL_DEFAULT); + acls.put(aclType, new AccessControlList(aclStr)); + LOG.info("'{}' ACL '{}'", aclType, aclStr); + } + } finally { + lock.writeLock().unlock(); + } + } + + @Override + public void run() { + try { + if (KMSConfiguration.isACLsFileNewer(lastReload)) { + setACLs(loadACLs()); + } + } catch (Exception ex) { + LOG.warn("Could not reload ACLs file: " + ex.toString(), ex); + } + } + + public synchronized void startReloader() { + if (executorService == null) { + executorService = Executors.newScheduledThreadPool(1); + executorService.scheduleAtFixedRate(this, RELOADER_SLEEP_MILLIS, + RELOADER_SLEEP_MILLIS, TimeUnit.MILLISECONDS); + } + } + + public synchronized void stopReloader() { + if (executorService != null) { + executorService.shutdownNow(); + executorService = null; + } + } + + private Configuration loadACLs() { + LOG.debug("Loading ACLs file"); + lastReload = System.currentTimeMillis(); + Configuration conf = KMSConfiguration.getACLsConf(); + // triggering the resource loading. 
+ conf.get(Type.CREATE.getConfigKey()); + return conf; + } + + public boolean hasAccess(Type type, String user) { + UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user); + AccessControlList acl = null; + lock.readLock().lock(); + try { + acl = acls.get(type); + } finally { + lock.readLock().unlock(); + } + return acl.isUserAllowed(ugi); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java new file mode 100644 index 00000000000..e212d7d97a1 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.security.Principal; + +/** + * Provides convenience methods for audit logging consistently the different + * types of events. 
+ */ +public class KMSAudit { + public static final String KMS_LOGGER_NAME = "kms-audit"; + + private static Logger AUDIT_LOG = LoggerFactory.getLogger(KMS_LOGGER_NAME); + + private static void op(String status, String op, Principal user, String key, + String extraMsg) { + AUDIT_LOG.info("Status:{} User:{} Op:{} Name:{}{}", status, user.getName(), + op, key, extraMsg); + } + + public static void ok(Principal user, String op, String key, + String extraMsg) { + op("OK", op, user, key, extraMsg); + } + + public static void unauthorized(Principal user, String op, String key) { + op("UNAUTHORIZED", op, user, key, ""); + } + + public static void error(Principal user, String method, String url, + String extraMsg) { + AUDIT_LOG.info("Status:ERROR User:{} Method:{} URL:{} Exception:'{}'", + user.getName(), method, url, extraMsg); + } + + public static void unauthenticated(String remoteHost, String method, + String url, String extraMsg) { + AUDIT_LOG.info( + "Status:UNAUTHENTICATED RemoteHost:{} Method:{} URL:{} ErrorMsg:'{}'", + remoteHost, method, url, extraMsg); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java new file mode 100644 index 00000000000..f1872a24e8b --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java @@ -0,0 +1,123 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.authentication.server.AuthenticationFilter; + +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpServletResponseWrapper; +import java.io.IOException; +import java.util.Map; +import java.util.Properties; + +/** + * Authentication filter that takes the configuration from the KMS configuration + * file. 
+ */ +@InterfaceAudience.Private +public class KMSAuthenticationFilter extends AuthenticationFilter { + private static final String CONF_PREFIX = KMSConfiguration.CONFIG_PREFIX + + "authentication."; + + @Override + protected Properties getConfiguration(String configPrefix, + FilterConfig filterConfig) { + Properties props = new Properties(); + Configuration conf = KMSWebApp.getConfiguration(); + for (Map.Entry entry : conf) { + String name = entry.getKey(); + if (name.startsWith(CONF_PREFIX)) { + String value = conf.get(name); + name = name.substring(CONF_PREFIX.length()); + props.setProperty(name, value); + } + } + return props; + } + + private static class KMSResponse extends HttpServletResponseWrapper { + public int statusCode; + public String msg; + + public KMSResponse(ServletResponse response) { + super((HttpServletResponse)response); + } + + @Override + public void setStatus(int sc) { + statusCode = sc; + super.setStatus(sc); + } + + @Override + public void sendError(int sc, String msg) throws IOException { + statusCode = sc; + this.msg = msg; + super.sendError(sc, msg); + } + + @Override + public void sendError(int sc) throws IOException { + statusCode = sc; + super.sendError(sc); + } + + @Override + public void setStatus(int sc, String sm) { + statusCode = sc; + msg = sm; + super.setStatus(sc, sm); + } + } + + @Override + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain filterChain) throws IOException, ServletException { + KMSResponse kmsResponse = new KMSResponse(response); + super.doFilter(request, kmsResponse, filterChain); + + if (kmsResponse.statusCode != HttpServletResponse.SC_OK && + kmsResponse.statusCode != HttpServletResponse.SC_CREATED && + kmsResponse.statusCode != HttpServletResponse.SC_UNAUTHORIZED) { + KMSWebApp.getInvalidCallsMeter().mark(); + } + + // HttpServletResponse.SC_UNAUTHORIZED is because the request does not + // belong to an authenticated user. 
+ if (kmsResponse.statusCode == HttpServletResponse.SC_UNAUTHORIZED) { + KMSWebApp.getUnauthenticatedCallsMeter().mark(); + String method = ((HttpServletRequest) request).getMethod(); + StringBuffer requestURL = ((HttpServletRequest) request).getRequestURL(); + String queryString = ((HttpServletRequest) request).getQueryString(); + if (queryString != null) { + requestURL.append("?").append(queryString); + } + KMSAudit.unauthenticated(request.getRemoteHost(), method, + requestURL.toString(), kmsResponse.msg); + } + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSCacheKeyProvider.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSCacheKeyProvider.java new file mode 100644 index 00000000000..835326fad50 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSCacheKeyProvider.java @@ -0,0 +1,180 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms.server; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import org.apache.hadoop.crypto.key.KeyProvider; + +import java.io.IOException; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +/** + * A KeyProvider proxy implementation providing a short lived + * cache for KeyVersions to avoid burst of requests to hit the + * underlying KeyProvider. + */ +public class KMSCacheKeyProvider extends KeyProvider { + private final KeyProvider provider; + private LoadingCache keyVersionCache; + private LoadingCache currentKeyCache; + + private static class KeyNotFoundException extends Exception { + private static final long serialVersionUID = 1L; + } + + public KMSCacheKeyProvider(KeyProvider prov, long timeoutMillis) { + this.provider = prov; + keyVersionCache = CacheBuilder.newBuilder().expireAfterAccess(timeoutMillis, + TimeUnit.MILLISECONDS).build(new CacheLoader() { + @Override + public KeyVersion load(String key) throws Exception { + KeyVersion kv = provider.getKeyVersion(key); + if (kv == null) { + throw new KeyNotFoundException(); + } + return kv; + } + }); + // for current key we don't want to go stale for more than 1 sec + currentKeyCache = CacheBuilder.newBuilder().expireAfterWrite(1000, + TimeUnit.MILLISECONDS).build(new CacheLoader() { + @Override + public KeyVersion load(String key) throws Exception { + KeyVersion kv = provider.getCurrentKey(key); + if (kv == null) { + throw new KeyNotFoundException(); + } + return kv; + } + }); + } + + @Override + public KeyVersion getCurrentKey(String name) throws IOException { + try { + return currentKeyCache.get(name); + } catch (ExecutionException ex) { + Throwable cause = ex.getCause(); + if (cause instanceof 
KeyNotFoundException) { + return null; + } else if (cause instanceof IOException) { + throw (IOException) cause; + } else { + throw new IOException(cause); + } + } + } + + @Override + public KeyVersion getKeyVersion(String versionName) + throws IOException { + try { + return keyVersionCache.get(versionName); + } catch (ExecutionException ex) { + Throwable cause = ex.getCause(); + if (cause instanceof KeyNotFoundException) { + return null; + } else if (cause instanceof IOException) { + throw (IOException) cause; + } else { + throw new IOException(cause); + } + } + } + + @Override + public List getKeys() throws IOException { + return provider.getKeys(); + } + + @Override + public List getKeyVersions(String name) + throws IOException { + return provider.getKeyVersions(name); + } + + @Override + public Metadata getMetadata(String name) throws IOException { + return provider.getMetadata(name); + } + + @Override + public KeyVersion createKey(String name, byte[] material, + Options options) throws IOException { + return provider.createKey(name, material, options); + } + + @Override + public KeyVersion createKey(String name, + Options options) + throws NoSuchAlgorithmException, IOException { + return provider.createKey(name, options); + } + + @Override + public void deleteKey(String name) throws IOException { + Metadata metadata = provider.getMetadata(name); + List versions = new ArrayList(metadata.getVersions()); + for (int i = 0; i < metadata.getVersions(); i++) { + versions.add(KeyProvider.buildVersionName(name, i)); + } + provider.deleteKey(name); + currentKeyCache.invalidate(name); + keyVersionCache.invalidateAll(versions); + } + + @Override + public KeyVersion rollNewVersion(String name, byte[] material) + throws IOException { + KeyVersion key = provider.rollNewVersion(name, material); + currentKeyCache.invalidate(name); + return key; + } + + @Override + public KeyVersion rollNewVersion(String name) + throws NoSuchAlgorithmException, IOException { + KeyVersion key = 
provider.rollNewVersion(name); + currentKeyCache.invalidate(name); + return key; + } + + @Override + public void flush() throws IOException { + provider.flush(); + } + + @Override + public Metadata[] getKeysMetadata(String ... keyNames) + throws IOException { + return provider.getKeysMetadata(keyNames); + } + + @Override + public boolean isTransient() { + return provider.isTransient(); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java new file mode 100644 index 00000000000..b2209d4cc77 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.conf.Configuration; + +import java.io.File; +import java.net.MalformedURLException; +import java.net.URL; + +/** + * Utility class to load KMS configuration files. 
+ */ +public class KMSConfiguration { + + public static final String KMS_CONFIG_DIR = "kms.config.dir"; + public static final String KMS_SITE_XML = "kms-site.xml"; + public static final String KMS_ACLS_XML = "kms-acls.xml"; + + public static final String CONFIG_PREFIX = "hadoop.kms."; + + public static final String KEY_CACHE_TIMEOUT_KEY = CONFIG_PREFIX + + "cache.timeout.ms"; + public static final long KEY_CACHE_TIMEOUT_DEFAULT = 10 * 1000; // 10 secs + + static Configuration getConfiguration(boolean loadHadoopDefaults, + String ... resources) { + Configuration conf = new Configuration(loadHadoopDefaults); + String confDir = System.getProperty(KMS_CONFIG_DIR); + if (confDir != null) { + try { + if (!confDir.startsWith("/")) { + throw new RuntimeException("System property '" + KMS_CONFIG_DIR + + "' must be an absolute path: " + confDir); + } + if (!confDir.endsWith("/")) { + confDir += "/"; + } + for (String resource : resources) { + conf.addResource(new URL("file://" + confDir + resource)); + } + } catch (MalformedURLException ex) { + throw new RuntimeException(ex); + } + } else { + for (String resource : resources) { + conf.addResource(resource); + } + } + return conf; + } + + public static Configuration getKMSConf() { + return getConfiguration(true, "core-site.xml", KMS_SITE_XML); + } + + public static Configuration getACLsConf() { + return getConfiguration(false, KMS_ACLS_XML); + } + + public static boolean isACLsFileNewer(long time) { + boolean newer = false; + String confDir = System.getProperty(KMS_CONFIG_DIR); + if (confDir != null) { + if (!confDir.startsWith("/")) { + throw new RuntimeException("System property '" + KMS_CONFIG_DIR + + "' must be an absolute path: " + confDir); + } + if (!confDir.endsWith("/")) { + confDir += "/"; + } + File f = new File(confDir, KMS_ACLS_XML); + // at least 100ms newer than time, we do this to ensure the file + // has been properly closed/flushed + newer = f.lastModified() - time > 100; + } + return newer; + } +} diff 
--git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSExceptionsProvider.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSExceptionsProvider.java new file mode 100644 index 00000000000..1c4c32ddb7f --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSExceptionsProvider.java @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.classification.InterfaceAudience; + +import com.sun.jersey.api.container.ContainerException; +import org.apache.hadoop.crypto.key.kms.KMSRESTConstants; +import org.apache.hadoop.security.AccessControlException; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.ext.ExceptionMapper; +import javax.ws.rs.ext.Provider; +import java.io.IOException; +import java.security.Principal; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Jersey provider that converts KMS exceptions into detailed HTTP errors. + */ +@Provider +@InterfaceAudience.Private +public class KMSExceptionsProvider implements ExceptionMapper { + private static Logger LOG = + LoggerFactory.getLogger(KMSExceptionsProvider.class); + + private static final String ENTER = System.getProperty("line.separator"); + + protected Response createResponse(Response.Status status, Throwable ex) { + Map json = new LinkedHashMap(); + json.put(KMSRESTConstants.ERROR_EXCEPTION_JSON, ex.getClass().getName()); + json.put(KMSRESTConstants.ERROR_MESSAGE_JSON, getOneLineMessage(ex)); + log(status, ex); + return Response.status(status).type(MediaType.APPLICATION_JSON). + entity(json).build(); + } + + protected String getOneLineMessage(Throwable exception) { + String message = exception.getMessage(); + if (message != null) { + int i = message.indexOf(ENTER); + if (i > -1) { + message = message.substring(0, i); + } + } + return message; + } + + /** + * Maps different exceptions thrown by KMS to HTTP status codes. 
+ */ + @Override + public Response toResponse(Exception exception) { + Response.Status status; + boolean doAudit = true; + Throwable throwable = exception; + if (exception instanceof ContainerException) { + throwable = exception.getCause(); + } + if (throwable instanceof SecurityException) { + status = Response.Status.FORBIDDEN; + } else if (throwable instanceof AuthenticationException) { + status = Response.Status.FORBIDDEN; + // we don't audit here because we did it already when checking access + doAudit = false; + } else if (throwable instanceof AccessControlException) { + status = Response.Status.FORBIDDEN; + } else if (exception instanceof IOException) { + status = Response.Status.INTERNAL_SERVER_ERROR; + } else if (exception instanceof UnsupportedOperationException) { + status = Response.Status.BAD_REQUEST; + } else if (exception instanceof IllegalArgumentException) { + status = Response.Status.BAD_REQUEST; + } else { + status = Response.Status.INTERNAL_SERVER_ERROR; + } + if (doAudit) { + KMSAudit.error(KMSMDCFilter.getPrincipal(), KMSMDCFilter.getMethod(), + KMSMDCFilter.getURL(), getOneLineMessage(exception)); + } + return createResponse(status, throwable); + } + + protected void log(Response.Status status, Throwable ex) { + Principal principal = KMSMDCFilter.getPrincipal(); + String method = KMSMDCFilter.getMethod(); + String url = KMSMDCFilter.getURL(); + String msg = getOneLineMessage(ex); + LOG.warn("User:{} Method:{} URL:{} Response:{}-{}", principal, method, url, + status, msg, ex); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java new file mode 100644 index 00000000000..d3e0064db82 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.codehaus.jackson.map.ObjectMapper; + +import javax.ws.rs.Consumes; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.ext.MessageBodyReader; +import javax.ws.rs.ext.Provider; +import java.io.IOException; +import java.io.InputStream; +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; +import java.util.Map; + +@Provider +@Consumes(MediaType.APPLICATION_JSON) +@InterfaceAudience.Private +public class KMSJSONReader implements MessageBodyReader { + + @Override + public boolean isReadable(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType) { + return type.isAssignableFrom(Map.class); + } + + @Override + public Map readFrom(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap httpHeaders, InputStream entityStream) + throws IOException, WebApplicationException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.readValue(entityStream, type); + } +} diff --git 
a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java new file mode 100644 index 00000000000..3674e7a87aa --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.codehaus.jackson.map.ObjectMapper; + +import javax.ws.rs.Produces; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.ext.MessageBodyWriter; +import javax.ws.rs.ext.Provider; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; +import java.util.List; +import java.util.Map; + +/** + * Jersey provider that converts Maps and Lists + * to their JSON representation. 
+ */ +@Provider +@Produces(MediaType.APPLICATION_JSON) +@InterfaceAudience.Private +public class KMSJSONWriter implements MessageBodyWriter { + + @Override + public boolean isWriteable(Class aClass, Type type, + Annotation[] annotations, MediaType mediaType) { + return Map.class.isAssignableFrom(aClass) || + List.class.isAssignableFrom(aClass); + } + + @Override + public long getSize(Object obj, Class aClass, Type type, + Annotation[] annotations, MediaType mediaType) { + return -1; + } + + @Override + public void writeTo(Object obj, Class aClass, Type type, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap stringObjectMultivaluedMap, + OutputStream outputStream) throws IOException, WebApplicationException { + Writer writer = new OutputStreamWriter(outputStream); + ObjectMapper jsonMapper = new ObjectMapper(); + jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSMDCFilter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSMDCFilter.java new file mode 100644 index 00000000000..8d24c7ce476 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSMDCFilter.java @@ -0,0 +1,92 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
 You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.classification.InterfaceAudience; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import java.io.IOException; +import java.security.Principal; + +/** + * Servlet filter that captures context of the HTTP request to be used in the + * scope of KMS calls on the server side. + */ +@InterfaceAudience.Private +public class KMSMDCFilter implements Filter { + + private static class Data { + private Principal principal; + private String method; + private StringBuffer url; + + private Data(Principal principal, String method, StringBuffer url) { + this.principal = principal; + this.method = method; + this.url = url; + } + } + + private static ThreadLocal DATA_TL = new ThreadLocal(); + + public static Principal getPrincipal() { + return DATA_TL.get().principal; + } + + public static String getMethod() { + return DATA_TL.get().method; + } + + public static String getURL() { + return DATA_TL.get().url.toString(); + } + + @Override + public void init(FilterConfig config) throws ServletException { + } + + @Override + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain chain) + throws IOException, ServletException { + try { + DATA_TL.remove(); + Principal principal = ((HttpServletRequest) request).getUserPrincipal(); + String method = 
((HttpServletRequest) request).getMethod(); + StringBuffer requestURL = ((HttpServletRequest) request).getRequestURL(); + String queryString = ((HttpServletRequest) request).getQueryString(); + if (queryString != null) { + requestURL.append("?").append(queryString); + } + DATA_TL.set(new Data(principal, method, requestURL)); + chain.doFilter(request, response); + } finally { + DATA_TL.remove(); + } + } + + @Override + public void destroy() { + } +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java new file mode 100644 index 00000000000..cc995cdf12b --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java @@ -0,0 +1,80 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.crypto.key.KeyProvider; +import org.apache.hadoop.crypto.key.kms.KMSRESTConstants; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * JSON utility methods for the KMS. + */ +@InterfaceAudience.Private +public class KMSServerJSONUtils { + @SuppressWarnings("unchecked") + public static Map toJSON(KeyProvider.KeyVersion keyVersion) { + Map json = new LinkedHashMap(); + if (keyVersion != null) { + json.put(KMSRESTConstants.VERSION_NAME_FIELD, + keyVersion.getVersionName()); + json.put(KMSRESTConstants.MATERIAL_FIELD, keyVersion.getMaterial()); + } + return json; + } + + @SuppressWarnings("unchecked") + public static List toJSON(List keyVersions) { + List json = new ArrayList(); + if (keyVersions != null) { + for (KeyProvider.KeyVersion version : keyVersions) { + json.add(toJSON(version)); + } + } + return json; + } + + @SuppressWarnings("unchecked") + public static Map toJSON(String keyName, KeyProvider.Metadata meta) { + Map json = new LinkedHashMap(); + if (meta != null) { + json.put(KMSRESTConstants.NAME_FIELD, keyName); + json.put(KMSRESTConstants.CIPHER_FIELD, meta.getCipher()); + json.put(KMSRESTConstants.LENGTH_FIELD, meta.getBitLength()); + json.put(KMSRESTConstants.DESCRIPTION_FIELD, meta.getDescription()); + json.put(KMSRESTConstants.CREATED_FIELD, + meta.getCreated().getTime()); + json.put(KMSRESTConstants.VERSIONS_FIELD, + (long) meta.getVersions()); + } + return json; + } + + @SuppressWarnings("unchecked") + public static List toJSON(String[] keyNames, KeyProvider.Metadata[] metas) { + List json = new ArrayList(); + for (int i = 0; i < keyNames.length; i++) { + json.add(toJSON(keyNames[i], metas[i])); + } + return json; + } +} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java 
b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java new file mode 100644 index 00000000000..046753e55be --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java @@ -0,0 +1,214 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms.server; + +import com.codahale.metrics.JmxReporter; +import com.codahale.metrics.Meter; +import com.codahale.metrics.MetricRegistry; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.crypto.key.KeyProvider; +import org.apache.hadoop.crypto.key.KeyProviderFactory; +import org.apache.hadoop.http.HttpServer2; +import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.VersionInfo; +import org.apache.log4j.PropertyConfigurator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.bridge.SLF4JBridgeHandler; + +import javax.servlet.ServletContextEvent; +import javax.servlet.ServletContextListener; +import java.io.File; +import java.net.URL; +import java.util.List; + +@InterfaceAudience.Private +public class KMSWebApp implements ServletContextListener { + + private static final String LOG4J_PROPERTIES = "kms-log4j.properties"; + + private static final String METRICS_PREFIX = "hadoop.kms."; + private static final String ADMIN_CALLS_METER = METRICS_PREFIX + + "admin.calls.meter"; + private static final String KEY_CALLS_METER = METRICS_PREFIX + + "key.calls.meter"; + private static final String INVALID_CALLS_METER = METRICS_PREFIX + + "invalid.calls.meter"; + private static final String UNAUTHORIZED_CALLS_METER = METRICS_PREFIX + + "unauthorized.calls.meter"; + private static final String UNAUTHENTICATED_CALLS_METER = METRICS_PREFIX + + "unauthenticated.calls.meter"; + + private static Logger LOG; + private static MetricRegistry metricRegistry; + + private JmxReporter jmxReporter; + private static Configuration kmsConf; + private static KMSACLs acls; + private static Meter adminCallsMeter; + private static Meter keyCallsMeter; + private static Meter unauthorizedCallsMeter; + private static Meter unauthenticatedCallsMeter; + private static Meter invalidCallsMeter; + private static 
KeyProvider keyProvider; + + static { + SLF4JBridgeHandler.removeHandlersForRootLogger(); + SLF4JBridgeHandler.install(); + } + + private void initLogging(String confDir) { + if (System.getProperty("log4j.configuration") == null) { + System.setProperty("log4j.defaultInitOverride", "true"); + boolean fromClasspath = true; + File log4jConf = new File(confDir, LOG4J_PROPERTIES).getAbsoluteFile(); + if (log4jConf.exists()) { + PropertyConfigurator.configureAndWatch(log4jConf.getPath(), 1000); + fromClasspath = false; + } else { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + URL log4jUrl = cl.getResource(LOG4J_PROPERTIES); + if (log4jUrl != null) { + PropertyConfigurator.configure(log4jUrl); + } + } + LOG = LoggerFactory.getLogger(KMSWebApp.class); + LOG.debug("KMS log starting"); + if (fromClasspath) { + LOG.warn("Log4j configuration file '{}' not found", LOG4J_PROPERTIES); + LOG.warn("Logging with INFO level to standard output"); + } + } else { + LOG = LoggerFactory.getLogger(KMSWebApp.class); + } + } + + @Override + public void contextInitialized(ServletContextEvent sce) { + try { + String confDir = System.getProperty(KMSConfiguration.KMS_CONFIG_DIR); + if (confDir == null) { + throw new RuntimeException("System property '" + + KMSConfiguration.KMS_CONFIG_DIR + "' not defined"); + } + kmsConf = KMSConfiguration.getKMSConf(); + initLogging(confDir); + LOG.info("-------------------------------------------------------------"); + LOG.info(" Java runtime version : {}", System.getProperty( + "java.runtime.version")); + LOG.info(" KMS Hadoop Version: " + VersionInfo.getVersion()); + LOG.info("-------------------------------------------------------------"); + + acls = new KMSACLs(); + acls.startReloader(); + + metricRegistry = new MetricRegistry(); + jmxReporter = JmxReporter.forRegistry(metricRegistry).build(); + jmxReporter.start(); + adminCallsMeter = metricRegistry.register(ADMIN_CALLS_METER, new Meter()); + keyCallsMeter = 
metricRegistry.register(KEY_CALLS_METER, new Meter()); + invalidCallsMeter = metricRegistry.register(INVALID_CALLS_METER, + new Meter()); + unauthorizedCallsMeter = metricRegistry.register(UNAUTHORIZED_CALLS_METER, + new Meter()); + unauthenticatedCallsMeter = metricRegistry.register( + UNAUTHENTICATED_CALLS_METER, new Meter()); + + // this is required for the JMXJsonServlet to work properly. + // the JMXJsonServlet is behind the authentication filter, + // thus the '*' ACL. + sce.getServletContext().setAttribute(HttpServer2.CONF_CONTEXT_ATTRIBUTE, + kmsConf); + sce.getServletContext().setAttribute(HttpServer2.ADMINS_ACL, + new AccessControlList(AccessControlList.WILDCARD_ACL_VALUE)); + + // initializing the KeyProvider + + List providers = KeyProviderFactory.getProviders(kmsConf); + if (providers.isEmpty()) { + throw new IllegalStateException("No KeyProvider has been defined"); + } + if (providers.size() > 1) { + LOG.warn("There is more than one KeyProvider configured '{}', using " + + "the first provider", + kmsConf.get(KeyProviderFactory.KEY_PROVIDER_PATH)); + } + keyProvider = providers.get(0); + long timeOutMillis = + kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY, + KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT); + keyProvider = new KMSCacheKeyProvider(keyProvider, timeOutMillis); + + LOG.info("KMS Started"); + } catch (Throwable ex) { + System.out.println(); + System.out.println("ERROR: Hadoop KMS could not be started"); + System.out.println(); + System.out.println("REASON: " + ex.toString()); + System.out.println(); + System.out.println("Stacktrace:"); + System.out.println("---------------------------------------------------"); + ex.printStackTrace(System.out); + System.out.println("---------------------------------------------------"); + System.out.println(); + System.exit(1); + } + } + + @Override + public void contextDestroyed(ServletContextEvent sce) { + acls.stopReloader(); + jmxReporter.stop(); + jmxReporter.close(); + metricRegistry = null; + 
LOG.info("KMS Stopped"); + } + + public static Configuration getConfiguration() { + return new Configuration(kmsConf); + } + + public static KMSACLs getACLs() { + return acls; + } + + public static Meter getAdminCallsMeter() { + return adminCallsMeter; + } + + public static Meter getKeyCallsMeter() { + return keyCallsMeter; + } + + public static Meter getInvalidCallsMeter() { + return invalidCallsMeter; + } + + public static Meter getUnauthorizedCallsMeter() { + return unauthorizedCallsMeter; + } + + public static Meter getUnauthenticatedCallsMeter() { + return unauthenticatedCallsMeter; + } + + public static KeyProvider getKeyProvider() { + return keyProvider; + } +} diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh new file mode 100644 index 00000000000..3e203995de4 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh @@ -0,0 +1,181 @@ +#!/bin/bash +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# resolve links - $0 may be a softlink +PRG="${0}" + +while [ -h "${PRG}" ]; do + ls=`ls -ld "${PRG}"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "${PRG}"`/"$link" + fi +done + +BASEDIR=`dirname ${PRG}` +BASEDIR=`cd ${BASEDIR}/..;pwd` + + +function print() { + if [ "${KMS_SILENT}" != "true" ]; then + echo "$@" + fi +} + +# if KMS_HOME is already set warn it will be ignored +# +if [ "${KMS_HOME}" != "" ]; then + echo "WARNING: current setting of KMS_HOME ignored" +fi + +print + +# setting KMS_HOME to the installation dir, it cannot be changed +# +export KMS_HOME=${BASEDIR} +kms_home=${KMS_HOME} +print "Setting KMS_HOME: ${KMS_HOME}" + +# if the installation has a env file, source it +# this is for native packages installations +# +if [ -e "${KMS_HOME}/bin/kms-env.sh" ]; then + print "Sourcing: ${KMS_HOME}/bin/kms-env.sh" + source ${KMS_HOME}/bin/kms-env.sh + grep "^ *export " ${KMS_HOME}/bin/kms-env.sh | sed 's/ *export/ setting/' +fi + +# verify that the sourced env file didn't change KMS_HOME +# if so, warn and revert +# +if [ "${KMS_HOME}" != "${kms_home}" ]; then + print "WARN: KMS_HOME resetting to ''${KMS_HOME}'' ignored" + export KMS_HOME=${kms_home} + print " using KMS_HOME: ${KMS_HOME}" +fi + +if [ "${KMS_CONFIG}" = "" ]; then + export KMS_CONFIG=${KMS_HOME}/etc/hadoop + print "Setting KMS_CONFIG: ${KMS_CONFIG}" +else + print "Using KMS_CONFIG: ${KMS_CONFIG}" +fi +kms_config=${KMS_CONFIG} + +# if the configuration dir has a env file, source it +# +if [ -e "${KMS_CONFIG}/kms-env.sh" ]; then + print "Sourcing: ${KMS_CONFIG}/kms-env.sh" + source ${KMS_CONFIG}/kms-env.sh + grep "^ *export " ${KMS_CONFIG}/kms-env.sh | sed 's/ *export/ setting/' +fi + +# verify that the sourced env file didn't change KMS_HOME +# if so, warn and revert +# +if [ "${KMS_HOME}" != "${kms_home}" ]; then + echo "WARN: KMS_HOME resetting to ''${KMS_HOME}'' ignored" + export KMS_HOME=${kms_home} +fi + +# 
verify that the sourced env file didn't change KMS_CONFIG +# if so, warn and revert +# +if [ "${KMS_CONFIG}" != "${kms_config}" ]; then + echo "WARN: KMS_CONFIG resetting to ''${KMS_CONFIG}'' ignored" + export KMS_CONFIG=${kms_config} +fi + +if [ "${KMS_LOG}" = "" ]; then + export KMS_LOG=${KMS_HOME}/logs + print "Setting KMS_LOG: ${KMS_LOG}" +else + print "Using KMS_LOG: ${KMS_LOG}" +fi + +if [ ! -f ${KMS_LOG} ]; then + mkdir -p ${KMS_LOG} +fi + +if [ "${KMS_TEMP}" = "" ]; then + export KMS_TEMP=${KMS_HOME}/temp + print "Setting KMS_TEMP: ${KMS_TEMP}" +else + print "Using KMS_TEMP: ${KMS_TEMP}" +fi + +if [ ! -f ${KMS_TEMP} ]; then + mkdir -p ${KMS_TEMP} +fi + +if [ "${KMS_HTTP_PORT}" = "" ]; then + export KMS_HTTP_PORT=16000 + print "Setting KMS_HTTP_PORT: ${KMS_HTTP_PORT}" +else + print "Using KMS_HTTP_PORT: ${KMS_HTTP_PORT}" +fi + +if [ "${KMS_ADMIN_PORT}" = "" ]; then + export KMS_ADMIN_PORT=`expr $KMS_HTTP_PORT + 1` + print "Setting KMS_ADMIN_PORT: ${KMS_ADMIN_PORT}" +else + print "Using KMS_ADMIN_PORT: ${KMS_ADMIN_PORT}" +fi + +if [ "${KMS_SSL_KEYSTORE_FILE}" = "" ]; then + export KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore + print "Setting KMS_SSL_KEYSTORE_FILE: ${KMS_SSL_KEYSTORE_FILE}" +else + print "Using KMS_SSL_KEYSTORE_FILE: ${KMS_SSL_KEYSTORE_FILE}" +fi + +if [ "${KMS_SSL_KEYSTORE_PASS}" = "" ]; then + export KMS_SSL_KEYSTORE_PASS=password + print "Setting KMS_SSL_KEYSTORE_PASS: ${KMS_SSL_KEYSTORE_PASS}" +else + print "Using KMS_SSL_KEYSTORE_PASS: ${KMS_SSL_KEYSTORE_PASS}" +fi + +if [ "${CATALINA_BASE}" = "" ]; then + export CATALINA_BASE=${KMS_HOME}/share/hadoop/kms/tomcat + print "Setting CATALINA_BASE: ${CATALINA_BASE}" +else + print "Using CATALINA_BASE: ${CATALINA_BASE}" +fi + +if [ "${KMS_CATALINA_HOME}" = "" ]; then + export KMS_CATALINA_HOME=${CATALINA_BASE} + print "Setting KMS_CATALINA_HOME: ${KMS_CATALINA_HOME}" +else + print "Using KMS_CATALINA_HOME: ${KMS_CATALINA_HOME}" +fi + +if [ "${CATALINA_OUT}" = "" ]; then + export 
CATALINA_OUT=${KMS_LOG}/kms-catalina.out + print "Setting CATALINA_OUT: ${CATALINA_OUT}" +else + print "Using CATALINA_OUT: ${CATALINA_OUT}" +fi + +if [ "${CATALINA_PID}" = "" ]; then + export CATALINA_PID=/tmp/kms.pid + print "Setting CATALINA_PID: ${CATALINA_PID}" +else + print "Using CATALINA_PID: ${CATALINA_PID}" +fi + +print diff --git a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh new file mode 100644 index 00000000000..588c2c87acd --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# resolve links - $0 may be a softlink +PRG="${0}" + +while [ -h "${PRG}" ]; do + ls=`ls -ld "${PRG}"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "${PRG}"`/"$link" + fi +done + +BASEDIR=`dirname ${PRG}` +BASEDIR=`cd ${BASEDIR}/..;pwd` + +KMS_SILENT=${KMS_SILENT:-true} + +source ${HADOOP_LIBEXEC_DIR:-${BASEDIR}/libexec}/kms-config.sh + +# The Java System property 'kms.http.port' it is not used by Kms, +# it is used in Tomcat's server.xml configuration file +# +print "Using CATALINA_OPTS: ${CATALINA_OPTS}" + +catalina_opts="-Dkms.home.dir=${KMS_HOME}"; +catalina_opts="${catalina_opts} -Dkms.config.dir=${KMS_CONFIG}"; +catalina_opts="${catalina_opts} -Dkms.log.dir=${KMS_LOG}"; +catalina_opts="${catalina_opts} -Dkms.temp.dir=${KMS_TEMP}"; +catalina_opts="${catalina_opts} -Dkms.admin.port=${KMS_ADMIN_PORT}"; +catalina_opts="${catalina_opts} -Dkms.http.port=${KMS_HTTP_PORT}"; +catalina_opts="${catalina_opts} -Dkms.ssl.keystore.file=${KMS_SSL_KEYSTORE_FILE}"; +catalina_opts="${catalina_opts} -Dkms.ssl.keystore.pass=${KMS_SSL_KEYSTORE_PASS}"; + +print "Adding to CATALINA_OPTS: ${catalina_opts}" + +export CATALINA_OPTS="${CATALINA_OPTS} ${catalina_opts}" + +# A bug in catalina.sh script does not use CATALINA_OPTS for stopping the server +# +if [ "${1}" = "stop" ]; then + export JAVA_OPTS=${CATALINA_OPTS} +fi + +exec ${KMS_CATALINA_HOME}/bin/catalina.sh "$@" diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml b/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml new file mode 100644 index 00000000000..9d0ae0db4c8 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml @@ -0,0 +1,16 @@ + + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html b/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html new file mode 100644 index 00000000000..e9e45121b15 --- /dev/null +++ 
b/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html @@ -0,0 +1,27 @@ + + + + Hadoop KMS + + +

Hadoop KMS

+ + + diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties b/hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties new file mode 100644 index 00000000000..75627192373 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties @@ -0,0 +1,67 @@ +# +# All Rights Reserved. +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler + +.handlers = 1catalina.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler + +############################################################ +# Handler specific properties. +# Describes specific configuration info for Handlers. +############################################################ + +1catalina.org.apache.juli.FileHandler.level = FINE +1catalina.org.apache.juli.FileHandler.directory = ${kms.log.dir} +1catalina.org.apache.juli.FileHandler.prefix = kms-catalina. 
+ +2localhost.org.apache.juli.FileHandler.level = FINE +2localhost.org.apache.juli.FileHandler.directory = ${kms.log.dir} +2localhost.org.apache.juli.FileHandler.prefix = kms-localhost. + +3manager.org.apache.juli.FileHandler.level = FINE +3manager.org.apache.juli.FileHandler.directory = ${kms.log.dir} +3manager.org.apache.juli.FileHandler.prefix = kms-manager. + +4host-manager.org.apache.juli.FileHandler.level = FINE +4host-manager.org.apache.juli.FileHandler.directory = ${kms.log.dir} +4host-manager.org.apache.juli.FileHandler.prefix = kms-host-manager. + +java.util.logging.ConsoleHandler.level = FINE +java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter + + +############################################################ +# Facility specific properties. +# Provides extra control for each logger. +############################################################ + +org.apache.catalina.core.ContainerBase.[Catalina].[localhost].level = INFO +org.apache.catalina.core.ContainerBase.[Catalina].[localhost].handlers = 2localhost.org.apache.juli.FileHandler + +org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].level = INFO +org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].handlers = 3manager.org.apache.juli.FileHandler + +org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].level = INFO +org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].handlers = 4host-manager.org.apache.juli.FileHandler + +# For example, set the com.xyz.foo logger to only log SEVERE +# messages: +#org.apache.catalina.startup.ContextConfig.level = FINE +#org.apache.catalina.startup.HostConfig.level = FINE +#org.apache.catalina.session.ManagerBase.level = FINE +#org.apache.catalina.core.AprLifecycleListener.level=FINE diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml b/hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml new file mode 100644 index 
00000000000..d070b58f9ed --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml b/hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml new file mode 100644 index 00000000000..3a464ca47cf --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml @@ -0,0 +1,135 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml b/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml new file mode 100644 index 00000000000..4ec5cffc8a6 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml @@ -0,0 +1,78 @@ + + + + + + org.apache.hadoop.crypto.key.kms.server.KMSWebApp + + + + webservices-driver + com.sun.jersey.spi.container.servlet.ServletContainer + + com.sun.jersey.config.property.packages + org.apache.hadoop.crypto.key.kms.server + + + + + 1 + + + + jmx-servlet + org.apache.hadoop.jmx.JMXJsonServlet + + + + webservices-driver + /* + + + + jmx-servlet + /jmx + + + + authFilter + org.apache.hadoop.crypto.key.kms.server.KMSAuthenticationFilter + + + + MDCFilter + org.apache.hadoop.crypto.key.kms.server.KMSMDCFilter + + + + authFilter + /* + + + + MDCFilter + /* + + + diff --git a/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm b/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm new file mode 100644 index 00000000000..e4d445dcccf --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm @@ -0,0 +1,487 @@ +~~ Licensed under the Apache License, Version 2.0 (the "License"); +~~ you may not use this file except in compliance with the License. 
+~~ You may obtain a copy of the License at +~~ +~~ http://www.apache.org/licenses/LICENSE-2.0 +~~ +~~ Unless required by applicable law or agreed to in writing, software +~~ distributed under the License is distributed on an "AS IS" BASIS, +~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +~~ See the License for the specific language governing permissions and +~~ limitations under the License. + + --- + Hadoop KMS - Documentation Sets ${project.version} + --- + --- + ${maven.build.timestamp} + +Hadoop Key Management Server (KMS) - Documentation Sets ${project.version} + + Hadoop KMS is a cryptographic key management server based on Hadoop's + <> API. + + It provides client and server components which communicate over + HTTP using a REST API. + + The client is a KeyProvider implementation that interacts with the KMS + using the KMS HTTP REST API. + + KMS and its client have built-in security and they support HTTP SPNEGO + Kerberos authentication and HTTPS secure transport. + + KMS is a Java web-application and it runs using a pre-configured Tomcat + bundled with the Hadoop distribution. + +* KMS Client Configuration + + The KMS client <<>> uses the <> scheme, and the embedded + URL must be the URL of the KMS. For example, for a KMS running + on <<>>, the KeyProvider URI is + <<>>. And, for a KMS running on + <<>>, the KeyProvider URI is + <<>> + +* KMS + +** KMS Configuration + + Configure the KMS backing KeyProvider properties + in the <<>> configuration file: + ++---+ + + hadoop.security.key.provider.path + jceks://file@/${user.home}/kms.keystore + + + + hadoop.security.keystore.java-keystore-provider.password-file + kms.keystore.password + ++---+ + + The password file is looked up in Hadoop's configuration directory via the + classpath. + + NOTE: You need to restart the KMS for the configuration changes to take + effect.
+ +** KMS Cache + + KMS caches keys for a short period of time to avoid excessive hits to the + underlying key provider. + + The cache is used with the following 2 methods only, <<>> + and <<>>. + + For the <<>> method, cached entries are kept for a maximum + of 1000 milliseconds regardless of the number of times the key is being + accessed (to avoid stale keys being considered current). + + For the <<>> method, cached entries are kept with a default + inactivity timeout of 10000 milliseconds. This timeout is configurable via + the following property in the <<>> configuration + file: + ++---+ + + hadoop.kms.cache.timeout.ms + 10000 + ++---+ + +** Start/Stop the KMS + + To start/stop KMS use KMS's bin/kms.sh script. For example: + ++---+ +hadoop-${project.version} $ sbin/kms.sh start ++---+ + + NOTE: Invoking the script without any parameters lists all possible + parameters (start, stop, run, etc.). The <<>> script is a wrapper + for Tomcat's <<>> script that sets the environment variables + and Java System properties required to run KMS. + +** Embedded Tomcat Configuration + + To configure the embedded Tomcat go to the <<>>. + + KMS pre-configures the HTTP and Admin ports in Tomcat's <<>> to + 16000 and 16001. + + Tomcat logs are also preconfigured to go to Hadoop's <<>> directory. + + The following environment variables (which can be set in KMS's + <<>> script) can be used to alter those values: + + * KMS_HTTP_PORT + + * KMS_ADMIN_PORT + + * KMS_LOG + + NOTE: You need to restart the KMS for the configuration changes to take + effect. + +** KMS Security Configuration + +*** Enabling Kerberos HTTP SPNEGO Authentication + + Configure the Kerberos <<>> file with the information of your + KDC server. + + Create a service principal and its keytab for the KMS; it must be an + <<>> service principal.
+ + Configure KMS <<>> with the correct security values, + for example: + ++---+ + + hadoop.kms.authentication.type + kerberos + + + + hadoop.kms.authentication.kerberos.keytab + ${user.home}/kms.keytab + + + + hadoop.kms.authentication.kerberos.principal + HTTP/localhost + + + + hadoop.kms.authentication.kerberos.name.rules + DEFAULT + ++---+ + + NOTE: You need to restart the KMS for the configuration changes to take + effect. + +*** KMS over HTTPS (SSL) + + To configure KMS to work over HTTPS the following 2 properties must be + set in the <<>> script (shown with default values): + + * KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore + + * KMS_SSL_KEYSTORE_PASS=password + + In the KMS <<>> directory, replace the <<>> file + with the provided <<>> file. + + You need to create an SSL certificate for the KMS. As the + <<>> Unix user, use the Java <<>> command to create the + SSL certificate: + ++---+ +$ keytool -genkey -alias tomcat -keyalg RSA ++---+ + + You will be asked a series of questions in an interactive prompt. It will + create the keystore file, which will be named <<.keystore>> and located in the + <<>> user home directory. + + The password you enter for "keystore password" must match the value of the + <<>> environment variable set in the + <<>> script in the configuration directory. + + The answer to "What is your first and last name?" (i.e. "CN") must be the + hostname of the machine where the KMS will be running. + + NOTE: You need to restart the KMS for the configuration changes to take + effect. + +*** KMS Access Control + + KMS ACLs configuration is defined in the KMS <<>> + configuration file. This file is hot-reloaded when it changes. + + KMS supports fine-grained access control via a set of ACL + configuration properties: + ++---+ + + hadoop.kms.acl.CREATE + * + + ACL for create-key operations. + If the user is not in the GET ACL, the key material is not returned + as part of the response.
+ + + + + hadoop.kms.acl.DELETE + * + + ACL for delete-key operations. + + + + + hadoop.kms.acl.ROLLOVER + * + + ACL for rollover-key operations. + If the user is not in the GET ACL, the key material is not returned + as part of the response. + + + + + hadoop.kms.acl.GET + * + + ACL for get-key-version and get-current-key operations. + + + + + hadoop.kms.acl.GET_KEYS + * + + ACL for get-keys operation. + + + + + hadoop.kms.acl.GET_METADATA + * + + ACL for get-key-metadata and get-keys-metadata operations. + + + + + hadoop.kms.acl.SET_KEY_MATERIAL + * + + Complementary ACL for CREATE and ROLLOVER operations to allow the client + to provide the key material when creating or rolling a key. + + ++---+ + +** KMS HTTP REST API + +*** Create a Key + + + ++---+ +POST http://HOST:PORT/kms/v1/keys +Content-Type: application/json + +{ + "name" : "", + "cipher" : "", + "length" : , //int + "material" : "", //base64 + "description" : "" +} ++---+ + + + ++---+ +201 CREATED +LOCATION: http://HOST:PORT/kms/v1/key/ +Content-Type: application/json + +{ + "name" : "versionName", + "material" : "", //base64, not present without GET ACL +} ++---+ + +*** Rollover Key + + + ++---+ +POST http://HOST:PORT/kms/v1/key/ +Content-Type: application/json + +{ + "material" : "", +} ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +{ + "name" : "versionName", + "material" : "", //base64, not present without GET ACL +} ++---+ + +*** Delete Key + + + ++---+ +DELETE http://HOST:PORT/kms/v1/key/ ++---+ + + + ++---+ +200 OK ++---+ + +*** Get Key Metadata + + + ++---+ +GET http://HOST:PORT/kms/v1/key//_metadata ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +{ + "name" : "", + "cipher" : "", + "length" : , //int + "description" : "", + "created" : , //long + "versions" : //int +} ++---+ + +*** Get Current Key + + + ++---+ +GET http://HOST:PORT/kms/v1/key//_currentversion ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +{ + "name" : "versionName", + 
"material" : "", //base64 +} ++---+ + +*** Get Key Version + + + ++---+ +GET http://HOST:PORT/kms/v1/keyversion/ ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +{ + "name" : "versionName", + "material" : "", //base64 +} ++---+ + +*** Get Key Versions + + + ++---+ +GET http://HOST:PORT/kms/v1/key//_versions ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +[ + { + "name" : "versionName", + "material" : "", //base64 + }, + { + "name" : "versionName", + "material" : "", //base64 + }, + ... +] ++---+ + +*** Get Key Names + + + ++---+ +GET http://HOST:PORT/kms/v1/keys/names ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +[ + "", + "", + ... +] ++---+ + +*** Get Keys Metadata + ++---+ +GET http://HOST:PORT/kms/v1/keys/metadata?key=&key=,... ++---+ + + + ++---+ +200 OK +Content-Type: application/json + +[ + { + "name" : "", + "cipher" : "", + "length" : , //int + "description" : "", + "created" : , //long + "versions" : //int + }, + { + "name" : "", + "cipher" : "", + "length" : , //int + "description" : "", + "created" : , //long + "versions" : //int + }, + ... +] ++---+ + + \[ {{{./index.html}Go Back}} \] diff --git a/hadoop-common-project/hadoop-kms/src/site/resources/css/site.css b/hadoop-common-project/hadoop-kms/src/site/resources/css/site.css new file mode 100644 index 00000000000..7315db31e53 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/site/resources/css/site.css @@ -0,0 +1,29 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with +* this work for additional information regarding copyright ownership. +* The ASF licenses this file to You under the Apache License, Version 2.0 +* (the "License"); you may not use this file except in compliance with +* the License. 
You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +#banner { + height: 93px; + background: none; +} + +#bannerLeft img { + margin-left: 30px; + margin-top: 10px; +} + +#bannerRight img { + margin: 17px; +} diff --git a/hadoop-common-project/hadoop-kms/src/site/site.xml b/hadoop-common-project/hadoop-kms/src/site/site.xml new file mode 100644 index 00000000000..4adfee108a3 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/site/site.xml @@ -0,0 +1,29 @@ + + + + + + org.apache.maven.skins + maven-stylus-skin + 1.2 + + + + + + + + + diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java new file mode 100644 index 00000000000..6e6523465ac --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java @@ -0,0 +1,806 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.crypto.key.KeyProvider; +import org.apache.hadoop.crypto.key.kms.KMSClientProvider; +import org.apache.hadoop.minikdc.MiniKdc; +import org.apache.hadoop.security.authorize.AuthorizationException; +import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mortbay.jetty.Connector; +import org.mortbay.jetty.Server; +import org.mortbay.jetty.security.SslSocketConnector; +import org.mortbay.jetty.webapp.WebAppContext; + +import javax.security.auth.Subject; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.LoginContext; +import java.io.File; +import java.io.FileWriter; +import java.io.Writer; +import java.net.InetAddress; +import java.net.MalformedURLException; +import java.net.ServerSocket; +import java.net.URI; +import java.net.URL; +import java.security.Principal; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.Callable; + +public class TestKMS { + + public static File getTestDir() throws Exception { + File file = new File("dummy"); + file = 
file.getAbsoluteFile(); + file = file.getParentFile(); + file = new File(file, "target"); + file = new File(file, UUID.randomUUID().toString()); + if (!file.mkdirs()) { + throw new RuntimeException("Could not create test directory: " + file); + } + return file; + } + + public static Server createJettyServer(String keyStore, String password) { + try { + boolean ssl = keyStore != null; + InetAddress localhost = InetAddress.getByName("localhost"); + String host = "localhost"; + ServerSocket ss = new ServerSocket(0, 50, localhost); + int port = ss.getLocalPort(); + ss.close(); + Server server = new Server(0); + if (!ssl) { + server.getConnectors()[0].setHost(host); + server.getConnectors()[0].setPort(port); + } else { + SslSocketConnector c = new SslSocketConnector(); + c.setHost(host); + c.setPort(port); + c.setNeedClientAuth(false); + c.setKeystore(keyStore); + c.setKeystoreType("jks"); + c.setKeyPassword(password); + server.setConnectors(new Connector[]{c}); + } + return server; + } catch (Exception ex) { + throw new RuntimeException("Could not start embedded servlet container, " + + ex.getMessage(), ex); + } + } + + public static URL getJettyURL(Server server) { + boolean ssl = server.getConnectors()[0].getClass() + == SslSocketConnector.class; + try { + String scheme = (ssl) ? 
"https" : "http"; + return new URL(scheme + "://" + + server.getConnectors()[0].getHost() + ":" + + server.getConnectors()[0].getPort()); + } catch (MalformedURLException ex) { + throw new RuntimeException("It should never happen, " + ex.getMessage(), + ex); + } + } + + public static abstract class KMSCallable implements Callable { + private URL kmsUrl; + + protected URL getKMSUrl() { + return kmsUrl; + } + } + + protected void runServer(String keystore, String password, File confDir, + KMSCallable callable) throws Exception { + System.setProperty(KMSConfiguration.KMS_CONFIG_DIR, + confDir.getAbsolutePath()); + System.setProperty("log4j.configuration", "log4j.properties"); + Server jetty = createJettyServer(keystore, password); + try { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + URL url = cl.getResource("webapp"); + if (url == null) { + throw new RuntimeException( + "Could not find webapp/ dir in test classpath"); + } + WebAppContext context = new WebAppContext(url.getPath(), "/kms"); + jetty.addHandler(context); + jetty.start(); + url = new URL(getJettyURL(jetty), "kms"); + System.out.println("Test KMS running at: " + url); + callable.kmsUrl = url; + callable.call(); + } finally { + if (jetty != null && jetty.isRunning()) { + try { + jetty.stop(); + } catch (Exception ex) { + throw new RuntimeException("Could not stop embedded Jetty, " + + ex.getMessage(), ex); + } + } + } + } + + protected Configuration createBaseKMSConf(File keyStoreDir) throws Exception { + Configuration conf = new Configuration(false); + conf.set("hadoop.security.key.provider.path", + "jceks://file@/" + keyStoreDir.getAbsolutePath() + "/kms.keystore"); + conf.set("hadoop.kms.authentication.type", "simple"); + return conf; + } + + protected void writeConf(File confDir, Configuration conf) throws Exception { + Writer writer = new FileWriter(new File(confDir, + KMSConfiguration.KMS_SITE_XML)); + conf.writeXml(writer); + writer.close(); + + writer = new FileWriter(new 
File(confDir, KMSConfiguration.KMS_ACLS_XML)); + conf.writeXml(writer); + writer.close(); + + //create empty core-site.xml + writer = new FileWriter(new File(confDir, "core-site.xml")); + new Configuration(false).writeXml(writer); + writer.close(); + } + + protected URI createKMSUri(URL kmsUrl) throws Exception { + String str = kmsUrl.toString(); + str = str.replaceFirst("://", "@"); + return new URI("kms://" + str); + } + + + private static class KerberosConfiguration + extends javax.security.auth.login.Configuration { + private String principal; + private String keytab; + private boolean isInitiator; + + private KerberosConfiguration(String principal, File keytab, + boolean client) { + this.principal = principal; + this.keytab = keytab.getAbsolutePath(); + this.isInitiator = client; + } + + public static javax.security.auth.login.Configuration createClientConfig( + String principal, + File keytab) { + return new KerberosConfiguration(principal, keytab, true); + } + + private static String getKrb5LoginModuleName() { + return System.getProperty("java.vendor").contains("IBM") + ? 
"com.ibm.security.auth.module.Krb5LoginModule" + : "com.sun.security.auth.module.Krb5LoginModule"; + } + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) { + Map options = new HashMap(); + options.put("keyTab", keytab); + options.put("principal", principal); + options.put("useKeyTab", "true"); + options.put("storeKey", "true"); + options.put("doNotPrompt", "true"); + options.put("useTicketCache", "true"); + options.put("renewTGT", "true"); + options.put("refreshKrb5Config", "true"); + options.put("isInitiator", Boolean.toString(isInitiator)); + String ticketCache = System.getenv("KRB5CCNAME"); + if (ticketCache != null) { + options.put("ticketCache", ticketCache); + } + options.put("debug", "true"); + + return new AppConfigurationEntry[]{ + new AppConfigurationEntry(getKrb5LoginModuleName(), + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, + options)}; + } + } + + private static MiniKdc kdc; + private static File keytab; + + @BeforeClass + public static void setUpMiniKdc() throws Exception { + File kdcDir = getTestDir(); + Properties kdcConf = MiniKdc.createConf(); + kdc = new MiniKdc(kdcConf, kdcDir); + kdc.start(); + keytab = new File(kdcDir, "keytab"); + List principals = new ArrayList(); + principals.add("HTTP/localhost"); + principals.add("client"); + principals.add("client/host"); + for (KMSACLs.Type type : KMSACLs.Type.values()) { + principals.add(type.toString()); + } + principals.add("CREATE_MATERIAL"); + principals.add("ROLLOVER_MATERIAL"); + kdc.createPrincipal(keytab, + principals.toArray(new String[principals.size()])); + } + + @AfterClass + public static void tearDownMiniKdc() throws Exception { + if (kdc != null) { + kdc.stop(); + } + } + + private void doAs(String user, final PrivilegedExceptionAction action) + throws Exception { + Set principals = new HashSet(); + principals.add(new KerberosPrincipal(user)); + + //client login + Subject subject = new Subject(false, principals, + new HashSet(), new 
HashSet()); + LoginContext loginContext = new LoginContext("", subject, null, + KerberosConfiguration.createClientConfig(user, keytab)); + try { + loginContext.login(); + subject = loginContext.getSubject(); + Subject.doAs(subject, action); + } finally { + loginContext.logout(); + } + } + + public void testStartStop(final boolean ssl, final boolean kerberos) + throws Exception { + File testDir = getTestDir(); + Configuration conf = createBaseKMSConf(testDir); + + final String keystore; + final String password; + if (ssl) { + String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestKMS.class); + KeyStoreTestUtil.setupSSLConfig(testDir.getAbsolutePath(), sslConfDir, + conf, false); + keystore = testDir.getAbsolutePath() + "/serverKS.jks"; + password = "serverP"; + } else { + keystore = null; + password = null; + } + + if (kerberos) { + conf.set("hadoop.kms.authentication.type", "kerberos"); + conf.set("hadoop.kms.authentication.kerberos.keytab", + keytab.getAbsolutePath()); + conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost"); + conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT"); + } + + writeConf(testDir, conf); + + runServer(keystore, password, testDir, new KMSCallable() { + @Override + public Void call() throws Exception { + Configuration conf = new Configuration(); + URL url = getKMSUrl(); + Assert.assertEquals(keystore != null, + url.getProtocol().equals("https")); + URI uri = createKMSUri(getKMSUrl()); + final KeyProvider kp = new KMSClientProvider(uri, conf); + + if (kerberos) { + for (String user : new String[]{"client", "client/host"}) { + doAs(user, new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + // getKeys() empty + Assert.assertTrue(kp.getKeys().isEmpty()); + return null; + } + }); + } + } else { + // getKeys() empty + Assert.assertTrue(kp.getKeys().isEmpty()); + } + return null; + } + }); + } + + @Test + public void testStartStopHttpPseudo() throws Exception { + 
testStartStop(false, false); + } + + @Test + public void testStartStopHttpsPseudo() throws Exception { + testStartStop(true, false); + } + + @Test + public void testStartStopHttpKerberos() throws Exception { + testStartStop(false, true); + } + + @Test + public void testStartStopHttpsKerberos() throws Exception { + testStartStop(true, true); + } + + @Test + public void testKMSProvider() throws Exception { + File confDir = getTestDir(); + Configuration conf = createBaseKMSConf(confDir); + writeConf(confDir, conf); + + runServer(null, null, confDir, new KMSCallable() { + @Override + public Void call() throws Exception { + Date started = new Date(); + Configuration conf = new Configuration(); + URI uri = createKMSUri(getKMSUrl()); + KeyProvider kp = new KMSClientProvider(uri, conf); + + // getKeys() empty + Assert.assertTrue(kp.getKeys().isEmpty()); + + // getKeysMetadata() empty + Assert.assertEquals(0, kp.getKeysMetadata().length); + + // createKey() + KeyProvider.Options options = new KeyProvider.Options(conf); + options.setCipher("AES/CTR/NoPadding"); + options.setBitLength(128); + options.setDescription("l1"); + KeyProvider.KeyVersion kv0 = kp.createKey("k1", options); + Assert.assertNotNull(kv0); + Assert.assertNotNull(kv0.getVersionName()); + Assert.assertNotNull(kv0.getMaterial()); + + // getKeyVersion() + KeyProvider.KeyVersion kv1 = kp.getKeyVersion(kv0.getVersionName()); + Assert.assertEquals(kv0.getVersionName(), kv1.getVersionName()); + Assert.assertNotNull(kv1.getMaterial()); + + // getCurrent() + KeyProvider.KeyVersion cv1 = kp.getCurrentKey("k1"); + Assert.assertEquals(kv0.getVersionName(), cv1.getVersionName()); + Assert.assertNotNull(cv1.getMaterial()); + + // getKeyMetadata() 1 version + KeyProvider.Metadata m1 = kp.getMetadata("k1"); + Assert.assertEquals("AES/CTR/NoPadding", m1.getCipher()); + Assert.assertEquals("AES", m1.getAlgorithm()); + Assert.assertEquals(128, m1.getBitLength()); + Assert.assertEquals(1, m1.getVersions()); + 
Assert.assertNotNull(m1.getCreated()); + Assert.assertTrue(started.before(m1.getCreated())); + + // getKeyVersions() 1 version + List lkv1 = kp.getKeyVersions("k1"); + Assert.assertEquals(1, lkv1.size()); + Assert.assertEquals(kv0.getVersionName(), lkv1.get(0).getVersionName()); + Assert.assertNotNull(kv1.getMaterial()); + + // rollNewVersion() + KeyProvider.KeyVersion kv2 = kp.rollNewVersion("k1"); + Assert.assertNotSame(kv0.getVersionName(), kv2.getVersionName()); + Assert.assertNotNull(kv2.getMaterial()); + + // getKeyVersion() + kv2 = kp.getKeyVersion(kv2.getVersionName()); + boolean eq = true; + for (int i = 0; i < kv1.getMaterial().length; i++) { + eq = eq && kv1.getMaterial()[i] == kv2.getMaterial()[i]; + } + Assert.assertFalse(eq); + + // getCurrent() + KeyProvider.KeyVersion cv2 = kp.getCurrentKey("k1"); + Assert.assertEquals(kv2.getVersionName(), cv2.getVersionName()); + Assert.assertNotNull(cv2.getMaterial()); + eq = true; + for (int i = 0; i < kv1.getMaterial().length; i++) { + eq = eq && cv2.getMaterial()[i] == kv2.getMaterial()[i]; + } + Assert.assertTrue(eq); + + // getKeyVersions() 2 versions + List lkv2 = kp.getKeyVersions("k1"); + Assert.assertEquals(2, lkv2.size()); + Assert.assertEquals(kv1.getVersionName(), lkv2.get(0).getVersionName()); + Assert.assertNotNull(lkv2.get(0).getMaterial()); + Assert.assertEquals(kv2.getVersionName(), lkv2.get(1).getVersionName()); + Assert.assertNotNull(lkv2.get(1).getMaterial()); + + // getKeyMetadata() 2 version + KeyProvider.Metadata m2 = kp.getMetadata("k1"); + Assert.assertEquals("AES/CTR/NoPadding", m2.getCipher()); + Assert.assertEquals("AES", m2.getAlgorithm()); + Assert.assertEquals(128, m2.getBitLength()); + Assert.assertEquals(2, m2.getVersions()); + Assert.assertNotNull(m2.getCreated()); + Assert.assertTrue(started.before(m2.getCreated())); + + // getKeys() 1 key + List ks1 = kp.getKeys(); + Assert.assertEquals(1, ks1.size()); + Assert.assertEquals("k1", ks1.get(0)); + + // getKeysMetadata() 1 key 2 
versions + KeyProvider.Metadata[] kms1 = kp.getKeysMetadata("k1"); + Assert.assertEquals(1, kms1.length); + Assert.assertEquals("AES/CTR/NoPadding", kms1[0].getCipher()); + Assert.assertEquals("AES", kms1[0].getAlgorithm()); + Assert.assertEquals(128, kms1[0].getBitLength()); + Assert.assertEquals(2, kms1[0].getVersions()); + Assert.assertNotNull(kms1[0].getCreated()); + Assert.assertTrue(started.before(kms1[0].getCreated())); + + // deleteKey() + kp.deleteKey("k1"); + + // getKey() + Assert.assertNull(kp.getKeyVersion("k1")); + + // getKeyVersions() + Assert.assertNull(kp.getKeyVersions("k1")); + + // getMetadata() + Assert.assertNull(kp.getMetadata("k1")); + + // getKeys() empty + Assert.assertTrue(kp.getKeys().isEmpty()); + + // getKeysMetadata() empty + Assert.assertEquals(0, kp.getKeysMetadata().length); + + return null; + } + }); + } + + @Test + public void testACLs() throws Exception { + final File testDir = getTestDir(); + Configuration conf = createBaseKMSConf(testDir); + conf.set("hadoop.kms.authentication.type", "kerberos"); + conf.set("hadoop.kms.authentication.kerberos.keytab", + keytab.getAbsolutePath()); + conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost"); + conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT"); + + for (KMSACLs.Type type : KMSACLs.Type.values()) { + conf.set(type.getConfigKey(), type.toString()); + } + conf.set(KMSACLs.Type.CREATE.getConfigKey(), + KMSACLs.Type.CREATE.toString() + ",SET_KEY_MATERIAL"); + + conf.set(KMSACLs.Type.ROLLOVER.getConfigKey(), + KMSACLs.Type.ROLLOVER.toString() + ",SET_KEY_MATERIAL"); + + writeConf(testDir, conf); + + runServer(null, null, testDir, new KMSCallable() { + @Override + public Void call() throws Exception { + final Configuration conf = new Configuration(); + conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64); + URI uri = createKMSUri(getKMSUrl()); + final KeyProvider kp = new KMSClientProvider(uri, conf); + + //nothing allowed + doAs("client", new 
PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + kp.createKey("k", new KeyProvider.Options(conf)); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.createKey("k", new byte[8], new KeyProvider.Options(conf)); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.rollNewVersion("k"); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.rollNewVersion("k", new byte[8]); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.getKeys(); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.getKeysMetadata("k"); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.getKeyVersion(KMSClientProvider.buildVersionName("k", 0)); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.getCurrentKey("k"); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.getMetadata("k"); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + try { + kp.getKeyVersions("k"); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + + return null; + } + }); + + doAs("CREATE", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.createKey("k0", + new 
KeyProvider.Options(conf)); + Assert.assertNull(kv.getMaterial()); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("DELETE", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + kp.deleteKey("k0"); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("SET_KEY_MATERIAL", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.createKey("k1", new byte[8], + new KeyProvider.Options(conf)); + Assert.assertNull(kv.getMaterial()); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("ROLLOVER", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.rollNewVersion("k1"); + Assert.assertNull(kv.getMaterial()); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("SET_KEY_MATERIAL", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.rollNewVersion("k1", new byte[8]); + Assert.assertNull(kv.getMaterial()); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("GET", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + kp.getKeyVersion("k1@0"); + kp.getCurrentKey("k1"); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("GET_KEYS", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + kp.getKeys(); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("GET_METADATA", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + kp.getMetadata("k1"); + kp.getKeysMetadata("k1"); + } catch (Exception ex) { + 
Assert.fail(ex.toString()); + } + return null; + } + }); + + // test ACL reloading + Thread.sleep(10); // to ensure the ACLs file modifiedTime is newer + conf.set(KMSACLs.Type.CREATE.getConfigKey(), "foo"); + writeConf(testDir, conf); + + KMSWebApp.getACLs().run(); // forcing a reload by hand. + + // should not be able to create a key now + doAs("CREATE", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.createKey("k2", + new KeyProvider.Options(conf)); + Assert.fail(); + } catch (AuthorizationException ex) { + //NOP + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + + return null; + } + }); + + return null; + } + }); + } + + @Test + public void testServicePrincipalACLs() throws Exception { + File testDir = getTestDir(); + Configuration conf = createBaseKMSConf(testDir); + conf.set("hadoop.kms.authentication.type", "kerberos"); + conf.set("hadoop.kms.authentication.kerberos.keytab", + keytab.getAbsolutePath()); + conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost"); + conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT"); + for (KMSACLs.Type type : KMSACLs.Type.values()) { + conf.set(type.getConfigKey(), " "); + } + conf.set(KMSACLs.Type.CREATE.getConfigKey(), "client"); + + writeConf(testDir, conf); + + runServer(null, null, testDir, new KMSCallable() { + @Override + public Void call() throws Exception { + final Configuration conf = new Configuration(); + conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64); + URI uri = createKMSUri(getKMSUrl()); + final KeyProvider kp = new KMSClientProvider(uri, conf); + + doAs("client", new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.createKey("ck0", + new KeyProvider.Options(conf)); + Assert.assertNull(kv.getMaterial()); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + + doAs("client/host", 
new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + try { + KeyProvider.KeyVersion kv = kp.createKey("ck1", + new KeyProvider.Options(conf)); + Assert.assertNull(kv.getMaterial()); + } catch (Exception ex) { + Assert.fail(ex.toString()); + } + return null; + } + }); + return null; + } + }); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSACLs.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSACLs.java new file mode 100644 index 00000000000..e65a1027ea7 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSACLs.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.conf.Configuration; +import org.junit.Assert; +import org.junit.Test; + +public class TestKMSACLs { + + @Test + public void testDefaults() { + KMSACLs acls = new KMSACLs(new Configuration(false)); + for (KMSACLs.Type type : KMSACLs.Type.values()) { + Assert.assertTrue(acls.hasAccess(type, "foo")); + } + } + + @Test + public void testCustom() { + Configuration conf = new Configuration(false); + for (KMSACLs.Type type : KMSACLs.Type.values()) { + conf.set(type.getConfigKey(), type.toString() + " "); + } + KMSACLs acls = new KMSACLs(conf); + for (KMSACLs.Type type : KMSACLs.Type.values()) { + Assert.assertTrue(acls.hasAccess(type, type.toString())); + Assert.assertFalse(acls.hasAccess(type, "foo")); + } + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSCacheKeyProvider.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSCacheKeyProvider.java new file mode 100644 index 00000000000..110b0c95027 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSCacheKeyProvider.java @@ -0,0 +1,120 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import org.apache.hadoop.crypto.key.KeyProvider; +import org.apache.hadoop.crypto.key.kms.KMSClientProvider; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.Date; + +public class TestKMSCacheKeyProvider { + + @Test + public void testCurrentKey() throws Exception { + KeyProvider.KeyVersion mockKey = Mockito.mock(KeyProvider.KeyVersion.class); + KeyProvider mockProv = Mockito.mock(KeyProvider.class); + Mockito.when(mockProv.getCurrentKey(Mockito.eq("k1"))).thenReturn(mockKey); + Mockito.when(mockProv.getCurrentKey(Mockito.eq("k2"))).thenReturn(null); + KeyProvider cache = new KMSCacheKeyProvider(mockProv, 100); + + // asserting caching + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(1)).getCurrentKey(Mockito.eq("k1")); + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(1)).getCurrentKey(Mockito.eq("k1")); + Thread.sleep(1200); + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(2)).getCurrentKey(Mockito.eq("k1")); + + // asserting no caching when key is not known + cache = new KMSCacheKeyProvider(mockProv, 100); + Assert.assertEquals(null, cache.getCurrentKey("k2")); + Mockito.verify(mockProv, Mockito.times(1)).getCurrentKey(Mockito.eq("k2")); + Assert.assertEquals(null, cache.getCurrentKey("k2")); + Mockito.verify(mockProv, Mockito.times(2)).getCurrentKey(Mockito.eq("k2")); + } + + @Test + public void testKeyVersion() throws Exception { + KeyProvider.KeyVersion mockKey = Mockito.mock(KeyProvider.KeyVersion.class); + KeyProvider mockProv = Mockito.mock(KeyProvider.class); + Mockito.when(mockProv.getKeyVersion(Mockito.eq("k1@0"))).thenReturn(mockKey); + 
Mockito.when(mockProv.getKeyVersion(Mockito.eq("k2@0"))).thenReturn(null); + KeyProvider cache = new KMSCacheKeyProvider(mockProv, 100); + + // asserting caching + Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0")); + Mockito.verify(mockProv, Mockito.times(1)).getKeyVersion(Mockito.eq("k1@0")); + Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0")); + Mockito.verify(mockProv, Mockito.times(1)).getKeyVersion(Mockito.eq("k1@0")); + Thread.sleep(200); + Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0")); + Mockito.verify(mockProv, Mockito.times(2)).getKeyVersion(Mockito.eq("k1@0")); + + // asserting no caching when key is not known + cache = new KMSCacheKeyProvider(mockProv, 100); + Assert.assertEquals(null, cache.getKeyVersion("k2@0")); + Mockito.verify(mockProv, Mockito.times(1)).getKeyVersion(Mockito.eq("k2@0")); + Assert.assertEquals(null, cache.getKeyVersion("k2@0")); + Mockito.verify(mockProv, Mockito.times(2)).getKeyVersion(Mockito.eq("k2@0")); + } + + @Test + public void testRollNewVersion() throws Exception { + KeyProvider.KeyVersion mockKey = Mockito.mock(KeyProvider.KeyVersion.class); + KeyProvider mockProv = Mockito.mock(KeyProvider.class); + Mockito.when(mockProv.getCurrentKey(Mockito.eq("k1"))).thenReturn(mockKey); + KeyProvider cache = new KMSCacheKeyProvider(mockProv, 100); + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(1)).getCurrentKey(Mockito.eq("k1")); + cache.rollNewVersion("k1"); + + // asserting the cache is purged + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(2)).getCurrentKey(Mockito.eq("k1")); + cache.rollNewVersion("k1", new byte[0]); + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(3)).getCurrentKey(Mockito.eq("k1")); + } + + @Test + public void testDeleteKey() throws Exception { + KeyProvider.KeyVersion mockKey = Mockito.mock(KeyProvider.KeyVersion.class); + 
KeyProvider mockProv = Mockito.mock(KeyProvider.class); + Mockito.when(mockProv.getCurrentKey(Mockito.eq("k1"))).thenReturn(mockKey); + Mockito.when(mockProv.getKeyVersion(Mockito.eq("k1@0"))).thenReturn(mockKey); + Mockito.when(mockProv.getMetadata(Mockito.eq("k1"))).thenReturn( + new KMSClientProvider.KMSMetadata("c", 0, "l", new Date(), 1)); + KeyProvider cache = new KMSCacheKeyProvider(mockProv, 100); + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(1)).getCurrentKey(Mockito.eq("k1")); + Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0")); + Mockito.verify(mockProv, Mockito.times(1)).getKeyVersion(Mockito.eq("k1@0")); + cache.deleteKey("k1"); + + // asserting the cache is purged + Assert.assertEquals(mockKey, cache.getCurrentKey("k1")); + Mockito.verify(mockProv, Mockito.times(2)).getCurrentKey(Mockito.eq("k1")); + Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0")); + Mockito.verify(mockProv, Mockito.times(2)).getKeyVersion(Mockito.eq("k1@0")); + } + +} diff --git a/hadoop-common-project/hadoop-kms/src/test/resources/log4j.properties b/hadoop-common-project/hadoop-kms/src/test/resources/log4j.properties new file mode 100644 index 00000000000..5cd037a49c6 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/test/resources/log4j.properties @@ -0,0 +1,31 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# STDOUT Appender +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.Target=System.out +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{1} - %m%n + +log4j.rootLogger=WARN, stdout +log4j.logger.org.apache.hadoop.conf=ERROR +log4j.logger.org.apache.hadoop.crypto.key.kms.server=ALL +log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF +log4j.logger.org.apache.hadoop.security=OFF +log4j.logger.org.apache.directory.server.core=OFF +log4j.logger.org.apache.hadoop.util.NativeCodeLoader=OFF \ No newline at end of file diff --git a/hadoop-common-project/pom.xml b/hadoop-common-project/pom.xml index cae09329290..ef49f9ce08b 100644 --- a/hadoop-common-project/pom.xml +++ b/hadoop-common-project/pom.xml @@ -37,6 +37,7 @@ hadoop-annotations hadoop-nfs hadoop-minikdc + hadoop-kms diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml index 6a524fc4ba5..149f4048b00 100644 --- a/hadoop-dist/pom.xml +++ b/hadoop-dist/pom.xml @@ -118,6 +118,7 @@ run cp -r $ROOT/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version}/* . run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* . run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* . + run cp -r $ROOT/hadoop-common-project/hadoop-kms/target/hadoop-kms-${project.version}/* . run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version}/* . 
run cp -r $ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* . run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* . diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 47ff38150df..d8bfaa22c8e 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -588,6 +588,11 @@ slf4j-log4j12 1.7.5 + + org.slf4j + jul-to-slf4j + 1.7.5 + org.eclipse.jdt core @@ -715,7 +720,7 @@ com.codahale.metrics metrics-core - 3.0.0 + 3.0.1 org.apache.hadoop @@ -760,6 +765,7 @@ leveldbjni-all 1.8 + diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml index 87866f0a3d7..2a2e506ae72 100644 --- a/hadoop-project/src/site/site.xml +++ b/hadoop-project/src/site/site.xml @@ -62,6 +62,7 @@ +