HDDS-100. SCM CA: generate public/private key pair for SCM/OM/DNs. Contributed by Ajay Kumar.

Xiaoyu Yao 2018-06-08 08:33:58 -07:00
parent e47135d9d9
commit faf53f8262
10 changed files with 933 additions and 2 deletions

pom.xml

@@ -81,7 +81,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<artifactId>rocksdbjni</artifactId>
<version>5.14.2</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
@@ -110,6 +109,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
<version>1.49</version>
</dependency>
</dependencies>
<build>

HddsConfigKeys.java

@@ -112,4 +112,23 @@ public final class HddsConfigKeys {
public static final String HDDS_PROMETHEUS_ENABLED =
"hdds.prometheus.endpoint.enabled";
public static final String HDDS_KEY_LEN = "hdds.key.len";
public static final int HDDS_DEFAULT_KEY_LEN = 2048;
public static final String HDDS_KEY_ALGORITHM = "hdds.key.algo";
public static final String HDDS_DEFAULT_KEY_ALGORITHM = "RSA";
public static final String HDDS_SECURITY_PROVIDER = "hdds.security.provider";
public static final String HDDS_DEFAULT_SECURITY_PROVIDER = "BC";
public static final String HDDS_KEY_DIR_NAME = "hdds.key.dir.name";
public static final String HDDS_KEY_DIR_NAME_DEFAULT = "keys";
// TODO : Talk to StorageIO classes and see if they can return a secure
// storage location for each node.
public static final String HDDS_METADATA_DIR_NAME = "hdds.metadata.dir";
public static final String HDDS_PRIVATE_KEY_FILE_NAME =
"hdds.priv.key.file.name";
public static final String HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT = "private.pem";
public static final String HDDS_PUBLIC_KEY_FILE_NAME =
"hdds.public.key.file.name";
public static final String HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT = "public.pem";
}
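These keys follow the standard Hadoop Configuration pattern, so callers can override the defaults programmatically. A minimal sketch, assuming only the constants above (the directory path is a hypothetical example):

import org.apache.hadoop.conf.Configuration;

public class HddsSecurityDefaults {
  public static void main(String[] args) {
    // Property names come from the HddsConfigKeys constants above.
    Configuration conf = new Configuration();
    conf.setInt("hdds.key.len", 4096);        // default: 2048
    conf.set("hdds.key.algo", "RSA");         // default: "RSA"
    conf.set("hdds.security.provider", "BC"); // default: "BC" (Bouncy Castle)
    conf.set("hdds.metadata.dir", "/var/lib/hadoop/hdds"); // hypothetical path
  }
}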

HDDSKeyGenerator.java

@@ -0,0 +1,99 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.hdds.security.x509;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
/** A class to generate Key Pair for use with Certificates. */
public class HDDSKeyGenerator {
private static final Logger LOG =
LoggerFactory.getLogger(HDDSKeyGenerator.class);
private final SecurityConfig securityConfig;
/**
* Constructor for HDDSKeyGenerator.
*
* @param configuration - config
*/
public HDDSKeyGenerator(Configuration configuration) {
this.securityConfig = new SecurityConfig(configuration);
}
/**
* Returns the Security config used for this object.
* @return SecurityConfig
*/
public SecurityConfig getSecurityConfig() {
return securityConfig;
}
/**
* Use Config to generate key.
*
* @return KeyPair
* @throws NoSuchProviderException
* @throws NoSuchAlgorithmException
*/
public KeyPair generateKey() throws NoSuchProviderException,
NoSuchAlgorithmException {
return generateKey(securityConfig.getSize(),
securityConfig.getAlgo(), securityConfig.getProvider());
}
/**
* Specify the size -- all other parameters are used from config.
*
* @param size - int, valid key sizes.
* @return KeyPair
* @throws NoSuchProviderException
* @throws NoSuchAlgorithmException
*/
public KeyPair generateKey(int size) throws
NoSuchProviderException, NoSuchAlgorithmException {
return generateKey(size,
securityConfig.getAlgo(), securityConfig.getProvider());
}
/**
* Custom Key Generation, all values are user provided.
*
* @param size - Key Size
* @param algorithm - Algorithm to use
* @param provider - Security provider.
* @return KeyPair.
* @throws NoSuchProviderException
* @throws NoSuchAlgorithmException
*/
public KeyPair generateKey(int size, String algorithm, String provider)
throws NoSuchProviderException, NoSuchAlgorithmException {
LOG.info("Generating key pair using size:{}, Algorithm:{}, Provider:{}",
size, algorithm, provider);
KeyPairGenerator generator = KeyPairGenerator
.getInstance(algorithm, provider);
generator.initialize(size);
return generator.generateKeyPair();
}
}
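The generator is a thin wrapper over the JCA KeyPairGenerator, so usage reduces to a couple of calls. A minimal sketch, assuming a writable metadata directory is configured (SecurityConfig requires one; the path here is hypothetical):

import java.security.KeyPair;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.HDDSKeyGenerator;

public class KeyGenSketch {
  public static void main(String[] args) throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    // SecurityConfig needs a metadata dir; this path is a hypothetical example.
    conf.set("ozone.metadata.dirs", "/tmp/ozone-meta");
    HDDSKeyGenerator keyGen = new HDDSKeyGenerator(conf);
    KeyPair defaultPair = keyGen.generateKey();   // RSA/2048 from config
    KeyPair largePair = keyGen.generateKey(4096); // explicit size override
    System.out.println(defaultPair.getPrivate().getFormat()); // PKCS#8
  }
}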

HDDSKeyPEMWriter.java

@@ -0,0 +1,254 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.hdds.security.x509;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.output.FileWriterWithEncoding;
import org.apache.hadoop.conf.Configuration;
import org.bouncycastle.util.io.pem.PemObject;
import org.bouncycastle.util.io.pem.PemWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.security.KeyPair;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE;
import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
/**
* We store all Key material in good old PEM files. This helps in avoiding
* dealing with persistent Java KeyStore issues. Also, when debugging,
* general tools like OpenSSL can be used to read and decode these files.
*/
public class HDDSKeyPEMWriter {
private static final Logger LOG =
LoggerFactory.getLogger(HDDSKeyPEMWriter.class);
private final Path location;
private final SecurityConfig securityConfig;
private Set<PosixFilePermission> permissionSet =
Stream.of(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE)
.collect(Collectors.toSet());
private Supplier<Boolean> isPosixFileSystem;
public static final String PRIVATE_KEY = "PRIVATE KEY";
public static final String PUBLIC_KEY = "PUBLIC KEY";
public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
/**
* Creates an HDDS Key Writer.
*
* @param configuration - Configuration
*/
public HDDSKeyPEMWriter(Configuration configuration) throws IOException {
Preconditions.checkNotNull(configuration, "Config cannot be null");
this.securityConfig = new SecurityConfig(configuration);
isPosixFileSystem = HDDSKeyPEMWriter::isPosix;
this.location = securityConfig.getKeyLocation();
}
/**
* Checks if File System supports posix style security permissions.
*
* @return True if it supports posix.
*/
private static Boolean isPosix() {
return FileSystems.getDefault().supportedFileAttributeViews()
.contains("posix");
}
/**
* Returns the Permission set.
* @return Set
*/
@VisibleForTesting
public Set<PosixFilePermission> getPermissionSet() {
return permissionSet;
}
/**
* Returns the Security config used for this object.
* @return SecurityConfig
*/
public SecurityConfig getSecurityConfig() {
return securityConfig;
}
/**
* This function is used only for testing.
*
* @param isPosixFileSystem - Sets a boolean function for mimicking file
* systems that are not POSIX.
*/
@VisibleForTesting
public void setIsPosixFileSystem(Supplier<Boolean> isPosixFileSystem) {
this.isPosixFileSystem = isPosixFileSystem;
}
/**
* Writes a given key using the default config options.
*
* @param keyPair - Key Pair to write to file.
* @throws IOException
*/
public void writeKey(KeyPair keyPair) throws IOException {
writeKey(location, keyPair, securityConfig.getPrivateKeyName(),
securityConfig.getPublicKeyName(), false);
}
/**
* Writes a given key using default config options.
*
* @param keyPair - Key pair to write
* @param overwrite - Overwrites the keys if they already exist.
* @throws IOException
*/
public void writeKey(KeyPair keyPair, boolean overwrite) throws IOException {
writeKey(location, keyPair, securityConfig.getPrivateKeyName(),
securityConfig.getPublicKeyName(), overwrite);
}
/**
* Writes a given key to the given base path, overriding the configured
* key location.
*
* @param basePath - The location to write to; overrides the config values.
* @param keyPair - Key pair to write
* @param overwrite - Overwrites the keys if they already exist.
* @throws IOException
*/
public void writeKey(Path basePath, KeyPair keyPair, boolean overwrite)
throws IOException {
writeKey(basePath, keyPair, securityConfig.getPrivateKeyName(),
securityConfig.getPublicKeyName(), overwrite);
}
/**
* Helper function that actually writes data to the files.
*
* @param basePath - base path to write key
* @param keyPair - Key pair to write to file.
* @param privateKeyFileName - private key file name.
* @param publicKeyFileName - public key file name.
* @param force - forces overwriting the keys.
* @throws IOException
*/
private synchronized void writeKey(Path basePath, KeyPair keyPair,
String privateKeyFileName, String publicKeyFileName, boolean force)
throws IOException {
checkPreconditions(basePath);
File privateKeyFile =
Paths.get(basePath.toString(), privateKeyFileName).toFile();
File publicKeyFile =
Paths.get(basePath.toString(), publicKeyFileName).toFile();
checkKeyFile(privateKeyFile, force, publicKeyFile);
try (PemWriter privateKeyWriter = new PemWriter(new
FileWriterWithEncoding(privateKeyFile, DEFAULT_CHARSET))) {
privateKeyWriter.writeObject(
new PemObject(PRIVATE_KEY, keyPair.getPrivate().getEncoded()));
}
try (PemWriter publicKeyWriter = new PemWriter(new
FileWriterWithEncoding(publicKeyFile, DEFAULT_CHARSET))) {
publicKeyWriter.writeObject(
new PemObject(PUBLIC_KEY, keyPair.getPublic().getEncoded()));
}
Files.setPosixFilePermissions(privateKeyFile.toPath(), permissionSet);
Files.setPosixFilePermissions(publicKeyFile.toPath(), permissionSet);
}
/**
* Checks if the private and public key files already exist. If a file
* exists and the force flag is set, the existing file is deleted;
* otherwise an IOException is thrown.
*
* @param privateKeyFile - Private key file.
* @param force - forces overwriting the keys.
* @param publicKeyFile - public key file.
* @throws IOException
*/
private void checkKeyFile(File privateKeyFile, boolean force,
File publicKeyFile) throws IOException {
if (privateKeyFile.exists() && force) {
if (!privateKeyFile.delete()) {
throw new IOException("Unable to delete private key file.");
}
}
if (publicKeyFile.exists() && force) {
if (!publicKeyFile.delete()) {
throw new IOException("Unable to delete public key file.");
}
}
if (privateKeyFile.exists()) {
throw new IOException("Private Key file already exists.");
}
if (publicKeyFile.exists()) {
throw new IOException("Public Key file already exists.");
}
}
/**
* Checks if base path exists and sets file permissions.
*
* @param basePath - base path to write key
* @throws IOException
*/
private void checkPreconditions(Path basePath) throws IOException {
Preconditions.checkNotNull(basePath, "Base path cannot be null");
if (!isPosixFileSystem.get()) {
LOG.error("Keys cannot be stored securely without POSIX file system "
+ "support for now.");
throw new IOException("Unsupported File System for pem file.");
}
if (Files.exists(basePath)) {
// Not the end of the world if we reset the permissions on an existing
// directory.
Files.setPosixFilePermissions(basePath, permissionSet);
} else {
boolean success = basePath.toFile().mkdirs();
if (!success) {
LOG.error("Unable to create the directory for the "
+ "location. Location: {}", basePath);
throw new IOException("Unable to create the directory for the "
+ "location. Location:" + basePath);
}
Files.setPosixFilePermissions(basePath, permissionSet);
}
}
}
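Putting the two classes together, persisting a fresh key pair is a two-step affair. A minimal end-to-end sketch, assuming a POSIX file system and a writable, hypothetical metadata path:

import java.security.KeyPair;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.HDDSKeyGenerator;
import org.apache.hadoop.hdds.security.x509.HDDSKeyPEMWriter;

public class PemWriterSketch {
  public static void main(String[] args) throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.set("hdds.metadata.dir", "/tmp/hdds-meta"); // hypothetical path
    KeyPair keyPair = new HDDSKeyGenerator(conf).generateKey();
    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(conf);
    // Writes <metadata>/keys/private.pem and <metadata>/keys/public.pem,
    // failing with an IOException if either file already exists.
    pemWriter.writeKey(keyPair);
    // Pass overwrite=true to replace existing key files.
    pemWriter.writeKey(keyPair, true);
  }
}

Since the output is ordinary PEM, something like `openssl pkey -in private.pem -text -noout` should be able to inspect the private key, which is exactly the debugging property the class comment is after.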

SecurityConfig.java

@@ -0,0 +1,190 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.hdds.security.x509;
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.Provider;
import java.security.Security;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_LEN;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_ALGORITHM;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_SECURITY_PROVIDER;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_ALGORITHM;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_DIR_NAME;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_DIR_NAME_DEFAULT;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_LEN;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PRIVATE_KEY_FILE_NAME;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_SECURITY_PROVIDER;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
/**
* A class that deals with all security-related configs in HDDS.
* It is easier to have all Java code related to config in a single place.
*/
public class SecurityConfig {
private static final Logger LOG =
LoggerFactory.getLogger(SecurityConfig.class);
private static volatile Provider provider;
private final Configuration configuration;
private final int size;
private final String algo;
private final String providerString;
private final String metadataDir;
private final String keyDir;
private final String privateKeyName;
private final String publicKeyName;
/**
* Constructs a SecurityConfig.
*
* @param configuration - HDDS Configuration
*/
public SecurityConfig(Configuration configuration) {
Preconditions.checkNotNull(configuration, "Configuration cannot be null");
this.configuration = configuration;
this.size = this.configuration.getInt(HDDS_KEY_LEN, HDDS_DEFAULT_KEY_LEN);
this.algo = this.configuration.get(HDDS_KEY_ALGORITHM,
HDDS_DEFAULT_KEY_ALGORITHM);
this.providerString = this.configuration.get(HDDS_SECURITY_PROVIDER,
HDDS_DEFAULT_SECURITY_PROVIDER);
// Please Note: To make it easy for our customers we will attempt to read
// HDDS metadata dir and if that is not set, we will use Ozone directory.
// TODO: We might want to fix this later.
this.metadataDir = this.configuration.get(HDDS_METADATA_DIR_NAME,
configuration.get(OZONE_METADATA_DIRS));
Preconditions.checkNotNull(this.metadataDir, "Metadata directory can't be"
+ " null. Please check configs.");
this.keyDir = this.configuration.get(HDDS_KEY_DIR_NAME,
HDDS_KEY_DIR_NAME_DEFAULT);
this.privateKeyName = this.configuration.get(HDDS_PRIVATE_KEY_FILE_NAME,
HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT);
this.publicKeyName = this.configuration.get(HDDS_PUBLIC_KEY_FILE_NAME,
HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT);
// First Startup -- if the provider is null, check for the provider.
if (SecurityConfig.provider == null) {
synchronized (SecurityConfig.class) {
provider = Security.getProvider(this.providerString);
if (SecurityConfig.provider == null) {
// Provider not found, let us try to Dynamically initialize the
// provider.
provider = initSecurityProvider(this.providerString);
}
}
}
}
/**
* Returns the Provider name.
* @return String Provider name.
*/
public String getProviderString() {
return providerString;
}
/**
* Returns the public key file name.
* @return String, File name used for public keys.
*/
public String getPublicKeyName() {
return publicKeyName;
}
/**
* Returns the private key file name.
* @return String, File name used for private keys.
*/
public String getPrivateKeyName() {
return privateKeyName;
}
/**
* Returns the File path to where keys are stored.
* @return String Key location.
*/
public Path getKeyLocation() {
return Paths.get(metadataDir, keyDir);
}
/**
* Gets the Key Size.
*
* @return key size.
*/
public int getSize() {
return size;
}
/**
* Gets provider.
*
* @return String Provider name.
*/
public String getProvider() {
return providerString;
}
/**
* Returns the Key generation Algorithm used.
*
* @return String Algo.
*/
public String getAlgo() {
return algo;
}
/**
* Returns the Configuration used for initializing this SecurityConfig.
* @return Configuration
*/
public Configuration getConfiguration() {
return configuration;
}
/**
* Adds a security provider dynamically if it is not loaded already.
*
* @param providerName - name of the provider.
*/
private Provider initSecurityProvider(String providerName) {
switch (providerName) {
case "BC":
Security.addProvider(new BouncyCastleProvider());
return Security.getProvider(providerName);
default:
LOG.error("Security Provider:{} is unknown", provider);
throw new SecurityException("Unknown security provider:" + provider);
}
}
}
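To see how the resolution rules above play out, a small sketch (default values per the ozone-default.xml entries below; the metadata path is a hypothetical example):

import java.nio.file.Path;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;

public class SecurityConfigSketch {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.set("hdds.metadata.dir", "/var/lib/hadoop/hdds"); // hypothetical path
    SecurityConfig secConfig = new SecurityConfig(conf);
    // Key location is <metadata dir>/<hdds.key.dir.name>.
    Path keyDir = secConfig.getKeyLocation(); // /var/lib/hadoop/hdds/keys
    System.out.println(keyDir);
    System.out.println(secConfig.getAlgo() + "/" + secConfig.getSize()); // RSA/2048
  }
}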

package-info.java

@@ -0,0 +1,25 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* This package contains common routines used in creating an x509 based
* identity framework for HDDS.
*/
package org.apache.hadoop.hdds.security.x509;

ozone-default.xml

@@ -1633,4 +1633,44 @@
OzoneManager http server kerberos keytab.
</description>
</property>
<property>
<name>hdds.key.len</name>
<value>2048</value>
<tag>SCM, HDDS, X509, SECURITY</tag>
<description>
SCM CA key length. This is an algorithm-specific metric, such as modulus length, specified in number of bits.
</description>
</property>
<property>
<name>hdds.key.dir.name</name>
<value>keys</value>
<tag>SCM, HDDS, X509, SECURITY</tag>
<description>
Directory to store public/private key for SCM CA. This is relative to the ozone/hdds metadata dir.
</description>
</property>
<property>
<name>hdds.metadata.dir</name>
<value/>
<tag>X509, SECURITY</tag>
<description>
Absolute path to HDDS metadata dir.
</description>
</property>
<property>
<name>hdds.priv.key.file.name</name>
<value>private.pem</value>
<tag>X509, SECURITY</tag>
<description>
Name of the file that stores the private key generated for the SCM CA.
</description>
</property>
<property>
<name>hdds.public.key.file.name</name>
<value>public.pem</value>
<tag>X509, SECURITY</tag>
<description>
Name of the file that stores the public key generated for the SCM CA.
</description>
</property>
</configuration> </configuration>

TestHDDSKeyGenerator.java

@@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.hdds.security.x509;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.PKCS8EncodedKeySpec;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Test class for HDDS Key Generator.
*/
public class TestHDDSKeyGenerator {
private static SecurityConfig config;
@Before
public void init() {
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(OZONE_METADATA_DIRS, GenericTestUtils.getTempPath("testpath"));
config = new SecurityConfig(conf);
}
/**
* In this test we verify that we are able to create a key pair, then get
* the encoded bytes and use an ASN.1 (PKCS#8) key spec to parse them back
* into a private key.
* @throws NoSuchProviderException
* @throws NoSuchAlgorithmException
*/
@Test
public void testGenerateKey()
throws NoSuchProviderException, NoSuchAlgorithmException {
HDDSKeyGenerator keyGen = new HDDSKeyGenerator(config.getConfiguration());
KeyPair keyPair = keyGen.generateKey();
Assert.assertEquals(config.getAlgo(), keyPair.getPrivate().getAlgorithm());
PKCS8EncodedKeySpec keySpec =
new PKCS8EncodedKeySpec(keyPair.getPrivate().getEncoded());
Assert.assertEquals("PKCS#8", keySpec.getFormat());
}
/**
* In this test we assert that the size that we specified is used for key
* generation.
* @throws NoSuchProviderException
* @throws NoSuchAlgorithmException
*/
@Test
public void testGenerateKeyWithSize() throws NoSuchProviderException,
NoSuchAlgorithmException {
HDDSKeyGenerator keyGen = new HDDSKeyGenerator(config.getConfiguration());
KeyPair keyPair = keyGen.generateKey(4096);
PublicKey publicKey = keyPair.getPublic();
if (publicKey instanceof RSAPublicKey) {
Assert.assertEquals(4096,
((RSAPublicKey)(publicKey)).getModulus().bitLength());
}
}
}

TestHDDSKeyPEMWriter.java

@@ -0,0 +1,213 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.hdds.security.x509;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Set;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
* Test class for HDDS pem writer.
*/
public class TestHDDSKeyPEMWriter {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
private OzoneConfiguration configuration;
private HDDSKeyGenerator keyGenerator;
private String prefix;
@Before
public void init() throws IOException {
configuration = new OzoneConfiguration();
prefix = temporaryFolder.newFolder().toString();
configuration.set(HDDS_METADATA_DIR_NAME, prefix);
keyGenerator = new HDDSKeyGenerator(configuration);
}
/**
* Assert basic things, like that we are able to create the files and that
* the names are in the expected format.
*
* @throws NoSuchProviderException
* @throws NoSuchAlgorithmException
* @throws IOException
*/
@Test
public void testWriteKey()
throws NoSuchProviderException, NoSuchAlgorithmException,
IOException, InvalidKeySpecException {
KeyPair keys = keyGenerator.generateKey();
HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
pemWriter.writeKey(keys);
// Assert that locations have been created.
Path keyLocation = pemWriter.getSecurityConfig().getKeyLocation();
Assert.assertTrue(keyLocation.toFile().exists());
// Assert that locations are created in the locations that we specified
// using the Config.
Assert.assertTrue(keyLocation.toString().startsWith(prefix));
Path privateKeyPath = Paths.get(keyLocation.toString(),
pemWriter.getSecurityConfig().getPrivateKeyName());
Assert.assertTrue(privateKeyPath.toFile().exists());
Path publicKeyPath = Paths.get(keyLocation.toString(),
pemWriter.getSecurityConfig().getPublicKeyName());
Assert.assertTrue(publicKeyPath.toFile().exists());
// Read the private key and test if the expected String in the PEM file
// format exists.
byte[] privateKey = Files.readAllBytes(privateKeyPath);
String privateKeydata = new String(privateKey, StandardCharsets.UTF_8);
Assert.assertTrue(privateKeydata.contains("PRIVATE KEY"));
// Read the public key and test if the expected String in the PEM file
// format exists.
byte[] publicKey = Files.readAllBytes(publicKeyPath);
String publicKeydata = new String(publicKey, StandardCharsets.UTF_8);
Assert.assertTrue(publicKeydata.contains("PUBLIC KEY"));
// Let us decode the PEM file and parse it back into binary.
KeyFactory kf = KeyFactory.getInstance(
pemWriter.getSecurityConfig().getAlgo());
// Strip the human-readable PEM guards.
privateKeydata =
privateKeydata.replace("-----BEGIN PRIVATE KEY-----\n", "");
privateKeydata =
privateKeydata.replace("-----END PRIVATE KEY-----", "");
// Decode the base64 to binary format and then use an ASN.1 parser to
// parse the binary format.
byte[] keyBytes = Base64.decodeBase64(privateKeydata);
PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
PrivateKey privateKeyDecoded = kf.generatePrivate(spec);
Assert.assertNotNull("Private Key should not be null",
privateKeyDecoded);
// Let us decode the public key and verify that we can parse it back into
// binary.
publicKeydata =
publicKeydata.replace("-----BEGIN PUBLIC KEY-----\n", "");
publicKeydata =
publicKeydata.replace("-----END PUBLIC KEY-----", "");
keyBytes = Base64.decodeBase64(publicKeydata);
X509EncodedKeySpec pubKeyspec = new X509EncodedKeySpec(keyBytes);
PublicKey publicKeyDecoded = kf.generatePublic(pubKeyspec);
Assert.assertNotNull("Public Key should not be null",
publicKeyDecoded);
// Now let us assert the permissions on the Directories and files are as
// expected.
Set<PosixFilePermission> expectedSet = pemWriter.getPermissionSet();
Set<PosixFilePermission> currentSet =
Files.getPosixFilePermissions(privateKeyPath);
currentSet.removeAll(expectedSet);
Assert.assertEquals(0, currentSet.size());
currentSet =
Files.getPosixFilePermissions(publicKeyPath);
currentSet.removeAll(expectedSet);
Assert.assertEquals(0, currentSet.size());
currentSet =
Files.getPosixFilePermissions(keyLocation);
currentSet.removeAll(expectedSet);
Assert.assertEquals(0, currentSet.size());
}
/**
* Assert key rewrite fails without force option.
*
* @throws IOException
*/
@Test
public void testReWriteKey()
throws Exception {
KeyPair kp = keyGenerator.generateKey();
HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
SecurityConfig secConfig = pemWriter.getSecurityConfig();
pemWriter.writeKey(kp);
// Assert that rewriting of keys throws exception with valid messages.
LambdaTestUtils
.intercept(IOException.class, "Private Key file already exists.",
() -> pemWriter.writeKey(kp));
FileUtils.deleteQuietly(Paths.get(
secConfig.getKeyLocation().toString() + "/" + secConfig
.getPrivateKeyName()).toFile());
LambdaTestUtils
.intercept(IOException.class, "Public Key file already exists.",
() -> pemWriter.writeKey(kp));
FileUtils.deleteQuietly(Paths.get(
secConfig.getKeyLocation().toString() + "/" + secConfig
.getPublicKeyName()).toFile());
// Should succeed now as both public and private key are deleted.
pemWriter.writeKey(kp);
// Should succeed with overwrite flag as true.
pemWriter.writeKey(kp, true);
}
/**
* Assert key write fails on a non-POSIX file system.
*
* @throws IOException
*/
@Test
public void testWriteKeyInNonPosixFS()
throws Exception {
KeyPair kp = keyGenerator.generateKey();
HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
pemWriter.setIsPosixFileSystem(() -> false);
// Assert key write fails on a non-POSIX file system.
LambdaTestUtils
.intercept(IOException.class, "Unsupported File System for pem file.",
() -> pemWriter.writeKey(kp));
}
}

TestOzoneConfigurationFields.java

@@ -38,5 +38,11 @@ public class TestOzoneConfigurationFields extends TestConfigurationFieldsBase {
errorIfMissingConfigProps = true;
errorIfMissingXmlProps = true;
xmlPropsToSkipCompare.add("hadoop.tags.custom");
addPropertiesNotInXml();
}
private void addPropertiesNotInXml() {
configurationPropsToSkipCompare.add(HddsConfigKeys.HDDS_KEY_ALGORITHM);
configurationPropsToSkipCompare.add(HddsConfigKeys.HDDS_SECURITY_PROVIDER);
}
}