HDDS-548. Create a Self-Signed Certificate. Contributed by Anu Engineer.
This commit is contained in:
parent 0b034b7005
commit 2d269440b0
@@ -131,4 +131,20 @@ public final class HddsConfigKeys {

  public static final String HDDS_PUBLIC_KEY_FILE_NAME = "hdds.public.key.file"
      + ".name";
  public static final String HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT = "public.pem";

  /**
   * Maximum duration of certificates issued by SCM including Self-Signed Roots.
   * The formats accepted are based on the ISO-8601 duration format PnDTnHnMn.nS
   * Default value is 5 years and written as P1865D.
   */
  public static final String HDDS_X509_MAX_DURATION = "hdds.x509.max.duration";
  // Limit Certificate duration to a max value of 5 years.
  public static final String HDDS_X509_MAX_DURATION_DEFAULT= "P1865D";

  public static final String HDDS_X509_SIGNATURE_ALGO =
      "hdds.x509.signature.algorithm";
  public static final String HDDS_X509_SIGNATURE_ALGO_DEFAULT = "SHA256withRSA";

}
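As a side note on the default above: it is an ISO-8601 duration string that the SecurityConfig change further below hands to java.time.Duration.parse. A minimal sketch of how such a value resolves (the class name is illustrative, not part of this change):

```java
import java.time.Duration;

public class MaxDurationExample {
  public static void main(String[] args) {
    // "P1865D" is the HDDS_X509_MAX_DURATION_DEFAULT value introduced above.
    Duration max = Duration.parse("P1865D");
    System.out.println(max.toDays()); // prints 1865
  }
}
```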
@@ -29,9 +29,10 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.Provider;
import java.security.Security;
import java.time.Duration;

import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_LEN;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_ALGORITHM;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_LEN;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_SECURITY_PROVIDER;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_ALGORITHM;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_DIR_NAME;

@@ -43,11 +44,17 @@ import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PRIVATE_KEY_FILE_NAME_D
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_SECURITY_PROVIDER;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_MAX_DURATION;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_MAX_DURATION_DEFAULT;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_SIGNATURE_ALGO_DEFAULT;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;

/**
 * A class that deals with all Security related configs in HDDDS.
 * It is easier to have all Java code related to config in a single place.
 * A class that deals with all Security related configs in HDDS.
 *
 * This class allows security configs to be read and used consistently across
 * all of security related code base.
 */
public class SecurityConfig {
  private static final Logger LOG =

@@ -55,15 +62,17 @@ public class SecurityConfig {
  private static volatile Provider provider;
  private final Configuration configuration;
  private final int size;
  private final String algo;
  private final String keyAlgo;
  private final String providerString;
  private final String metadatDir;
  private final String keyDir;
  private final String privateKeyName;
  private final String publicKeyName;
  private final String privateKeyFileName;
  private final String publicKeyFileName;
  private final Duration certDuration;
  private final String x509SignatureAlgo;

  /**
   * Constructs a HDDSKeyGenerator.
   * Constructs a SecurityConfig.
   *
   * @param configuration - HDDS Configuration
   */

@@ -71,10 +80,10 @@ public class SecurityConfig {
    Preconditions.checkNotNull(configuration, "Configuration cannot be null");
    this.configuration = configuration;
    this.size = this.configuration.getInt(HDDS_KEY_LEN, HDDS_DEFAULT_KEY_LEN);
    this.algo = this.configuration.get(HDDS_KEY_ALGORITHM,
    this.keyAlgo = this.configuration.get(HDDS_KEY_ALGORITHM,
        HDDS_DEFAULT_KEY_ALGORITHM);
    this.providerString = this.configuration.get(HDDS_SECURITY_PROVIDER,
        HDDS_DEFAULT_SECURITY_PROVIDER);
        HDDS_DEFAULT_SECURITY_PROVIDER);

    // Please Note: To make it easy for our customers we will attempt to read
    // HDDS metadata dir and if that is not set, we will use Ozone directory.

@@ -86,11 +95,17 @@ public class SecurityConfig {
        + " null. Please check configs.");
    this.keyDir = this.configuration.get(HDDS_KEY_DIR_NAME,
        HDDS_KEY_DIR_NAME_DEFAULT);
    this.privateKeyName = this.configuration.get(HDDS_PRIVATE_KEY_FILE_NAME,
    this.privateKeyFileName = this.configuration.get(HDDS_PRIVATE_KEY_FILE_NAME,
        HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT);
    this.publicKeyName = this.configuration.get(HDDS_PUBLIC_KEY_FILE_NAME,
    this.publicKeyFileName = this.configuration.get(HDDS_PUBLIC_KEY_FILE_NAME,
        HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT);

    String durationString = this.configuration.get(HDDS_X509_MAX_DURATION,
        HDDS_X509_MAX_DURATION_DEFAULT);
    this.certDuration = Duration.parse(durationString);
    this.x509SignatureAlgo = this.configuration.get(HDDS_X509_SIGNATURE_ALGO,
        HDDS_X509_SIGNATURE_ALGO_DEFAULT);

    // First Startup -- if the provider is null, check for the provider.
    if (SecurityConfig.provider == null) {
      synchronized (SecurityConfig.class) {
@@ -105,39 +120,38 @@ public class SecurityConfig {
  }

  /**
   * Returns the Provider name.
   * @return String Provider name.
   */
  public String getProviderString() {
    return providerString;
  }

  /**
   * Returns the public key file name.
   * Returns the public key file name, This is used for storing the public
   * keys on disk.
   *
   * @return String, File name used for public keys.
   */
  public String getPublicKeyName() {
    return publicKeyName;
  public String getPublicKeyFileName() {
    return publicKeyFileName;
  }

  /**
   * Returns the private key file name.
   * Returns the private key file name.This is used for storing the private
   * keys on disk.
   *
   * @return String, File name used for private keys.
   */
  public String getPrivateKeyName() {
    return privateKeyName;
  public String getPrivateKeyFileName() {
    return privateKeyFileName;
  }

  /**
   * Returns the File path to where keys are stored.
   * @return String Key location.
   *
   * @return String Key location.
   */
  public Path getKeyLocation() {
    return Paths.get(metadatDir, keyDir);
  }

  /**
   * Gets the Key Size.
   * Gets the Key Size, The default key size is 2048, since the default
   * algorithm used is RSA. User can change this by setting the "hdds.key
   * .len" in configuration.
   *
   * @return key size.
   */

@@ -146,7 +160,8 @@ public class SecurityConfig {
  }

  /**
   * Gets provider.
   * Returns the Provider name. SCM defaults to using Bouncy Castle and will
   * return "BC".
   *
   * @return String Provider name.
   */

@@ -155,22 +170,48 @@ public class SecurityConfig {
  }

  /**
   * Returns the Key generation Algorithm used.
   * Returns the Key generation Algorithm used. User can change this by
   * setting the "hdds.key.algo" in configuration.
   *
   * @return String Algo.
   */
  public String getAlgo() {
    return algo;
  public String getKeyAlgo() {
    return keyAlgo;
  }

  /**
   * Returns the X.509 Signature Algorithm used. This can be changed by setting
   * "hdds.x509.signature.algorithm" to the new name. The default algorithm
   * is SHA256withRSA.
   *
   * @return String
   */
  public String getSignatureAlgo() {
    return x509SignatureAlgo;
  }

  /**
   * Returns the Configuration used for initializing this SecurityConfig.
   * @return Configuration
   *
   * @return Configuration
   */
  public Configuration getConfiguration() {
    return configuration;
  }

  /**
   * Returns the maximum length a certificate can be valid in SCM. The
   * default value is 5 years. This can be changed by setting
   * "hdds.x509.max.duration" in configuration. The formats accepted are
   * based on the ISO-8601 duration format PnDTnHnMn.nS
   *
   * Default value is 5 years and written as P1865D.
   *
   * @return Duration.
   */
  public Duration getMaxCertificateDuration() {
    return this.certDuration;
  }

  /**
   * Adds a security provider dynamically if it is not loaded already.
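A hedged sketch of how the new SecurityConfig getters above could be exercised; OzoneConfiguration and OZONE_METADATA_DIRS come from the Ozone code base referenced in this change, while the class name and the metadata directory value are illustrative:

```java
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.ozone.OzoneConfigKeys;

public class SecurityConfigExample {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();
    // SecurityConfig requires a metadata directory; "/tmp/ozone-meta" is illustrative.
    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, "/tmp/ozone-meta");

    SecurityConfig securityConfig = new SecurityConfig(conf);
    System.out.println(securityConfig.getKeyAlgo());
    System.out.println(securityConfig.getSignatureAlgo());          // "SHA256withRSA" by default
    System.out.println(securityConfig.getMaxCertificateDuration()); // PT44760H, i.e. 1865 days
  }
}
```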
@@ -0,0 +1,212 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.security.x509.certificates;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.hdds.security.x509.exceptions.CertificateException;
import org.apache.hadoop.hdds.security.x509.exceptions.SCMSecurityException;
import org.apache.hadoop.util.Time;
import org.apache.logging.log4j.util.Strings;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.BasicConstraints;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.cert.CertIOException;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.X509v3CertificateBuilder;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;

import java.math.BigInteger;
import java.security.KeyPair;
import java.time.Duration;
import java.util.Date;

/**
 * A Self Signed Certificate with CA basic constraint can be used to boot-strap
 * a certificate infra-structure, if no external certificate is provided.
 */
public final class SelfSignedCertificate {
  private static final String NAME_FORMAT = "CN=%s,OU=%s,O=%s";
  private String subject;
  private String clusterID;
  private String scmID;
  private Date beginDate;
  private Date endDate;
  private KeyPair key;
  private SecurityConfig config;
  private boolean isCA;

  /**
   * Private Ctor invoked only via Builder Interface.
   * @param subject - Subject
   * @param scmID - SCM ID
   * @param clusterID - Cluster ID
   * @param beginDate - NotBefore
   * @param endDate - Not After
   * @param configuration - SCM Config
   * @param keyPair - KeyPair
   * @param ca - isCA?
   */
  private SelfSignedCertificate(String subject, String scmID, String clusterID,
      Date beginDate, Date endDate, SecurityConfig configuration,
      KeyPair keyPair, boolean ca) {
    this.subject = subject;
    this.clusterID = clusterID;
    this.scmID = scmID;
    this.beginDate = beginDate;
    this.endDate = endDate;
    config = configuration;
    this.key = keyPair;
    this.isCA = ca;
  }

  @VisibleForTesting
  public static String getNameFormat() {
    return NAME_FORMAT;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  private X509CertificateHolder generateCertificate()
      throws OperatorCreationException, CertIOException {
    // For the Root Certificate we form the name from Subject, SCM ID and
    // Cluster ID.
    String dnName = String.format(getNameFormat(), subject, scmID, clusterID);
    X500Name name = new X500Name(dnName);
    byte[] encoded = key.getPublic().getEncoded();
    SubjectPublicKeyInfo publicKeyInfo =
        SubjectPublicKeyInfo.getInstance(encoded);

    ContentSigner contentSigner =
        new JcaContentSignerBuilder(
            config.getSignatureAlgo()).build(key.getPrivate());

    // Please note: Since this is a root certificate we use "ONE" as the
    // serial number. Also note that skip enforcing locale or UTC. We are
    // trying to operate at the Days level, hence Time zone is also skipped for
    // now.
    BigInteger serial = BigInteger.ONE;
    if (!isCA) {
      serial = new BigInteger(Long.toString(Time.monotonicNow()));
    }

    X509v3CertificateBuilder builder = new X509v3CertificateBuilder(name,
        serial, beginDate, endDate, name, publicKeyInfo);

    if (isCA) {
      builder.addExtension(Extension.basicConstraints, true,
          new BasicConstraints(true));
    }
    return builder.build(contentSigner);
  }

  /**
   * Builder class for Root Certificates.
   */
  public static class Builder {
    private String subject;
    private String clusterID;
    private String scmID;
    private Date beginDate;
    private Date endDate;
    private KeyPair key;
    private SecurityConfig config;
    private boolean isCA;

    public Builder setConfiguration(Configuration configuration) {
      this.config = new SecurityConfig(configuration);
      return this;
    }

    public Builder setKey(KeyPair keyPair) {
      this.key = keyPair;
      return this;
    }

    public Builder setSubject(String subjectString) {
      this.subject = subjectString;
      return this;
    }

    public Builder setClusterID(String s) {
      this.clusterID = s;
      return this;
    }

    public Builder setScmID(String s) {
      this.scmID = s;
      return this;
    }

    public Builder setBeginDate(Date date) {
      this.beginDate = new Date(date.toInstant().toEpochMilli());
      return this;
    }

    public Builder setEndDate(Date date) {
      this.endDate = new Date(date.toInstant().toEpochMilli());
      return this;
    }

    public Builder makeCA() {
      isCA = true;
      return this;
    }

    public X509CertificateHolder build() throws SCMSecurityException {
      Preconditions.checkNotNull(key, "Key cannot be null");
      Preconditions.checkArgument(Strings.isNotBlank(subject), "Subject " +
          "cannot be blank");
      Preconditions.checkArgument(Strings.isNotBlank(clusterID), "Cluster ID " +
          "cannot be blank");
      Preconditions.checkArgument(Strings.isNotBlank(scmID), "SCM ID cannot " +
          "be blank");

      Preconditions.checkArgument(beginDate.before(endDate), "Certificate " +
          "begin date should be before end date");

      Duration certDuration = Duration.between(beginDate.toInstant(),
          endDate.toInstant());
      Preconditions.checkArgument(
          certDuration.compareTo(config.getMaxCertificateDuration()) < 0,
          "Certificate life time cannot be greater than max configured value.");

      SelfSignedCertificate rootCertificate =
          new SelfSignedCertificate(this.subject,
              this.scmID, this.clusterID, this.beginDate, this.endDate,
              this.config, key, isCA);
      try {
        return rootCertificate.generateCertificate();
      } catch (OperatorCreationException | CertIOException e) {
        throw new CertificateException("Unable to create root certificate.",
            e.getCause());
      }
    }
  }
}
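A short usage sketch of the builder defined above, mirroring the TestRootCertificate test added later in this change; the wrapper class, main method, and metadata directory value are illustrative, not part of the commit:

```java
import java.security.KeyPair;
import java.time.Duration;
import java.time.Instant;
import java.util.Date;
import java.util.UUID;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.certificates.SelfSignedCertificate;
import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.bouncycastle.cert.X509CertificateHolder;

public class SelfSignedCertificateExample {
  public static void main(String[] args) throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, "/tmp/ozone-meta"); // illustrative path

    // Generate a key pair with the HDDSKeyGenerator used elsewhere in this change.
    KeyPair keyPair = new HDDSKeyGenerator(conf).generateKey();

    X509CertificateHolder rootCert = SelfSignedCertificate.newBuilder()
        .setSubject("testRootCert")
        .setScmID(UUID.randomUUID().toString())
        .setClusterID(UUID.randomUUID().toString())
        .setBeginDate(Date.from(Instant.now()))
        .setEndDate(Date.from(Instant.now().plus(Duration.ofDays(365))))
        .setConfiguration(conf)
        .setKey(keyPair)
        .makeCA() // adds the critical CA basic constraint and uses serial ONE
        .build();

    System.out.println(rootCert.getSubject());
  }
}
```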
@@ -0,0 +1,22 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
/**
 * Utils for Certificates.
 */
package org.apache.hadoop.hdds.security.x509.certificates;
@@ -0,0 +1,63 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.security.x509.exceptions;

/**
 * Certificate Exceptions from the SCM Security layer.
 */
public class CertificateException extends SCMSecurityException {

  /**
   * Ctor.
   * @param message - Error Message.
   */
  public CertificateException(String message) {
    super(message);
  }

  /**
   * Ctor.
   * @param message - Message.
   * @param cause - Actual cause.
   */
  public CertificateException(String message, Throwable cause) {
    super(message, cause);
  }

  /**
   * Ctor.
   * @param cause - Base Exception.
   */
  public CertificateException(Throwable cause) {
    super(cause);
  }

  /**
   * Ctor.
   * @param message - Error Message
   * @param cause - Cause
   * @param enableSuppression - Enable suppression.
   * @param writableStackTrace - Writable stack trace.
   */
  public CertificateException(String message, Throwable cause,
      boolean enableSuppression, boolean writableStackTrace) {
    super(message, cause, enableSuppression, writableStackTrace);
  }
}
@@ -0,0 +1,64 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.security.x509.exceptions;

/**
 * Root Security Exception call for all Certificate related Execptions.
 */
public class SCMSecurityException extends Exception {

  /**
   * Ctor.
   * @param message - Error Message.
   */
  public SCMSecurityException(String message) {
    super(message);
  }

  /**
   * Ctor.
   * @param message - Message.
   * @param cause - Actual cause.
   */
  public SCMSecurityException(String message, Throwable cause) {
    super(message, cause);
  }

  /**
   * Ctor.
   * @param cause - Base Exception.
   */
  public SCMSecurityException(Throwable cause) {
    super(cause);
  }

  /**
   * Ctor.
   * @param message - Error Message
   * @param cause - Cause
   * @param enableSuppression - Enable suppression.
   * @param writableStackTrace - Writable stack trace.
   */
  public SCMSecurityException(String message, Throwable cause,
      boolean enableSuppression, boolean writableStackTrace) {
    super(message, cause, enableSuppression, writableStackTrace);
  }
}
@@ -0,0 +1,23 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

/**
 * Exceptions thrown by X.509 security classes.
 */
package org.apache.hadoop.hdds.security.x509.exceptions;
@@ -5,7 +5,7 @@
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *

@@ -16,9 +16,10 @@
 * limitations under the License.
 *
 */
package org.apache.hadoop.hdds.security.x509;
package org.apache.hadoop.hdds.security.x509.keys;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -54,13 +55,15 @@ public class HDDSKeyGenerator {
   * Use Config to generate key.
   *
   * @return KeyPair
   * @throws NoSuchProviderException
   * @throws NoSuchAlgorithmException
   * @throws NoSuchProviderException - On Error, due to missing Java
   * dependencies.
   * @throws NoSuchAlgorithmException - On Error, due to missing Java
   * dependencies.
   */
  public KeyPair generateKey() throws NoSuchProviderException,
      NoSuchAlgorithmException {
    return generateKey(securityConfig.getSize(),
        securityConfig.getAlgo(), securityConfig.getProvider());
        securityConfig.getKeyAlgo(), securityConfig.getProvider());
  }

  /**

@@ -68,13 +71,15 @@ public class HDDSKeyGenerator {
   *
   * @param size - int, valid key sizes.
   * @return KeyPair
   * @throws NoSuchProviderException
   * @throws NoSuchAlgorithmException
   * @throws NoSuchProviderException - On Error, due to missing Java
   * dependencies.
   * @throws NoSuchAlgorithmException - On Error, due to missing Java
   * dependencies.
   */
  public KeyPair generateKey(int size) throws
      NoSuchProviderException, NoSuchAlgorithmException {
    return generateKey(size,
        securityConfig.getAlgo(), securityConfig.getProvider());
        securityConfig.getKeyAlgo(), securityConfig.getProvider());
  }

  /**

@@ -84,8 +89,10 @@ public class HDDSKeyGenerator {
   * @param algorithm - Algorithm to use
   * @param provider - Security provider.
   * @return KeyPair.
   * @throws NoSuchProviderException
   * @throws NoSuchAlgorithmException
   * @throws NoSuchProviderException - On Error, due to missing Java
   * dependencies.
   * @throws NoSuchAlgorithmException - On Error, due to missing Java
   * dependencies.
   */
  public KeyPair generateKey(int size, String algorithm, String provider)
      throws NoSuchProviderException, NoSuchAlgorithmException {
@@ -5,7 +5,7 @@
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *

@@ -16,7 +16,7 @@
 * limitations under the License.
 *
 */
package org.apache.hadoop.hdds.security.x509;
package org.apache.hadoop.hdds.security.x509.keys;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;

@@ -24,6 +24,7 @@ import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.output.FileWriterWithEncoding;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.bouncycastle.util.io.pem.PemObject;
import org.bouncycastle.util.io.pem.PemWriter;
import org.slf4j.Logger;

@@ -119,11 +120,11 @@ public class HDDSKeyPEMWriter {
   * Writes a given key using the default config options.
   *
   * @param keyPair - Key Pair to write to file.
   * @throws IOException
   * @throws IOException - On I/O failure.
   */
  public void writeKey(KeyPair keyPair) throws IOException {
    writeKey(location, keyPair, securityConfig.getPrivateKeyName(),
        securityConfig.getPublicKeyName(), false);
    writeKey(location, keyPair, securityConfig.getPrivateKeyFileName(),
        securityConfig.getPublicKeyFileName(), false);
  }

  /**

@@ -131,11 +132,11 @@ public class HDDSKeyPEMWriter {
   *
   * @param keyPair - Key pair to write
   * @param overwrite - Overwrites the keys if they already exist.
   * @throws IOException
   * @throws IOException - On I/O failure.
   */
  public void writeKey(KeyPair keyPair, boolean overwrite) throws IOException {
    writeKey(location, keyPair, securityConfig.getPrivateKeyName(),
        securityConfig.getPublicKeyName(), overwrite);
    writeKey(location, keyPair, securityConfig.getPrivateKeyFileName(),
        securityConfig.getPublicKeyFileName(), overwrite);
  }

  /**

@@ -144,12 +145,12 @@ public class HDDSKeyPEMWriter {
   * @param basePath - The location to write to, override the config values.
   * @param keyPair - Key pair to write
   * @param overwrite - Overwrites the keys if they already exist.
   * @throws IOException
   * @throws IOException - On I/O failure.
   */
  public void writeKey(Path basePath, KeyPair keyPair, boolean overwrite)
      throws IOException {
    writeKey(basePath, keyPair, securityConfig.getPrivateKeyName(),
        securityConfig.getPublicKeyName(), overwrite);
    writeKey(basePath, keyPair, securityConfig.getPrivateKeyFileName(),
        securityConfig.getPublicKeyFileName(), overwrite);
  }

  /**

@@ -160,7 +161,7 @@ public class HDDSKeyPEMWriter {
   * @param privateKeyFileName - private key file name.
   * @param publicKeyFileName - public key file name.
   * @param force - forces overwriting the keys.
   * @throws IOException
   * @throws IOException - On I/O failure.
   */
  private synchronized void writeKey(Path basePath, KeyPair keyPair,
      String privateKeyFileName, String publicKeyFileName, boolean force)

@@ -196,7 +197,7 @@ public class HDDSKeyPEMWriter {
   * @param privateKeyFile - Private key file.
   * @param force - forces overwriting the keys.
   * @param publicKeyFile - public key file.
   * @throws IOException
   * @throws IOException - On I/O failure.
   */
  private void checkKeyFile(File privateKeyFile, boolean force,
      File publicKeyFile) throws IOException {

@@ -225,7 +226,7 @@ public class HDDSKeyPEMWriter {
   * Checks if base path exists and sets file permissions.
   *
   * @param basePath - base path to write key
   * @throws IOException
   * @throws IOException - On I/O failure.
   */
  private void checkPreconditions(Path basePath) throws IOException {
    Preconditions.checkNotNull(basePath, "Base path cannot be null");
@@ -0,0 +1,23 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

/**
 * Utils for private and public keys.
 */
package org.apache.hadoop.hdds.security.x509.keys;
@@ -0,0 +1,258 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.security.x509.certificates;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.hdds.security.x509.exceptions.SCMSecurityException;
import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.IOException;
import java.math.BigInteger;
import java.security.InvalidKeyException;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SignatureException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.time.Duration;
import java.time.Instant;
import java.util.Date;
import java.util.UUID;

import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;

/**
 * Test Class for Root Certificate generation.
 */
public class TestRootCertificate {
  private SecurityConfig securityConfig;
  private static OzoneConfiguration conf = new OzoneConfiguration();
  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Before
  public void init() throws IOException {
    conf.set(OZONE_METADATA_DIRS, temporaryFolder.newFolder().toString());
    securityConfig = new SecurityConfig(conf);
  }

  @Test
  public void testAllFieldsAreExpected()
      throws SCMSecurityException, NoSuchProviderException,
      NoSuchAlgorithmException, CertificateException,
      SignatureException, InvalidKeyException {
    Instant now = Instant.now();
    Date notBefore = Date.from(now);
    Date notAfter = Date.from(now.plus(Duration.ofDays(365)));
    String clusterID = UUID.randomUUID().toString();
    String scmID = UUID.randomUUID().toString();
    String subject = "testRootCert";
    HDDSKeyGenerator keyGen =
        new HDDSKeyGenerator(securityConfig.getConfiguration());
    KeyPair keyPair = keyGen.generateKey();

    SelfSignedCertificate.Builder builder =
        SelfSignedCertificate.newBuilder()
            .setBeginDate(notBefore)
            .setEndDate(notAfter)
            .setClusterID(clusterID)
            .setScmID(scmID)
            .setSubject(subject)
            .setKey(keyPair)
            .setConfiguration(conf);

    X509CertificateHolder certificateHolder = builder.build();

    //Assert that we indeed have a self signed certificate.
    Assert.assertEquals(certificateHolder.getIssuer(),
        certificateHolder.getSubject());

    // Make sure that NotBefore is before the current Date
    Date invalidDate = Date.from(now.minus(Duration.ofDays(1)));
    Assert.assertFalse(
        certificateHolder.getNotBefore()
            .before(invalidDate));

    //Make sure the end date is honored.
    invalidDate = Date.from(now.plus(Duration.ofDays(366)));
    Assert.assertFalse(
        certificateHolder.getNotAfter()
            .after(invalidDate));

    // Check the Subject Name and Issuer Name is in the expected format.
    String dnName = String.format(SelfSignedCertificate.getNameFormat(),
        subject, scmID, clusterID);
    Assert.assertEquals(certificateHolder.getIssuer().toString(), dnName);
    Assert.assertEquals(certificateHolder.getSubject().toString(), dnName);

    // We did not ask for this Certificate to be a CA certificate, hence that
    // extension should be null.
    Assert.assertNull(
        certificateHolder.getExtension(Extension.basicConstraints));

    // Extract the Certificate and verify that certificate matches the public
    // key.
    X509Certificate cert =
        new JcaX509CertificateConverter().getCertificate(certificateHolder);
    cert.verify(keyPair.getPublic());
  }

  @Test
  public void testCACert()
      throws SCMSecurityException, NoSuchProviderException,
      NoSuchAlgorithmException {
    Instant now = Instant.now();
    Date notBefore = Date.from(now);
    Date notAfter = Date.from(now.plus(Duration.ofDays(365)));
    String clusterID = UUID.randomUUID().toString();
    String scmID = UUID.randomUUID().toString();
    String subject = "testRootCert";
    HDDSKeyGenerator keyGen =
        new HDDSKeyGenerator(securityConfig.getConfiguration());
    KeyPair keyPair = keyGen.generateKey();

    SelfSignedCertificate.Builder builder =
        SelfSignedCertificate.newBuilder()
            .setBeginDate(notBefore)
            .setEndDate(notAfter)
            .setClusterID(clusterID)
            .setScmID(scmID)
            .setSubject(subject)
            .setKey(keyPair)
            .setConfiguration(conf)
            .makeCA();

    X509CertificateHolder certificateHolder = builder.build();
    // This time we asked for a CA Certificate, make sure that extension is
    // present and valid.
    Extension basicExt =
        certificateHolder.getExtension(Extension.basicConstraints);

    Assert.assertNotNull(basicExt);
    Assert.assertTrue(basicExt.isCritical());

    // Since this code assigns ONE for the root certificate, we check if the
    // serial number is the expected number.
    Assert.assertEquals(certificateHolder.getSerialNumber(), BigInteger.ONE);
  }

  @Test
  public void testInvalidParamFails()
      throws SCMSecurityException, NoSuchProviderException,
      NoSuchAlgorithmException {
    Instant now = Instant.now();
    Date notBefore = Date.from(now);
    Date notAfter = Date.from(now.plus(Duration.ofDays(365)));
    String clusterID = UUID.randomUUID().toString();
    String scmID = UUID.randomUUID().toString();
    String subject = "testRootCert";
    HDDSKeyGenerator keyGen =
        new HDDSKeyGenerator(securityConfig.getConfiguration());
    KeyPair keyPair = keyGen.generateKey();

    SelfSignedCertificate.Builder builder =
        SelfSignedCertificate.newBuilder()
            .setBeginDate(notBefore)
            .setEndDate(notAfter)
            .setClusterID(clusterID)
            .setScmID(scmID)
            .setSubject(subject)
            .setConfiguration(conf)
            .setKey(keyPair)
            .makeCA();
    try {
      builder.setKey(null);
      builder.build();
      Assert.fail("Null Key should have failed.");
    } catch (NullPointerException | IllegalArgumentException e) {
      builder.setKey(keyPair);
    }

    // Now try with Blank Subject.
    try {
      builder.setSubject("");
      builder.build();
      Assert.fail("Null/Blank Subject should have thrown.");
    } catch (IllegalArgumentException e) {
      builder.setSubject(subject);
    }

    // Now try with blank/null SCM ID
    try {
      builder.setScmID(null);
      builder.build();
      Assert.fail("Null/Blank SCM ID should have thrown.");
    } catch (IllegalArgumentException e) {
      builder.setScmID(scmID);
    }

    // Now try with blank/null SCM ID
    try {
      builder.setClusterID(null);
      builder.build();
      Assert.fail("Null/Blank Cluster ID should have thrown.");
    } catch (IllegalArgumentException e) {
      builder.setClusterID(clusterID);
    }

    // Swap the Begin and End Date and verify that we cannot create a
    // certificate like that.
    try {
      builder.setBeginDate(notAfter);
      builder.setEndDate(notBefore);
      builder.build();
      Assert.fail("Illegal dates should have thrown.");
    } catch (IllegalArgumentException e) {
      builder.setBeginDate(notBefore);
      builder.setEndDate(notAfter);
    }

    try {
      KeyPair newKey = keyGen.generateKey();
      KeyPair wrongKey = new KeyPair(newKey.getPublic(), keyPair.getPrivate());
      builder.setKey(wrongKey);
      X509CertificateHolder certificateHolder = builder.build();
      X509Certificate cert =
          new JcaX509CertificateConverter().getCertificate(certificateHolder);
      cert.verify(wrongKey.getPublic());
      Assert.fail("Invalid Key, should have thrown.");
    } catch (SCMSecurityException | CertificateException
        | SignatureException | InvalidKeyException e) {
      builder.setKey(keyPair);
    }
    // Assert that we can create a certificate with all sane params.
    Assert.assertNotNull(builder.build());
  }

}
@@ -0,0 +1,22 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
/**
 * Test classes for Certificate utilities.
 */
package org.apache.hadoop.hdds.security.x509.certificates;
@@ -5,7 +5,7 @@
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *

@@ -17,7 +17,7 @@
 *
 */

package org.apache.hadoop.hdds.security.x509;
package org.apache.hadoop.hdds.security.x509.keys;

import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
import java.security.KeyPair;

@@ -27,6 +27,7 @@ import java.security.PublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.PKCS8EncodedKeySpec;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Before;

@@ -36,7 +37,7 @@ import org.junit.Test;
 * Test class for HDDS Key Generator.
 */
public class TestHDDSKeyGenerator {
  private static SecurityConfig config;
  private SecurityConfig config;

  @Before
  public void init() {

@@ -47,15 +48,18 @@ public class TestHDDSKeyGenerator {
  /**
   * In this test we verify that we are able to create a key pair, then get
   * bytes of that and use ASN1. parser to parse it back to a private key.
   * @throws NoSuchProviderException
   * @throws NoSuchAlgorithmException
   * @throws NoSuchProviderException - On Error, due to missing Java
   * dependencies.
   * @throws NoSuchAlgorithmException - On Error, due to missing Java
   * dependencies.
   */
  @Test
  public void testGenerateKey()
      throws NoSuchProviderException, NoSuchAlgorithmException {
    HDDSKeyGenerator keyGen = new HDDSKeyGenerator(config.getConfiguration());
    KeyPair keyPair = keyGen.generateKey();
    Assert.assertEquals(config.getAlgo(), keyPair.getPrivate().getAlgorithm());
    Assert.assertEquals(config.getKeyAlgo(),
        keyPair.getPrivate().getAlgorithm());
    PKCS8EncodedKeySpec keySpec =
        new PKCS8EncodedKeySpec(keyPair.getPrivate().getEncoded());
    Assert.assertEquals("PKCS#8", keySpec.getFormat());

@@ -64,8 +68,10 @@ public class TestHDDSKeyGenerator {
  /**
   * In this test we assert that size that we specified is used for Key
   * generation.
   * @throws NoSuchProviderException
   * @throws NoSuchAlgorithmException
   * @throws NoSuchProviderException - On Error, due to missing Java
   * dependencies.
   * @throws NoSuchAlgorithmException - On Error, due to missing Java
   * dependencies.
   */
  @Test
  public void testGenerateKeyWithSize() throws NoSuchProviderException,
@@ -5,7 +5,7 @@
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *

@@ -17,7 +17,7 @@
 *
 */

package org.apache.hadoop.hdds.security.x509;
package org.apache.hadoop.hdds.security.x509.keys;

import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;

@@ -40,6 +40,7 @@ import java.util.Set;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assert;
import org.junit.Before;

@@ -70,9 +71,11 @@ public class TestHDDSKeyPEMWriter {
   * Assert basic things like we are able to create a file, and the names are
   * in expected format etc.
   *
   * @throws NoSuchProviderException
   * @throws NoSuchAlgorithmException
   * @throws IOException
   * @throws NoSuchProviderException - On Error, due to missing Java
   * dependencies.
   * @throws NoSuchAlgorithmException - On Error, due to missing Java
   * dependencies.
   * @throws IOException - On I/O failure.
   */
  @Test
  public void testWriteKey()

@@ -90,10 +93,10 @@ public class TestHDDSKeyPEMWriter {
    // using the Config.
    Assert.assertTrue(keyLocation.toString().startsWith(prefix));
    Path privateKeyPath = Paths.get(keyLocation.toString(),
        pemWriter.getSecurityConfig().getPrivateKeyName());
        pemWriter.getSecurityConfig().getPrivateKeyFileName());
    Assert.assertTrue(privateKeyPath.toFile().exists());
    Path publicKeyPath = Paths.get(keyLocation.toString(),
        pemWriter.getSecurityConfig().getPublicKeyName());
        pemWriter.getSecurityConfig().getPublicKeyFileName());
    Assert.assertTrue(publicKeyPath.toFile().exists());

    // Read the private key and test if the expected String in the PEM file

@@ -110,7 +113,7 @@ public class TestHDDSKeyPEMWriter {

    // Let us decode the PEM file and parse it back into binary.
    KeyFactory kf = KeyFactory.getInstance(
        pemWriter.getSecurityConfig().getAlgo());
        pemWriter.getSecurityConfig().getKeyAlgo());

    // Replace the PEM Human readable guards.
    privateKeydata =

@@ -162,7 +165,7 @@ public class TestHDDSKeyPEMWriter {
  /**
   * Assert key rewrite fails without force option.
   *
   * @throws IOException
   * @throws IOException - on I/O failure.
   */
  @Test
  public void testReWriteKey()

@@ -178,13 +181,13 @@ public class TestHDDSKeyPEMWriter {
        () -> pemWriter.writeKey(kp));
    FileUtils.deleteQuietly(Paths.get(
        secConfig.getKeyLocation().toString() + "/" + secConfig
            .getPrivateKeyName()).toFile());
            .getPrivateKeyFileName()).toFile());
    LambdaTestUtils
        .intercept(IOException.class, "Public Key file already exists.",
            () -> pemWriter.writeKey(kp));
    FileUtils.deleteQuietly(Paths.get(
        secConfig.getKeyLocation().toString() + "/" + secConfig
            .getPublicKeyName()).toFile());
            .getPublicKeyFileName()).toFile());

    // Should succeed now as both public and private key are deleted.
    pemWriter.writeKey(kp);

@@ -196,7 +199,7 @@ public class TestHDDSKeyPEMWriter {
  /**
   * Assert key rewrite fails in non Posix file system.
   *
   * @throws IOException
   * @throws IOException - on I/O failure.
   */
  @Test
  public void testWriteKeyInNonPosixFS()
@@ -0,0 +1,22 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
/**
 * Test package for keys used in X.509 env.
 */
package org.apache.hadoop.hdds.security.x509.keys;
@@ -0,0 +1,22 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
/**
 * X.509 Certificate and keys related tests.
 */
package org.apache.hadoop.hdds.security.x509;