HDDS-1101. SCM CA: Write Certificate information to SCM Metadata. Contributed by Anu Engineer.
parent 1e0ae6ed15
commit cf1a66d0da
@@ -0,0 +1,80 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.security.x509.certificate.authority;

import java.io.IOException;
import java.math.BigInteger;
import java.security.cert.X509Certificate;

/**
 * This interface allows the DefaultCA to be portable and to use different DB
 * interfaces later. It also allows us to define this interface in the SCM
 * layer so that we don't have to create a circular dependency between
 * hdds-common and the SCM.
 *
 * With this interface, the DefaultCA server can read from and write to the
 * persistence layer, and we can write to SCM's Metadata DB.
 */
public interface CertificateStore {

  /**
   * Writes a newly issued certificate to the persistent store.
   * @param serialID - Certificate Serial Number.
   * @param certificate - Certificate to persist.
   * @throws IOException - on Failure.
   */
  void storeValidCertificate(BigInteger serialID,
      X509Certificate certificate) throws IOException;

  /**
   * Moves a certificate, in a transactional manner, from the valid to the
   * revoked certificate state.
   * @param serialID - Serial ID of the certificate.
   * @throws IOException - on failure.
   */
  void revokeCertificate(BigInteger serialID) throws IOException;

  /**
   * Deletes an expired certificate from the store. Please note: we don't
   * remove revoked certificates; we need that information to generate the
   * CRLs.
   * @param serialID - Certificate ID.
   * @throws IOException - on failure.
   */
  void removeExpiredCertificate(BigInteger serialID) throws IOException;

  /**
   * Retrieves a certificate based on its serial number.
   * @param serialID - ID of the certificate.
   * @param certType - Which certificate store to look in (valid or revoked).
   * @return X509Certificate
   * @throws IOException - on failure.
   */
  X509Certificate getCertificateByID(BigInteger serialID, CertType certType)
      throws IOException;

  /**
   * Different kinds of certificate stores.
   */
  enum CertType {
    VALID_CERTS,
    REVOKED_CERTS
  }
}
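
For illustration only (not part of the commit), a minimal sketch of a caller driving this interface. The store and cert arguments are placeholders, and the class is assumed to sit in the same package as CertificateStore:

    import java.io.IOException;
    import java.math.BigInteger;
    import java.security.cert.X509Certificate;

    final class CertificateStoreSketch {
      // Issue-then-revoke flow against any CertificateStore implementation.
      static void issueAndRevoke(CertificateStore store, X509Certificate cert)
          throws IOException {
        BigInteger serial = cert.getSerialNumber();
        store.storeValidCertificate(serial, cert);  // persist on issue
        store.revokeCertificate(serial);            // valid -> revoked, transactionally
        // Revoked certificates stay readable so CRLs can be generated later.
        X509Certificate revoked = store.getCertificateByID(
            serial, CertificateStore.CertType.REVOKED_CERTS);
        assert revoked != null;
      }
    }
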
@@ -19,10 +19,10 @@
 package org.apache.hadoop.hdds.security.x509.certificate.authority;

-import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.SecurityConfig;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.PKIProfile;
+import org.apache.hadoop.util.Time;
 import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
 import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
 import org.bouncycastle.cert.X509CertificateHolder;

@@ -100,9 +100,8 @@ public class DefaultApprover extends BaseApprover {
     X509v3CertificateBuilder certificateGenerator =
         new X509v3CertificateBuilder(
             caCertificate.getSubject(),
-            // When we do persistence we will check if the certificate number
-            // is a duplicate.
-            new BigInteger(RandomUtils.nextBytes(8)),
+            // Serial is not sequential, but it is monotonically increasing.
+            BigInteger.valueOf(Time.monotonicNowNanos()),
             validFrom,
             validTill,
             certificationRequest.getSubject(), keyInfo);
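
The change above trades random serials for time-based ones. A self-contained sketch of the idea, with System.nanoTime() standing in for Hadoop's Time.monotonicNowNanos():

    import java.math.BigInteger;

    final class SerialSketch {
      // Monotonic rather than sequential: values never decrease as long as
      // the clock source is monotonic, and they are always positive, unlike
      // new BigInteger(random 8 bytes), which can collide or go negative.
      static BigInteger nextSerial() {
        return BigInteger.valueOf(System.nanoTime());
      }
    }
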
@@ -115,18 +115,21 @@ public class DefaultCAServer implements CertificateServer {
    */
   private PKIProfile profile;
   private CertificateApprover approver;
+  private CertificateStore store;

   /**
    * Create an Instance of DefaultCAServer.
    *
    * @param subject - String Subject
    * @param clusterID - String ClusterID
    * @param scmID - String SCMID.
+   * @param certificateStore - A store used to persist Certificates.
    */
-  public DefaultCAServer(String subject, String clusterID, String scmID) {
+  public DefaultCAServer(String subject, String clusterID, String scmID,
+      CertificateStore certificateStore) {
     this.subject = subject;
     this.clusterID = clusterID;
     this.scmID = scmID;
+    this.store = certificateStore;
   }

   @Override

@@ -207,12 +210,15 @@ public class DefaultCAServer implements CertificateServer {
             getCAKeys().getPrivate(),
             getCACertificate(), java.sql.Date.valueOf(beginDate),
             java.sql.Date.valueOf(endDate), csr);
+        store.storeValidCertificate(xcert.getSerialNumber(),
+            CertificateCodec.getX509Certificate(xcert));
         xcertHolder.complete(xcert);
         break;
       default:
         return null; // cannot happen, keeping checkstyle happy.
       }
-    } catch (IOException | OperatorCreationException e) {
+    } catch (CertificateException | IOException | OperatorCreationException e) {
       LOG.error("Unable to issue a certificate. {}", e);
       xcertHolder.completeExceptionally(new SCMSecurityException(e));
     }
     return xcertHolder;

@@ -230,7 +236,19 @@ public class DefaultCAServer implements CertificateServer {
   public Future<Boolean> revokeCertificate(X509Certificate certificate,
       CertificateApprover.ApprovalType approverType)
       throws SCMSecurityException {
-    return null;
+    CompletableFuture<Boolean> revoked = new CompletableFuture<>();
+    if (certificate == null) {
+      revoked.completeExceptionally(new SCMSecurityException(
+          "Certificate cannot be null"));
+      return revoked;
+    }
+    try {
+      store.revokeCertificate(certificate.getSerialNumber());
+    } catch (IOException ex) {
+      LOG.error("Revoking the certificate failed. {}", ex.getCause());
+      throw new SCMSecurityException(ex);
+    }
+    return revoked;
   }

   /**
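
A hypothetical call into the revocation path added above; caServer, cert, and approvalType are placeholders, and the class is assumed to sit in the same package as CertificateServer. Note that store failures surface as a thrown SCMSecurityException rather than through the returned future:

    import java.security.cert.X509Certificate;
    import java.util.concurrent.Future;
    import org.apache.hadoop.hdds.security.exception.SCMSecurityException;

    final class RevokeSketch {
      static Future<Boolean> revoke(CertificateServer caServer,
          X509Certificate cert,
          CertificateApprover.ApprovalType approvalType)
          throws SCMSecurityException {
        // A null certificate completes the future exceptionally;
        // IO failures in the store throw instead.
        return caServer.revokeCertificate(cert, approvalType);
      }
    }
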
@@ -103,15 +103,6 @@ public class CertificateCodec {
     return CERTIFICATE_CONVERTER.getCertificate(holder);
   }

-  /**
-   * Get Certificate location.
-   *
-   * @return Path
-   */
-  public Path getLocation() {
-    return location;
-  }
-
   /**
    * Returns the Certificate as a PEM encoded String.
    *

@@ -121,17 +112,33 @@ public class CertificateCodec {
    */
   public static String getPEMEncodedString(X509CertificateHolder x509CertHolder)
       throws SCMSecurityException {
+    try {
+      return getPEMEncodedString(getX509Certificate(x509CertHolder));
+    } catch (CertificateException exp) {
+      throw new SCMSecurityException(exp);
+    }
+  }
+
+  /**
+   * Returns the Certificate as a PEM encoded String.
+   *
+   * @param certificate - X.509 Certificate.
+   * @return PEM Encoded Certificate String.
+   * @throws SCMSecurityException - On failure to create a PEM String.
+   */
+  public static String getPEMEncodedString(X509Certificate certificate)
+      throws SCMSecurityException {
     try {
       StringWriter stringWriter = new StringWriter();
       try (JcaPEMWriter pemWriter = new JcaPEMWriter(stringWriter)) {
-        pemWriter.writeObject(getX509Certificate(x509CertHolder));
+        pemWriter.writeObject(certificate);
       }
       return stringWriter.toString();
-    } catch (CertificateException | IOException e) {
-      LOG.error("Error in encoding certificate." + x509CertHolder
-          .getSubject().toString(), e);
+    } catch (IOException e) {
+      LOG.error("Error in encoding certificate." + certificate
+          .getSubjectDN().toString(), e);
       throw new SCMSecurityException("PEM Encoding failed for certificate." +
-          x509CertHolder.getSubject().toString(), e);
+          certificate.getSubjectDN().toString(), e);
     }
   }

@@ -143,7 +150,7 @@ public class CertificateCodec {
    * @throws CertificateException - Thrown on Failure.
    * @throws IOException - Thrown on Failure.
    */
-  public X509Certificate getX509Certificate(String pemEncodedString)
+  public static X509Certificate getX509Certificate(String pemEncodedString)
       throws CertificateException, IOException {
     CertificateFactory fact = CertificateFactory.getInstance("X.509");
     try (InputStream input = IOUtils.toInputStream(pemEncodedString, UTF_8)) {

@@ -151,6 +158,15 @@ public class CertificateCodec {
     }
   }

+  /**
+   * Get Certificate location.
+   *
+   * @return Path
+   */
+  public Path getLocation() {
+    return location;
+  }
+
   /**
    * Write the Certificate to the location pointed to by the configs.
    *
@@ -18,6 +18,8 @@
  */
 package org.apache.hadoop.utils.db;

+import java.io.IOException;
+
 /**
  * Codec interface to marshall/unmarshall data to/from a byte[] based
  * key/value store.

@@ -30,12 +32,12 @@ public interface Codec<T> {
    * Convert object to raw persisted format.
    * @param object The original java object. Should not be null.
    */
-  byte[] toPersistedFormat(T object);
+  byte[] toPersistedFormat(T object) throws IOException;

   /**
    * Convert object from raw persisted format.
    *
    * @param rawData Byte array from the key/value store. Should not be null.
    */
-  T fromPersistedFormat(byte[] rawData);
+  T fromPersistedFormat(byte[] rawData) throws IOException;
 }
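
To show the amended contract, a hypothetical codec that is not part of this commit, assumed to live in the same org.apache.hadoop.utils.db package: both directions may now propagate IOException instead of swallowing parse failures.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.UUID;

    public class UuidCodec implements Codec<UUID> {
      @Override
      public byte[] toPersistedFormat(UUID object) throws IOException {
        return object.toString().getBytes(StandardCharsets.UTF_8);
      }

      @Override
      public UUID fromPersistedFormat(byte[] rawData) throws IOException {
        try {
          return UUID.fromString(new String(rawData, StandardCharsets.UTF_8));
        } catch (IllegalArgumentException e) {
          // Malformed bytes now surface to the caller as IOException.
          throw new IOException("Cannot decode UUID from raw bytes", e);
        }
      }
    }
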
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.utils.db;

+import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;

@@ -43,7 +44,7 @@ public class CodecRegistry {
    * @param <T> Type of the return value.
    * @return the object with the parsed field data
    */
-  public <T> T asObject(byte[] rawData, Class<T> format) {
+  public <T> T asObject(byte[] rawData, Class<T> format) throws IOException {
     if (rawData == null) {
       return null;
     }

@@ -62,7 +63,7 @@ public class CodecRegistry {
    * @param <T> Type of the typed object.
    * @return byte array to store it in the kv store.
    */
-  public <T> byte[] asRawData(T object) {
+  public <T> byte[] asRawData(T object) throws IOException {
     Preconditions.checkNotNull(object,
         "Null value shouldn't be persisted in the database");
     Class<T> format = (Class<T>) object.getClass();
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.utils.db;

+import java.io.IOException;
 import org.apache.hadoop.hdfs.DFSUtil;

 /**

@@ -26,7 +27,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
 public class StringCodec implements Codec<String> {

   @Override
-  public byte[] toPersistedFormat(String object) {
+  public byte[] toPersistedFormat(String object) throws IOException {
     if (object != null) {
       return DFSUtil.string2Bytes(object);
     } else {

@@ -35,7 +36,7 @@ public class StringCodec implements Codec<String> {
   }

   @Override
-  public String fromPersistedFormat(byte[] rawData) {
+  public String fromPersistedFormat(byte[] rawData) throws IOException {
     if (rawData != null) {
       return DFSUtil.bytes2String(rawData);
     } else {
@@ -102,8 +102,8 @@ public interface Table<KEY, VALUE> extends AutoCloseable {
    */
   interface KeyValue<KEY, VALUE> {

-    KEY getKey();
+    KEY getKey() throws IOException;

-    VALUE getValue();
+    VALUE getValue() throws IOException;
   }
 }
@@ -20,6 +20,7 @@
 package org.apache.hadoop.utils.db;

 import java.io.Closeable;
+import java.io.IOException;
 import java.util.Iterator;

 /**

@@ -45,13 +46,13 @@ public interface TableIterator<KEY, T> extends Iterator<T>, Closeable {
    * @param key - Bytes that represent the key.
    * @return VALUE.
    */
-  T seek(KEY key);
+  T seek(KEY key) throws IOException;

   /**
    * Returns the key value at the current position.
    * @return KEY
    */
-  KEY key();
+  KEY key() throws IOException;

   /**
    * Returns the VALUE at the current position.
@@ -123,12 +123,12 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
     }

     @Override
-    public KEY getKey() {
+    public KEY getKey() throws IOException {
       return codecRegistry.asObject(rawKeyValue.getKey(), keyType);
     }

     @Override
-    public VALUE getValue() {
+    public VALUE getValue() throws IOException {
       return codecRegistry.asObject(rawKeyValue.getValue(), valueType);
     }
   }

@@ -163,7 +163,7 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
     }

     @Override
-    public TypedKeyValue seek(KEY key) {
+    public TypedKeyValue seek(KEY key) throws IOException {
       byte[] keyBytes = codecRegistry.asRawData(key);
       KeyValue<byte[], byte[]> result = rawIterator.seek(keyBytes);
       if (result == null) {

@@ -173,7 +173,7 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
     }

     @Override
-    public KEY key() {
+    public KEY key() throws IOException {
       byte[] result = rawIterator.key();
       if (result == null) {
         return null;
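
A sketch of what the new checked exceptions mean for callers iterating a typed table. It assumes Table#iterator() returns a TableIterator over KeyValue entries, as in this codebase, and is placed in the same package:

    import java.io.IOException;

    final class TableScanSketch {
      // Decode failures in getKey()/getValue() now reach the caller as
      // IOException instead of being hidden inside the iterator.
      static <K, V> void dump(Table<K, V> table) throws IOException {
        try (TableIterator<K, ? extends Table.KeyValue<K, V>> iter =
                 table.iterator()) {
          while (iter.hasNext()) {
            Table.KeyValue<K, V> kv = iter.next();
            System.out.println(kv.getKey() + " -> " + kv.getValue());
          }
        }
      }
    }
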
@@ -0,0 +1,54 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.security.x509.certificate.authority;

import java.io.IOException;
import java.math.BigInteger;
import java.security.cert.X509Certificate;

/**
 * A no-op CertificateStore used for testing.
 */
public class MockCAStore implements CertificateStore {
  @Override
  public void storeValidCertificate(BigInteger serialID,
      X509Certificate certificate)
      throws IOException {
  }

  @Override
  public void revokeCertificate(BigInteger serialID) throws IOException {
  }

  @Override
  public void removeExpiredCertificate(BigInteger serialID)
      throws IOException {
  }

  @Override
  public X509Certificate getCertificateByID(BigInteger serialID,
      CertType certType)
      throws IOException {
    return null;
  }
}
@@ -51,10 +51,12 @@ public class TestDefaultCAServer {
   private static OzoneConfiguration conf = new OzoneConfiguration();
   @Rule
   public TemporaryFolder temporaryFolder = new TemporaryFolder();
+  private MockCAStore caStore;

   @Before
   public void init() throws IOException {
     conf.set(OZONE_METADATA_DIRS, temporaryFolder.newFolder().toString());
+    caStore = new MockCAStore();
   }

   @Test

@@ -63,7 +65,7 @@ public class TestDefaultCAServer {
     SecurityConfig securityConfig = new SecurityConfig(conf);
     CertificateServer testCA = new DefaultCAServer("testCA",
         RandomStringUtils.randomAlphabetic(4),
-        RandomStringUtils.randomAlphabetic(4));
+        RandomStringUtils.randomAlphabetic(4), caStore);
     testCA.init(securityConfig, CertificateServer.CAType.SELF_SIGNED_CA);
     X509CertificateHolder first = testCA.getCACertificate();
     assertNotNull(first);

@@ -88,7 +90,7 @@ public class TestDefaultCAServer {
     SecurityConfig securityConfig = new SecurityConfig(conf);
     CertificateServer testCA = new DefaultCAServer("testCA",
         RandomStringUtils.randomAlphabetic(4),
-        RandomStringUtils.randomAlphabetic(4));
+        RandomStringUtils.randomAlphabetic(4), caStore);
     Consumer<SecurityConfig> caInitializer =
         ((DefaultCAServer) testCA).processVerificationStatus(
             DefaultCAServer.VerificationStatus.MISSING_CERTIFICATE);

@@ -108,7 +110,7 @@ public class TestDefaultCAServer {
     SecurityConfig securityConfig = new SecurityConfig(conf);
     CertificateServer testCA = new DefaultCAServer("testCA",
         RandomStringUtils.randomAlphabetic(4),
-        RandomStringUtils.randomAlphabetic(4));
+        RandomStringUtils.randomAlphabetic(4), caStore);
     Consumer<SecurityConfig> caInitializer =
         ((DefaultCAServer) testCA).processVerificationStatus(
             DefaultCAServer.VerificationStatus.MISSING_KEYS);

@@ -155,7 +157,7 @@ public class TestDefaultCAServer {

     CertificateServer testCA = new DefaultCAServer("testCA",
         RandomStringUtils.randomAlphabetic(4),
-        RandomStringUtils.randomAlphabetic(4));
+        RandomStringUtils.randomAlphabetic(4), caStore);
     testCA.init(new SecurityConfig(conf),
         CertificateServer.CAType.SELF_SIGNED_CA);
@@ -19,6 +19,7 @@

 package org.apache.hadoop.utils.db;

+import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -56,6 +57,16 @@ public class TestRDBTableStore {
   private RDBStore rdbStore = null;
   private DBOptions options = null;

+  private static boolean consume(Table.KeyValue keyValue) {
+    count++;
+    try {
+      Assert.assertNotNull(keyValue.getKey());
+    } catch (IOException ex) {
+      Assert.fail("Unexpected Exception " + ex.toString());
+    }
+    return true;
+  }
+
   @Before
   public void setUp() throws Exception {
     options = new DBOptions();

@@ -191,12 +202,6 @@ public class TestRDBTableStore {
     }
   }

-  private static boolean consume(Table.KeyValue keyValue) {
-    count++;
-    Assert.assertNotNull(keyValue.getKey());
-    return true;
-  }
-
   @Test
   public void forEachAndIterator() throws Exception {
     final int iterCount = 100;
@@ -199,7 +199,11 @@ public class TestTypedRDBTableStore {

   private static boolean consume(Table.KeyValue keyValue) {
     count++;
-    Assert.assertNotNull(keyValue.getKey());
+    try {
+      Assert.assertNotNull(keyValue.getKey());
+    } catch (IOException ex) {
+      Assert.fail(ex.toString());
+    }
     return true;
   }

@@ -232,4 +236,4 @@ public class TestTypedRDBTableStore {
     }
   }
 }
-}
+}
@@ -0,0 +1,39 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.scm.metadata;

import java.io.IOException;
import java.math.BigInteger;
import org.apache.hadoop.utils.db.Codec;

/**
 * Encode and decode BigInteger.
 */
public class BigIntegerCodec implements Codec<BigInteger> {
  @Override
  public byte[] toPersistedFormat(BigInteger object) throws IOException {
    return object.toByteArray();
  }

  @Override
  public BigInteger fromPersistedFormat(byte[] rawData) throws IOException {
    return new BigInteger(rawData);
  }
}
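
A round-trip sketch of the codec above; BigInteger#toByteArray() and the byte[] constructor are exact inverses, so serial numbers survive persistence unchanged. The class name and sample value are placeholders:

    import java.io.IOException;
    import java.math.BigInteger;

    final class BigIntegerCodecDemo {
      public static void main(String[] args) throws IOException {
        BigIntegerCodec codec = new BigIntegerCodec();
        BigInteger serial = BigInteger.valueOf(1_549_412_345_678L);
        byte[] raw = codec.toPersistedFormat(serial);      // two's-complement bytes
        BigInteger back = codec.fromPersistedFormat(raw);  // exact round trip
        if (!serial.equals(back)) {
          throw new AssertionError("round trip failed");
        }
      }
    }
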
@@ -21,6 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;

 import com.google.protobuf.InvalidProtocolBufferException;
+import java.io.IOException;
 import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
 import org.apache.hadoop.utils.db.Codec;

@@ -30,12 +31,14 @@ import org.apache.hadoop.utils.db.Codec;
 public class DeletedBlocksTransactionCodec
     implements Codec<DeletedBlocksTransaction> {
   @Override
-  public byte[] toPersistedFormat(DeletedBlocksTransaction object) {
+  public byte[] toPersistedFormat(DeletedBlocksTransaction object)
+      throws IOException {
     return object.toByteArray();
   }

   @Override
-  public DeletedBlocksTransaction fromPersistedFormat(byte[] rawData) {
+  public DeletedBlocksTransaction fromPersistedFormat(byte[] rawData)
+      throws IOException {
     try {
       return DeletedBlocksTransaction.parseFrom(rawData);
     } catch (InvalidProtocolBufferException e) {
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hdds.scm.metadata;

 import com.google.common.primitives.Longs;
+import java.io.IOException;
 import org.apache.hadoop.utils.db.Codec;

 /**

@@ -28,12 +29,12 @@ import org.apache.hadoop.utils.db.Codec;
 public class LongCodec implements Codec<Long> {

   @Override
-  public byte[] toPersistedFormat(Long object) {
+  public byte[] toPersistedFormat(Long object) throws IOException {
     return Longs.toByteArray(object);
   }

   @Override
-  public Long fromPersistedFormat(byte[] rawData) {
+  public Long fromPersistedFormat(byte[] rawData) throws IOException {
     return Longs.fromByteArray(rawData);
   }
 }
@@ -17,13 +17,17 @@
  */
 package org.apache.hadoop.hdds.scm.metadata;

+import java.math.BigInteger;
+import java.security.cert.X509Certificate;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import java.io.IOException;
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
 import org.apache.hadoop.utils.db.DBStore;
 import org.apache.hadoop.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
+import org.apache.hadoop.utils.db.TableIterator;

 /**
  * Generic interface for data stores for SCM.

@@ -53,21 +57,47 @@ public interface SCMMetadataStore {
   DBStore getStore();

   /**
-   * A Table that keeps the deleted blocks lists and transactions.
-   * @return Table
+   * A Table that keeps the deleted blocks lists and transactions.
+   *
+   * @return Table
    */
   Table<Long, DeletedBlocksTransaction> getDeletedBlocksTXTable();

   /**
    * Returns the current TXID for the deleted blocks.
    *
    * @return Long
    */
   Long getCurrentTXID();

   /**
    * Returns the next TXID for the Deleted Blocks.
    *
    * @return Long.
    */
   Long getNextDeleteBlockTXID();

+  /**
+   * A table that maintains all the valid certificates issued by the SCM CA.
+   *
+   * @return Table
+   */
+  Table<BigInteger, X509Certificate> getValidCertsTable();
+
+  /**
+   * A Table that maintains all revoked certificates until they expire.
+   *
+   * @return Table.
+   */
+  Table<BigInteger, X509Certificate> getRevokedCertsTable();
+
+  /**
+   * Returns the list of Certificates of a specific type.
+   *
+   * @param certType - CertType.
+   * @return Iterator<X509Certificate>
+   */
+  TableIterator getAllCerts(CertificateStore.CertType certType);
+
 }
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hdds.scm.metadata;

 import java.io.File;
+import java.math.BigInteger;
 import java.nio.file.Paths;
+import java.security.cert.X509Certificate;
 import java.util.concurrent.atomic.AtomicLong;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import java.io.IOException;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
 import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.utils.db.DBStore;
 import org.apache.hadoop.utils.db.DBStoreBuilder;

@@ -37,17 +40,28 @@ import static org.apache.hadoop.ozone.OzoneConsts.SCM_DB_NAME;
  * A RocksDB based implementation of SCM Metadata Store.
  * <p>
  * <p>
- * +---------------+------------+-------------------------+
- * | Column Family |    Key     |          Value          |
- * +---------------+------------+-------------------------+
- * | DeletedBlocks | TXID(Long) | DeletedBlockTransaction |
- * +---------------+------------+-------------------------+
+ * +---------------+------------------+-------------------------+
+ * | Column Family |       Key        |          Value          |
+ * +---------------+------------------+-------------------------+
+ * | DeletedBlocks | TXID(Long)       | DeletedBlockTransaction |
+ * +---------------+------------------+-------------------------+
+ * | ValidCerts    | Serial (BigInt)  | X509Certificate         |
+ * +---------------+------------------+-------------------------+
+ * | RevokedCerts  | Serial (BigInt)  | X509Certificate         |
+ * +---------------+------------------+-------------------------+
  */
 public class SCMMetadataStoreRDBImpl implements SCMMetadataStore {

   private static final String DELETED_BLOCKS_TABLE = "deletedBlocks";
   private Table deletedBlocksTable;

+  private static final String VALID_CERTS_TABLE = "validCerts";
+  private Table validCertsTable;
+
+  private static final String REVOKED_CERTS_TABLE = "revokedCerts";
+  private Table revokedCertsTable;
+
   private static final Logger LOG =
       LoggerFactory.getLogger(SCMMetadataStoreRDBImpl.class);

@@ -77,13 +91,26 @@ public class SCMMetadataStoreRDBImpl implements SCMMetadataStore {
           .setName(SCM_DB_NAME)
           .setPath(Paths.get(metaDir.getPath()))
           .addTable(DELETED_BLOCKS_TABLE)
+          .addTable(VALID_CERTS_TABLE)
+          .addTable(REVOKED_CERTS_TABLE)
           .addCodec(DeletedBlocksTransaction.class,
              new DeletedBlocksTransactionCodec())
           .addCodec(Long.class, new LongCodec())
+          .addCodec(BigInteger.class, new BigIntegerCodec())
+          .addCodec(X509Certificate.class, new X509CertificateCodec())
           .build();

       deletedBlocksTable = this.store.getTable(DELETED_BLOCKS_TABLE,
           Long.class, DeletedBlocksTransaction.class);
       checkTableStatus(deletedBlocksTable, DELETED_BLOCKS_TABLE);
+
+      validCertsTable = this.store.getTable(VALID_CERTS_TABLE,
+          BigInteger.class, X509Certificate.class);
+      checkTableStatus(validCertsTable, VALID_CERTS_TABLE);
+
+      revokedCertsTable = this.store.getTable(REVOKED_CERTS_TABLE,
+          BigInteger.class, X509Certificate.class);
+      checkTableStatus(revokedCertsTable, REVOKED_CERTS_TABLE);
     }
   }

@@ -110,6 +137,29 @@ public class SCMMetadataStoreRDBImpl implements SCMMetadataStore {
     return this.txID.incrementAndGet();
   }

+  @Override
+  public Table<BigInteger, X509Certificate> getValidCertsTable() {
+    return validCertsTable;
+  }
+
+  @Override
+  public Table<BigInteger, X509Certificate> getRevokedCertsTable() {
+    return revokedCertsTable;
+  }
+
+  @Override
+  public TableIterator getAllCerts(CertificateStore.CertType certType) {
+    if (certType == CertificateStore.CertType.VALID_CERTS) {
+      return validCertsTable.iterator();
+    }
+
+    if (certType == CertificateStore.CertType.REVOKED_CERTS) {
+      return revokedCertsTable.iterator();
+    }
+
+    return null;
+  }
+
   @Override
   public Long getCurrentTXID() {
     return this.txID.get();
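
For illustration, a hypothetical scan over the new validCerts column family through the accessor above. The raw TableIterator carries Table.KeyValue<BigInteger, X509Certificate> entries in practice; metadataStore is a placeholder and the class is assumed to sit in the same package:

    import java.io.IOException;
    import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
    import org.apache.hadoop.utils.db.TableIterator;

    final class CertScanSketch {
      static void listValidCerts(SCMMetadataStore metadataStore)
          throws IOException {
        try (TableIterator<?, ?> iter = metadataStore.getAllCerts(
            CertificateStore.CertType.VALID_CERTS)) {
          while (iter.hasNext()) {
            // Each element is a Table.KeyValue<BigInteger, X509Certificate>.
            Object kv = iter.next();
            System.out.println(kv);
          }
        }
      }
    }
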
@@ -0,0 +1,54 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.scm.metadata;

import java.io.IOException;
import java.nio.charset.Charset;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
import org.apache.hadoop.utils.db.Codec;

/**
 * Encodes and Decodes X509Certificate Class.
 */
public class X509CertificateCodec implements Codec<X509Certificate> {
  @Override
  public byte[] toPersistedFormat(X509Certificate object) throws IOException {
    try {
      return CertificateCodec.getPEMEncodedString(object)
          .getBytes(Charset.forName("UTF-8"));
    } catch (SCMSecurityException exp) {
      throw new IOException(exp);
    }
  }

  @Override
  public X509Certificate fromPersistedFormat(byte[] rawData)
      throws IOException {
    try {
      String s = new String(rawData, Charset.forName("UTF-8"));
      return CertificateCodec.getX509Certificate(s);
    } catch (CertificateException exp) {
      throw new IOException(exp);
    }
  }
}
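
A sketch of the persisted form: the codec above stores the PEM text bytes, so values in RocksDB are human-readable. The cert argument and class name are placeholders, and same-package placement is assumed:

    import java.io.IOException;
    import java.security.cert.X509Certificate;

    final class X509CodecSketch {
      static void roundTrip(X509Certificate cert) throws IOException {
        X509CertificateCodec codec = new X509CertificateCodec();
        // UTF-8 bytes of "-----BEGIN CERTIFICATE-----" ... "-----END CERTIFICATE-----"
        byte[] pemBytes = codec.toPersistedFormat(cert);
        X509Certificate back = codec.fromPersistedFormat(pemBytes);
        assert cert.equals(back);
      }
    }
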
@@ -0,0 +1,115 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.hdds.scm.server;

import java.io.IOException;
import java.math.BigInteger;
import java.security.cert.X509Certificate;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
import org.apache.hadoop.utils.db.BatchOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Certificate Store class that persists certificates issued by the SCM CA.
 */
public class SCMCertStore implements CertificateStore {
  private static final Logger LOG =
      LoggerFactory.getLogger(SCMCertStore.class);
  private final SCMMetadataStore scmMetadataStore;
  private final Lock lock;

  public SCMCertStore(SCMMetadataStore dbStore) {
    this.scmMetadataStore = dbStore;
    lock = new ReentrantLock();
  }

  @Override
  public void storeValidCertificate(BigInteger serialID,
      X509Certificate certificate)
      throws IOException {
    lock.lock();
    try {
      // This makes sure that no certificate IDs are reusable.
      if ((getCertificateByID(serialID, CertType.VALID_CERTS) == null) &&
          (getCertificateByID(serialID, CertType.REVOKED_CERTS) == null)) {
        scmMetadataStore.getValidCertsTable().put(serialID, certificate);
      } else {
        throw new SCMSecurityException("Conflicting certificate ID");
      }
    } finally {
      lock.unlock();
    }
  }

  @Override
  public void revokeCertificate(BigInteger serialID) throws IOException {
    lock.lock();
    try {
      X509Certificate cert = getCertificateByID(serialID, CertType.VALID_CERTS);
      if (cert == null) {
        LOG.error("Trying to revoke a certificate that is not valid. Serial: " +
            "{}", serialID.toString());
        throw new SCMSecurityException("Trying to revoke an invalid " +
            "certificate.");
      }
      // TODO: Check if we are trying to revoke an expired certificate.

      if (getCertificateByID(serialID, CertType.REVOKED_CERTS) != null) {
        LOG.error("Trying to revoke a certificate that is already revoked.");
        throw new SCMSecurityException("Trying to revoke an already revoked " +
            "certificate.");
      }

      // Let us do this in a transaction.
      try (BatchOperation batch =
          scmMetadataStore.getStore().initBatchOperation()) {
        scmMetadataStore.getRevokedCertsTable()
            .putWithBatch(batch, serialID, cert);
        scmMetadataStore.getValidCertsTable().deleteWithBatch(batch, serialID);
        scmMetadataStore.getStore().commitBatchOperation(batch);
      }
    } finally {
      lock.unlock();
    }
  }

  @Override
  public void removeExpiredCertificate(BigInteger serialID)
      throws IOException {
    // TODO: Later this allows removal of expired certificates from the system.
  }

  @Override
  public X509Certificate getCertificateByID(BigInteger serialID,
      CertType certType)
      throws IOException {
    if (certType == CertType.VALID_CERTS) {
      return scmMetadataStore.getValidCertsTable().get(serialID);
    } else {
      return scmMetadataStore.getRevokedCertsTable().get(serialID);
    }
  }
}
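
A hypothetical driver for the store above, mirroring the wiring in initializeCertificateServer() below. Note that SCMSecurityException extends IOException in this codebase (which is why the duplicate-ID failure satisfies the throws IOException contract); store and cert are placeholders:

    import java.io.IOException;
    import java.security.cert.X509Certificate;

    final class SCMCertStoreSketch {
      static void persist(SCMCertStore store, X509Certificate cert)
          throws IOException {
        store.storeValidCertificate(cert.getSerialNumber(), cert);
        // Storing the same serial again fails, whether the first copy is
        // still valid or already revoked: serial IDs are never reusable.
        // Revocation moves the row validCerts -> revokedCerts in one batch,
        // so a crash cannot leave the certificate in both tables.
        store.revokeCertificate(cert.getSerialNumber());
      }
    }
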
@@ -254,8 +254,16 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
      * passed before any artifacts like the SCM DB are created. So please
      * don't add any other initialization above the security checks.
      */
+    if (OzoneSecurityUtil.isSecurityEnabled(conf)) {
+      loginAsSCMUser(conf);
+    }
+
-    // Authenticate SCM if security is enabled
+    // Creates the SCM DBs or opens them if they exist.
+    // A valid pointer to the store is required by all the other services below.
+    initalizeMetadataStore(conf, configurator);
+
+    // Authenticate SCM if security is enabled; this initialization can only
+    // be done after the metadata store is initialized.
     if (OzoneSecurityUtil.isSecurityEnabled(conf)) {
       initializeCAnSecurityProtocol(conf, configurator);
     } else {

@@ -266,8 +274,6 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
       securityProtocolServer = null;
     }

-    // Creates the SCM DBs or opens them if it exists.
-    initalizeMetadataStore(conf, configurator);
-
     eventQueue = new EventQueue();
     long watcherTimeout =

@@ -438,11 +444,11 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
    */
   private void initializeCAnSecurityProtocol(OzoneConfiguration conf,
       SCMConfigurator configurator)
-      throws IOException, AuthenticationException {
-    loginAsSCMUser(conf);
+      throws IOException {
     if (configurator.getCertificateServer() != null) {
       this.certificateServer = configurator.getCertificateServer();
     } else {
       // This assumes that SCM init has run, and DB metadata stores are created.
       certificateServer = initializeCertificateServer(
           getScmStorageConfig().getClusterID(),
           getScmStorageConfig().getScmId());

@@ -515,7 +521,14 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
     // TODO: Support Certificate Server loading via Class Name loader.
     // So it is easy to use different Certificate Servers if needed.
     String subject = "scm@" + InetAddress.getLocalHost().getHostName();
-    return new DefaultCAServer(subject, clusterID, scmID);
+    if (this.scmMetadataStore == null) {
+      LOG.error("Cannot initialize Certificate Server without a valid meta " +
+          "data layer.");
+      throw new SCMException("Cannot initialize CA without a valid metadata " +
+          "store", ResultCodes.SCM_NOT_INITIALIZED);
+    }
+    SCMCertStore certStore = new SCMCertStore(this.scmMetadataStore);
+    return new DefaultCAServer(subject, clusterID, scmID, certStore);
   }

   /**
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.ozone.om.codec;

+import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketInfo;
 import org.apache.hadoop.utils.db.Codec;

@@ -30,14 +31,14 @@ import com.google.protobuf.InvalidProtocolBufferException;
 public class OmBucketInfoCodec implements Codec<OmBucketInfo> {

   @Override
-  public byte[] toPersistedFormat(OmBucketInfo object) {
+  public byte[] toPersistedFormat(OmBucketInfo object) throws IOException {
     Preconditions
         .checkNotNull(object, "Null object can't be converted to byte array.");
     return object.getProtobuf().toByteArray();
   }

   @Override
-  public OmBucketInfo fromPersistedFormat(byte[] rawData) {
+  public OmBucketInfo fromPersistedFormat(byte[] rawData) throws IOException {
     Preconditions
         .checkNotNull(rawData,
             "Null byte array can't converted to real object.");
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.ozone.om.codec;

+import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
 import org.apache.hadoop.utils.db.Codec;

@@ -30,14 +31,14 @@ import com.google.protobuf.InvalidProtocolBufferException;
 public class OmKeyInfoCodec implements Codec<OmKeyInfo> {

   @Override
-  public byte[] toPersistedFormat(OmKeyInfo object) {
+  public byte[] toPersistedFormat(OmKeyInfo object) throws IOException {
     Preconditions
         .checkNotNull(object, "Null object can't be converted to byte array.");
     return object.getProtobuf().toByteArray();
   }

   @Override
-  public OmKeyInfo fromPersistedFormat(byte[] rawData) {
+  public OmKeyInfo fromPersistedFormat(byte[] rawData) throws IOException {
     Preconditions
         .checkNotNull(rawData,
             "Null byte array can't converted to real object.");
@@ -19,6 +19,7 @@ package org.apache.hadoop.ozone.om.codec;

 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
+import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
 import org.apache.hadoop.utils.db.Codec;

@@ -30,7 +31,8 @@ import org.apache.hadoop.utils.db.Codec;
 public class OmMultipartKeyInfoCodec implements Codec<OmMultipartKeyInfo> {

   @Override
-  public byte[] toPersistedFormat(OmMultipartKeyInfo object) {
+  public byte[] toPersistedFormat(OmMultipartKeyInfo object)
+      throws IOException {
     Preconditions.checkNotNull(object,
         "Null object can't be converted to byte array.");
     return object.getProto().toByteArray();

@@ -42,7 +44,8 @@ public class OmMultipartKeyInfoCodec implements Codec<OmMultipartKeyInfo> {
   * Construct {@link OmMultipartKeyInfo} from byte[]. If unable to convert
   * return null.
   */
-  public OmMultipartKeyInfo fromPersistedFormat(byte[] rawData) {
+  public OmMultipartKeyInfo fromPersistedFormat(byte[] rawData)
+      throws IOException {
     Preconditions.checkNotNull(rawData,
         "Null byte array can't converted to real object.");
     try {
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.ozone.om.codec;

+import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeInfo;
 import org.apache.hadoop.utils.db.Codec;

@@ -30,14 +31,14 @@ import com.google.protobuf.InvalidProtocolBufferException;
 public class OmVolumeArgsCodec implements Codec<OmVolumeArgs> {

   @Override
-  public byte[] toPersistedFormat(OmVolumeArgs object) {
+  public byte[] toPersistedFormat(OmVolumeArgs object) throws IOException {
     Preconditions
         .checkNotNull(object, "Null object can't be converted to byte array.");
     return object.getProtobuf().toByteArray();
   }

   @Override
-  public OmVolumeArgs fromPersistedFormat(byte[] rawData) {
+  public OmVolumeArgs fromPersistedFormat(byte[] rawData) throws IOException {
     Preconditions
         .checkNotNull(rawData,
             "Null byte array can't converted to real object.");
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.ozone.om.codec;

+import java.io.IOException;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeList;
 import org.apache.hadoop.utils.db.Codec;

@@ -29,14 +30,14 @@ import com.google.protobuf.InvalidProtocolBufferException;
 public class VolumeListCodec implements Codec<VolumeList> {

   @Override
-  public byte[] toPersistedFormat(VolumeList object) {
+  public byte[] toPersistedFormat(VolumeList object) throws IOException {
     Preconditions
         .checkNotNull(object, "Null object can't be converted to byte array.");
     return object.toByteArray();
   }

   @Override
-  public VolumeList fromPersistedFormat(byte[] rawData) {
+  public VolumeList fromPersistedFormat(byte[] rawData) throws IOException {
     Preconditions
         .checkNotNull(rawData,
             "Null byte array can't converted to real object.");
@@ -36,18 +36,30 @@ public class TestOmMultipartKeyInfoCodec {
     OmMultipartKeyInfoCodec codec = new OmMultipartKeyInfoCodec();
     OmMultipartKeyInfo omMultipartKeyInfo = new OmMultipartKeyInfo(UUID
         .randomUUID().toString(), new HashMap<>());
-    byte[] data = codec.toPersistedFormat(omMultipartKeyInfo);
+    byte[] data = new byte[0];
+    try {
+      data = codec.toPersistedFormat(omMultipartKeyInfo);
+    } catch (java.io.IOException e) {
+      e.printStackTrace();
+    }
     Assert.assertNotNull(data);

-    OmMultipartKeyInfo multipartKeyInfo = codec.fromPersistedFormat(data);
+    OmMultipartKeyInfo multipartKeyInfo = null;
+    try {
+      multipartKeyInfo = codec.fromPersistedFormat(data);
+    } catch (java.io.IOException e) {
+      e.printStackTrace();
+    }
     Assert.assertEquals(omMultipartKeyInfo, multipartKeyInfo);

     // When random byte data is passed, it returns null.
     try {
-      codec.fromPersistedFormat("radom".getBytes());
+      codec.fromPersistedFormat("random".getBytes());
     } catch (IllegalArgumentException ex) {
       GenericTestUtils.assertExceptionContains("Can't encode the the raw " +
           "data from the byte array", ex);
+    } catch (java.io.IOException e) {
+      e.printStackTrace();
+    }

   }