diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java
index 901c86cb3a7..54fb3907266 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java
@@ -224,12 +224,16 @@ public Future<X509CertificateHolder> requestCertificate(
       break;
     case KERBEROS_TRUSTED:
     case TESTING_AUTOMATIC:
-      X509CertificateHolder xcert = approver.sign(config,
-          getCAKeys().getPrivate(),
-          getCACertificate(), java.sql.Date.valueOf(beginDate),
-          java.sql.Date.valueOf(endDate), csr, scmID, clusterID);
-      store.storeValidCertificate(xcert.getSerialNumber(),
-          CertificateCodec.getX509Certificate(xcert));
+      X509CertificateHolder xcert;
+      try {
+        xcert = signAndStoreCertificate(beginDate, endDate, csr);
+      } catch (SCMSecurityException e) {
+        // Certificate with conflicting serial id, retry again may resolve
+        // this issue.
+        LOG.error("Certificate storage failed, retrying one more time.", e);
+        xcert = signAndStoreCertificate(beginDate, endDate, csr);
+      }
+
       xcertHolder.complete(xcert);
       break;
     default:
@@ -242,6 +246,18 @@ public Future<X509CertificateHolder> requestCertificate(
     return xcertHolder;
   }
 
+  private X509CertificateHolder signAndStoreCertificate(LocalDate beginDate, LocalDate endDate,
+      PKCS10CertificationRequest csr) throws IOException,
+      OperatorCreationException, CertificateException {
+    X509CertificateHolder xcert = approver.sign(config,
+        getCAKeys().getPrivate(),
+        getCACertificate(), java.sql.Date.valueOf(beginDate),
+        java.sql.Date.valueOf(endDate), csr, scmID, clusterID);
+    store.storeValidCertificate(xcert.getSerialNumber(),
+        CertificateCodec.getX509Certificate(xcert));
+    return xcert;
+  }
+
   @Override
   public Future<X509CertificateHolder> requestCertificate(String csr,
       CertificateApprover.ApprovalType type) throws IOException {
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml
index c0057276981..62b07b2d920 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml
@@ -38,7 +38,6 @@ services:
     image: apache/hadoop-runner
     volumes:
       - ../..:/opt/hadoop
-    hostname: datanode
     ports:
       - 9864
     command: ["/opt/hadoop/bin/ozone","datanode"]
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
index 5031e6cbd67..2c7f8160da7 100644
--- a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
@@ -23,6 +23,7 @@ OZONE-SITE.XML_ozone.scm.block.client.address=scm
 OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
 OZONE-SITE.XML_ozone.handler.type=distributed
 OZONE-SITE.XML_ozone.scm.client.address=scm
+OZONE-SITE.XML_hdds.block.token.enabled=true
 OZONE-SITE.XML_ozone.replication=1
 OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
 OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
diff --git a/hadoop-ozone/dist/src/main/smoketest/security/ozone-secure.robot b/hadoop-ozone/dist/src/main/smoketest/security/ozone-secure.robot
index 457ebaaa1d3..5031e00ed77 100644
--- a/hadoop-ozone/dist/src/main/smoketest/security/ozone-secure.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/security/ozone-secure.robot
@@ -23,7 +23,8 @@ Create volume and bucket
     ${rc}    ${output} =      Run And Return Rc And Output    ozone sh volume create o3://om/fstest --user bilbo --quota 100TB --root
                               Should contain      ${output}       Client cannot authenticate via
                               # Authenticate testuser
-                              Execute             kinit -k testuser/datanode@EXAMPLE.COM -t /etc/security/keytabs/testuser.keytab
+    ${hostname} =             Execute             hostname
+                              Execute             kinit -k testuser/${hostname}@EXAMPLE.COM -t /etc/security/keytabs/testuser.keytab
                               Execute             ozone sh volume create o3://om/fstest --user bilbo --quota 100TB --root
                               Execute             ozone sh volume create o3://om/fstest2 --user bilbo --quota 100TB --root
                               Execute             ozone sh bucket create o3://om/fstest/bucket1
@@ -107,5 +108,5 @@ Run ozoneFS tests
                               Execute             ls -l GET.txt
    ${rc}    ${result} =       Run And Return Rc And Output    ozone fs -ls o3fs://abcde.pqrs/
                               Should Be Equal As Integers     ${rc}   1
-                              Should contain      ${result}         VOLUME_NOT_FOUND
+                              Should contain      ${result}         Volume pqrs is not found
 
diff --git a/hadoop-ozone/dist/src/main/smoketest/test.sh b/hadoop-ozone/dist/src/main/smoketest/test.sh
index 582fbdf7c52..41da2f0d8b3 100755
--- a/hadoop-ozone/dist/src/main/smoketest/test.sh
+++ b/hadoop-ozone/dist/src/main/smoketest/test.sh
@@ -151,6 +151,8 @@ if [ "$RUN_ALL" = true ]; then
   execute_tests ozone-hdfs "${TESTS[@]}"
   TESTS=("s3")
   execute_tests ozones3 "${TESTS[@]}"
+  TESTS=("security")
+  execute_tests ozonesecure "${TESTS[@]}"
 else
   execute_tests "$DOCKERENV" "${POSITIONAL[@]}"
 fi