HDDS-939. Add S3 access check to Ozone manager. Contributed by Ajay Kumar. (#634)

Ajay Yadav authored 2019-03-26 08:59:59 -07:00; committed by Bharat Viswanadham
parent 5c0a81ad3c
commit 82d477293c
8 changed files with 94 additions and 28 deletions

S3SecretManagerImpl.java

@@ -61,8 +61,7 @@ public class S3SecretManagerImpl implements S3SecretManager {
   public S3SecretValue getS3Secret(String kerberosID) throws IOException {
     Preconditions.checkArgument(Strings.isNotBlank(kerberosID),
         "kerberosID cannot be null or empty.");
-    String awsAccessKeyStr = DigestUtils.md5Hex(kerberosID);
-    byte[] awsAccessKey = awsAccessKeyStr.getBytes(UTF_8);
+    byte[] awsAccessKey = kerberosID.getBytes(UTF_8);
     S3SecretValue result = null;
     omMetadataManager.getLock().acquireS3SecretLock(kerberosID);
     try {
@@ -77,33 +76,31 @@ public class S3SecretManagerImpl implements S3SecretManager {
         result = S3SecretValue.fromProtobuf(
             OzoneManagerProtocolProtos.S3Secret.parseFrom(s3Secret));
       }
-      result.setAwsAccessKey(awsAccessKeyStr);
     } finally {
       omMetadataManager.getLock().releaseS3SecretLock(kerberosID);
     }
-    LOG.trace("Secret for kerberosID:{},accessKey:{}, proto:{}", kerberosID,
-        awsAccessKeyStr, result);
+    LOG.trace("Secret for accessKey:{}, proto:{}", kerberosID, result);
     return result;
   }

   @Override
-  public String getS3UserSecretString(String awsAccessKeyId)
+  public String getS3UserSecretString(String kerberosID)
       throws IOException {
-    Preconditions.checkArgument(Strings.isNotBlank(awsAccessKeyId),
+    Preconditions.checkArgument(Strings.isNotBlank(kerberosID),
         "awsAccessKeyId cannot be null or empty.");
-    LOG.trace("Get secret for awsAccessKey:{}", awsAccessKeyId);
+    LOG.trace("Get secret for awsAccessKey:{}", kerberosID);
     byte[] s3Secret;
-    omMetadataManager.getLock().acquireS3SecretLock(awsAccessKeyId);
+    omMetadataManager.getLock().acquireS3SecretLock(kerberosID);
     try {
       s3Secret = omMetadataManager.getS3SecretTable()
-          .get(awsAccessKeyId.getBytes(UTF_8));
+          .get(kerberosID.getBytes(UTF_8));
       if (s3Secret == null) {
         throw new OzoneSecurityException("S3 secret not found for " +
-            "awsAccessKeyId " + awsAccessKeyId, S3_SECRET_NOT_FOUND);
+            "awsAccessKeyId " + kerberosID, S3_SECRET_NOT_FOUND);
       }
     } finally {
-      omMetadataManager.getLock().releaseS3SecretLock(awsAccessKeyId);
+      omMetadataManager.getLock().releaseS3SecretLock(kerberosID);
     }
     return OzoneManagerProtocolProtos.S3Secret.parseFrom(s3Secret)
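Net effect of the two hunks above: the OM no longer derives the S3 access key by MD5-hashing the kerberos principal; the principal itself is the access key, and the S3 secret table is keyed by its UTF-8 bytes. A small before/after sketch of just that key derivation (the class name and sample principal are illustrative; commons-codec is assumed on the classpath):

import java.nio.charset.StandardCharsets;

import org.apache.commons.codec.digest.DigestUtils;

public final class AccessKeySketch {
  public static void main(String[] args) {
    String kerberosID = "testuser/om@EXAMPLE.COM";

    // Before this change: the access key handed out for the principal was its MD5 hex digest.
    String oldAccessKey = DigestUtils.md5Hex(kerberosID);

    // After this change: the principal itself is the access key, and the
    // secret table is keyed by its UTF-8 bytes.
    byte[] newTableKey = kerberosID.getBytes(StandardCharsets.UTF_8);

    System.out.println("old access key: " + oldAccessKey);
    System.out.println("new table key : " + new String(newTableKey, StandardCharsets.UTF_8));
  }
}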

S3SecretValue.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.ozone.om.helpers;

-import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;

 /**
@@ -26,12 +25,10 @@ import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
 public class S3SecretValue {
   private String kerberosID;
   private String awsSecret;
-  private String awsAccessKey;

   public S3SecretValue(String kerberosID, String awsSecret) {
     this.kerberosID = kerberosID;
     this.awsSecret = awsSecret;
-    this.awsAccessKey = DigestUtils.md5Hex(kerberosID);
   }

   public String getKerberosID() {
@@ -51,11 +48,7 @@ public class S3SecretValue {
   }

   public String getAwsAccessKey() {
-    return awsAccessKey;
-  }
-
-  public void setAwsAccessKey(String awsAccessKey) {
-    this.awsAccessKey = awsAccessKey;
+    return kerberosID;
   }

   public static S3SecretValue fromProtobuf(
@@ -72,6 +65,6 @@ public class S3SecretValue {
   @Override
   public String toString() {
-    return "awsAccessKey=" + awsAccessKey + "\nawsSecret=" + awsSecret;
+    return "awsAccessKey=" + kerberosID + "\nawsSecret=" + awsSecret;
   }
 }

TestOzoneRpcClientAbstract.java

@@ -81,6 +81,7 @@ import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartCommitUploadPartInfo;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartInfo;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartUploadCompleteInfo;
+import org.apache.hadoop.ozone.s3.util.OzoneS3Util;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.util.Time;
@@ -92,6 +93,8 @@ import org.apache.commons.lang3.RandomUtils;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.either;
 import org.junit.Assert;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
@@ -288,6 +291,23 @@ public abstract class TestOzoneRpcClientAbstract {
     Assert.assertTrue(volume.getCreationTime() >= currentTime);
   }

+  @Test
+  public void testCreateSecureS3Bucket() throws IOException {
+    long currentTime = Time.now();
+    String userName = "ozone/localhost@EXAMPLE.COM";
+    String bucketName = UUID.randomUUID().toString();
+    String s3VolumeName = OzoneS3Util.getVolumeName(userName);
+    store.createS3Bucket(s3VolumeName, bucketName);
+    String volumeName = store.getOzoneVolumeName(bucketName);
+    assertEquals(volumeName, "s3" + s3VolumeName);
+    OzoneVolume volume = store.getVolume(volumeName);
+    OzoneBucket bucket = volume.getBucket(bucketName);
+    Assert.assertEquals(bucketName, bucket.getName());
+    Assert.assertTrue(bucket.getCreationTime() >= currentTime);
+    Assert.assertTrue(volume.getCreationTime() >= currentTime);
+  }
+
   @Test
   public void testListS3Buckets()

BucketEndpoint.java

@@ -53,6 +53,8 @@ import org.apache.hadoop.ozone.s3.util.S3StorageType;
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import org.apache.commons.lang3.StringUtils;

+import static org.apache.hadoop.ozone.s3.util.OzoneS3Util.getVolumeName;
 import static org.apache.hadoop.ozone.s3.util.S3Consts.ENCODING_TYPE;
 import org.apache.http.HttpStatus;
 import org.slf4j.Logger;
@@ -196,9 +198,10 @@ public class BucketEndpoint extends EndpointBase {
   public Response put(@PathParam("bucket") String bucketName, @Context
       HttpHeaders httpHeaders) throws IOException, OS3Exception {
-    String userName = getAuthenticationHeaderParser().getAccessKeyID();
+    String volumeName = getVolumeName(getAuthenticationHeaderParser().
+        getAccessKeyID());

-    String location = createS3Bucket(userName, bucketName);
+    String location = createS3Bucket(volumeName, bucketName);
     LOG.info("Location is {}", location);
     return Response.status(HttpStatus.SC_OK).header("Location", location)

RootEndpoint.java

@@ -34,6 +34,8 @@ import org.apache.hadoop.ozone.s3.header.AuthenticationHeaderParser;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import static org.apache.hadoop.ozone.s3.util.OzoneS3Util.getVolumeName;
+
 /**
  * Top level rest endpoint.
  */
@@ -63,8 +65,9 @@ public class RootEndpoint extends EndpointBase {
           .header("Location", "/static/")
           .build();
     }
-    String userName = authenticationHeaderParser.getAccessKeyID();
-    Iterator<? extends OzoneBucket> bucketIterator = listS3Buckets(userName,
+    String volumeName = getVolumeName(authenticationHeaderParser.
+        getAccessKeyID());
+    Iterator<? extends OzoneBucket> bucketIterator = listS3Buckets(volumeName,
         null);

     while (bucketIterator.hasNext()) {

Credential.java

@@ -59,13 +59,25 @@ public class Credential {
   @SuppressWarnings("StringSplitter")
   public void parseCredential() throws OS3Exception {
     String[] split = credential.split("/");
-    if (split.length == 5) {
+    switch (split.length) {
+    case 5:
+      // Ex: dkjad922329ddnks/20190321/us-west-1/s3/aws4_request
       accessKeyID = split[0].trim();
       date = split[1].trim();
       awsRegion = split[2].trim();
       awsService = split[3].trim();
       awsRequest = split[4].trim();
-    } else {
+      return;
+    case 6:
+      // Access id is kerberos principal.
+      // Ex: testuser/om@EXAMPLE.COM/20190321/us-west-1/s3/aws4_request
+      accessKeyID = split[0] + "/" +split[1];
+      date = split[2].trim();
+      awsRegion = split[3].trim();
+      awsService = split[4].trim();
+      awsRequest = split[5].trim();
+      return;
+    default:
       LOG.error("Credentials not in expected format. credential:{}",
           credential);
       throw S3ErrorTable.newError(S3ErrorTable.MALFORMED_HEADER, credential);
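The split-length switch above distinguishes a plain S3 access key id from a kerberos principal used as the access key id: the principal itself contains a '/', so the same credential scope yields one extra field. A minimal standalone sketch of the two shapes (the sample values are taken from the comments in the hunk; the class name is just for illustration):

public final class CredentialScopeSketch {
  public static void main(String[] args) {
    // Plain access key id: the credential scope splits into 5 fields.
    String plain = "dkjad922329ddnks/20190321/us-west-1/s3/aws4_request";
    // Kerberos principal as access key id: the principal contains a '/',
    // so the same scope splits into 6 fields.
    String kerberos = "testuser/om@EXAMPLE.COM/20190321/us-west-1/s3/aws4_request";

    System.out.println(plain.split("/").length);    // prints 5
    System.out.println(kerberos.split("/").length); // prints 6

    // In the 6-field case the first two fields are rejoined into the access key id.
    String[] split = kerberos.split("/");
    String accessKeyID = split[0] + "/" + split[1]; // "testuser/om@EXAMPLE.COM"
    System.out.println(accessKeyID);
  }
}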

OzoneS3Util.java (new file)

@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.hadoop.ozone.s3.util;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import java.util.Objects;
+
+/**
+ * Ozone util for S3 related operations.
+ */
+public final class OzoneS3Util {
+
+  private OzoneS3Util() {
+  }
+
+  public static String getVolumeName(String userName) {
+    Objects.requireNonNull(userName);
+    return DigestUtils.md5Hex(userName);
+  }
+}
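As a usage sketch (not part of the commit): the S3 gateway now feeds the caller's access key id, which after this change is the kerberos principal, through getVolumeName to pick the backing volume, and the new testCreateSecureS3Bucket above asserts that the resulting Ozone volume is named "s3" plus that digest. The helper below mirrors OzoneS3Util.getVolumeName; the class name and sample principal are illustrative, and commons-codec is assumed on the classpath.

import java.util.Objects;

import org.apache.commons.codec.digest.DigestUtils;

public final class VolumeNameSketch {
  // Mirrors OzoneS3Util.getVolumeName(userName).
  static String getVolumeName(String userName) {
    Objects.requireNonNull(userName);
    return DigestUtils.md5Hex(userName);
  }

  public static void main(String[] args) {
    String principal = "ozone/localhost@EXAMPLE.COM"; // principal used in testCreateSecureS3Bucket
    String s3VolumeName = getVolumeName(principal);   // 32-character md5 hex string
    System.out.println("s3" + s3VolumeName);          // volume name asserted in the test
  }
}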

TestRootList.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.ozone.client.OzoneClientStub;
 import org.apache.hadoop.ozone.s3.header.AuthenticationHeaderParser;

 import static org.junit.Assert.assertEquals;

+import org.apache.hadoop.ozone.s3.util.OzoneS3Util;
 import org.junit.Before;
 import org.junit.Test;
@@ -61,10 +62,11 @@ public class TestRootList {
     ListBucketResponse response =
         (ListBucketResponse) rootEndpoint.get().getEntity();
     assertEquals(0, response.getBucketsNum());

+    String volumeName = OzoneS3Util.getVolumeName(userName);
     String bucketBaseName = "bucket-" + getClass().getName();
     for(int i = 0; i < 10; i++) {
-      objectStoreStub.createS3Bucket(userName, bucketBaseName + i);
+      objectStoreStub.createS3Bucket(volumeName, bucketBaseName + i);
     }
     response = (ListBucketResponse) rootEndpoint.get().getEntity();
     assertEquals(10, response.getBucketsNum());