parent 34b14061b3
commit dcb0de848d
@ -21,6 +21,7 @@ package org.apache.hadoop.hdds.security.x509;
import com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.ozone.OzoneConfigKeys;
|
||||
import org.apache.ratis.thirdparty.io.netty.handler.ssl.SslProvider;
|
||||
import org.bouncycastle.jce.provider.BouncyCastleProvider;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -32,6 +33,7 @@ import java.nio.file.Paths;
|
|||
import java.security.Provider;
|
||||
import java.security.Security;
|
||||
import java.time.Duration;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_ALGORITHM;
|
||||
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_LEN;
|
||||
|
@ -459,4 +461,14 @@ public class SecurityConfig {
|
|||
throw new SecurityException("Unknown security provider:" + provider);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns max date for which S3 tokens will be valid.
|
||||
* */
|
||||
public long getS3TokenMaxDate() {
|
||||
return getConfiguration().getTimeDuration(
|
||||
OzoneConfigKeys.OZONE_S3_TOKEN_MAX_LIFETIME_KEY,
|
||||
OzoneConfigKeys.OZONE_S3_TOKEN_MAX_LIFETIME_KEY_DEFAULT,
|
||||
TimeUnit.MICROSECONDS);
|
||||
}
|
||||
}
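The default "3m" above is a Hadoop time-duration string, so the unit handed to getTimeDuration decides the scale of the returned number. A minimal, illustrative read of the new key (standard Configuration API, same signature as used in the method above; the MILLISECONDS unit here is only for the example):

// Illustration only: parse the S3 token lifetime key with the normal
// Hadoop Configuration API. "3m" means three minutes, so reading it in
// milliseconds yields 180000.
OzoneConfiguration conf = new OzoneConfiguration();
long lifetimeMs = conf.getTimeDuration(
    OzoneConfigKeys.OZONE_S3_TOKEN_MAX_LIFETIME_KEY,
    OzoneConfigKeys.OZONE_S3_TOKEN_MAX_LIFETIME_KEY_DEFAULT,
    TimeUnit.MILLISECONDS);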
@ -373,7 +373,9 @@ public final class OzoneConfigKeys {
|
|||
"ozone.acl.enabled";
|
||||
public static final boolean OZONE_ACL_ENABLED_DEFAULT =
|
||||
false;
|
||||
|
||||
public static final String OZONE_S3_TOKEN_MAX_LIFETIME_KEY =
|
||||
"ozone.s3.token.max.lifetime";
|
||||
public static final String OZONE_S3_TOKEN_MAX_LIFETIME_KEY_DEFAULT = "3m";
|
||||
//For technical reasons this is unused and hardcoded to the
|
||||
// OzoneFileSystem.initialize.
|
||||
public static final String OZONE_FS_ISOLATED_CLASSLOADER =
|
||||
@ -27,4 +27,10 @@ import java.io.IOException;
|
|||
public interface S3SecretManager {
|
||||
|
||||
S3SecretValue getS3Secret(String kerberosID) throws IOException;
|
||||
|
||||
/**
|
||||
* API to get s3 secret for given awsAccessKey.
|
||||
* @param awsAccessKey
|
||||
* */
|
||||
String getS3UserSecretString(String awsAccessKey) throws IOException;
|
||||
}
@ -24,12 +24,16 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
|
|||
import org.apache.hadoop.ozone.OmUtils;
|
||||
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
|
||||
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
|
||||
import org.apache.hadoop.ozone.security.OzoneSecurityException;
|
||||
import org.apache.logging.log4j.util.Strings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.apache.hadoop.ozone.security.OzoneSecurityException.ResultCodes.S3_SECRET_NOT_FOUND;
|
||||
|
||||
/**
|
||||
* S3 Secret manager.
|
||||
*/
|
||||
|
@ -58,7 +62,8 @@ public class S3SecretManagerImpl implements S3SecretManager {
|
|||
public S3SecretValue getS3Secret(String kerberosID) throws IOException {
|
||||
Preconditions.checkArgument(Strings.isNotBlank(kerberosID),
|
||||
"kerberosID cannot be null or empty.");
|
||||
byte[] awsAccessKey = OmUtils.getMD5Digest(kerberosID);
|
||||
String awsAccessKeyStr = DigestUtils.md5Hex(kerberosID);
|
||||
byte[] awsAccessKey = awsAccessKeyStr.getBytes(UTF_8);
|
||||
S3SecretValue result = null;
|
||||
omMetadataManager.getLock().acquireS3SecretLock(kerberosID);
|
||||
try {
|
||||
|
@ -73,10 +78,36 @@ public class S3SecretManagerImpl implements S3SecretManager {
|
|||
result = S3SecretValue.fromProtobuf(
|
||||
OzoneManagerProtocolProtos.S3Secret.parseFrom(s3Secret));
|
||||
}
|
||||
result.setAwsAccessKey(DigestUtils.md5Hex(awsAccessKey));
|
||||
result.setAwsAccessKey(awsAccessKeyStr);
|
||||
} finally {
|
||||
omMetadataManager.getLock().releaseS3SecretLock(kerberosID);
|
||||
}
|
||||
LOG.trace("Secret for kerberosID:{},accessKey:{}, proto:{}", kerberosID,
|
||||
awsAccessKeyStr, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getS3UserSecretString(String awsAccessKeyId)
|
||||
throws IOException {
|
||||
Preconditions.checkArgument(Strings.isNotBlank(awsAccessKeyId),
|
||||
"awsAccessKeyId cannot be null or empty.");
|
||||
LOG.trace("Get secret for awsAccessKey:{}", awsAccessKeyId);
|
||||
|
||||
byte[] s3Secret;
|
||||
omMetadataManager.getLock().acquireS3SecretLock(awsAccessKeyId);
|
||||
try {
|
||||
s3Secret = omMetadataManager.getS3SecretTable()
|
||||
.get(awsAccessKeyId.getBytes(UTF_8));
|
||||
if (s3Secret == null) {
|
||||
throw new OzoneSecurityException("S3 secret not found for " +
|
||||
"awsAccessKeyId " + awsAccessKeyId, S3_SECRET_NOT_FOUND);
|
||||
}
|
||||
} finally {
|
||||
omMetadataManager.getLock().releaseS3SecretLock(awsAccessKeyId);
|
||||
}
|
||||
|
||||
return OzoneManagerProtocolProtos.S3Secret.parseFrom(s3Secret)
|
||||
.getAwsSecret();
|
||||
}
|
||||
}
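The net effect of the hunk above is that the AWS access key id handed to clients is now the hex MD5 of the Kerberos principal, and the same string (as UTF-8 bytes) keys the secret table. A small sketch of that derivation (DigestUtils comes from commons-codec, as in the imports above; the principal is only an example value):

// Sketch: derive the S3 access key id from a Kerberos principal.
String kerberosID = "testuser/host@EXAMPLE.COM";         // example only
String awsAccessKeyStr = DigestUtils.md5Hex(kerberosID);  // 32-char hex string
byte[] tableKey = awsAccessKeyStr.getBytes(StandardCharsets.UTF_8);
// getS3UserSecretString(awsAccessKeyStr) can then look the secret up
// under the same UTF-8 bytes.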
|
|
@ -185,5 +185,6 @@ public class OMException extends IOException {
|
|||
|
||||
INVALID_KMS_PROVIDER,
|
||||
|
||||
TOKEN_CREATION_ERROR
|
||||
}
|
||||
}
|
||||
@ -18,11 +18,8 @@
|
|||
package org.apache.hadoop.ozone.om.helpers;
|
||||
|
||||
import org.apache.commons.codec.digest.DigestUtils;
|
||||
import org.apache.hadoop.ozone.OmUtils;
|
||||
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* S3Secret to be saved in database.
|
||||
*/
|
||||
|
@ -31,11 +28,10 @@ public class S3SecretValue {
|
|||
private String awsSecret;
|
||||
private String awsAccessKey;
|
||||
|
||||
public S3SecretValue(String kerberosID, String awsSecret) throws IOException {
|
||||
public S3SecretValue(String kerberosID, String awsSecret) {
|
||||
this.kerberosID = kerberosID;
|
||||
this.awsSecret = awsSecret;
|
||||
this.awsAccessKey =
|
||||
DigestUtils.md5Hex(OmUtils.getMD5Digest(kerberosID));
|
||||
this.awsAccessKey = DigestUtils.md5Hex(kerberosID);
|
||||
}
|
||||
|
||||
public String getKerberosID() {
|
||||
|
@ -63,7 +59,7 @@ public class S3SecretValue {
|
|||
}
|
||||
|
||||
public static S3SecretValue fromProtobuf(
|
||||
OzoneManagerProtocolProtos.S3Secret s3Secret) throws IOException {
|
||||
OzoneManagerProtocolProtos.S3Secret s3Secret) {
|
||||
return new S3SecretValue(s3Secret.getKerberosID(), s3Secret.getAwsSecret());
|
||||
}
|
||||
|
||||
@ -0,0 +1,116 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.ozone.security;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.kerby.util.Hex;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.crypto.Mac;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLDecoder;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
|
||||
/**
|
||||
* AWS v4 authentication payload validator. For more details refer to AWS
|
||||
* documentation https://docs.aws.amazon.com/general/latest/gr/
|
||||
* sigv4-create-canonical-request.html.
|
||||
**/
|
||||
final class AWSV4AuthValidator {
|
||||
|
||||
private final static Logger LOG =
|
||||
LoggerFactory.getLogger(AWSV4AuthValidator.class);
|
||||
private static final String HMAC_SHA256_ALGORITHM = "HmacSHA256";
|
||||
private static final Charset UTF_8 = Charset.forName("utf-8");
|
||||
|
||||
private AWSV4AuthValidator() {
|
||||
}
|
||||
|
||||
private static String urlDecode(String str) {
|
||||
try {
|
||||
return URLDecoder.decode(str, UTF_8.name());
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public static String hash(String payload) throws NoSuchAlgorithmException {
|
||||
MessageDigest md = MessageDigest.getInstance("SHA-256");
|
||||
md.update(payload.getBytes(UTF_8));
|
||||
return String.format("%064x", new java.math.BigInteger(1, md.digest()));
|
||||
}
|
||||
|
||||
private static byte[] sign(byte[] key, String msg) {
|
||||
try {
|
||||
SecretKeySpec signingKey = new SecretKeySpec(key, HMAC_SHA256_ALGORITHM);
|
||||
Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM);
|
||||
mac.init(signingKey);
|
||||
return mac.doFinal(msg.getBytes(StandardCharsets.UTF_8));
|
||||
} catch (GeneralSecurityException gse) {
|
||||
throw new RuntimeException(gse);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns signing key.
|
||||
*
|
||||
* @param key
|
||||
* @param strToSign
|
||||
*
|
||||
* SignatureKey = HMAC-SHA256(HMAC-SHA256(HMAC-SHA256(HMAC-SHA256("AWS4" +
|
||||
* "<YourSecretAccessKey>","20130524"),"us-east-1"),"s3"),"aws4_request")
|
||||
*
|
||||
* For more details refer to AWS documentation: https://docs.aws.amazon
|
||||
* .com/AmazonS3/latest/API/sig-v4-header-based-auth.html
|
||||
*
|
||||
* */
|
||||
private static byte[] getSigningKey(String key, String strToSign) {
|
||||
String[] signData = StringUtils.split(StringUtils.split(strToSign,
|
||||
'\n')[2], '/');
|
||||
String dateStamp = signData[0];
|
||||
String regionName = signData[1];
|
||||
String serviceName = signData[2];
|
||||
byte[] kDate = sign(("AWS4" + key).getBytes(UTF_8), dateStamp);
|
||||
byte[] kRegion = sign(kDate, regionName);
|
||||
byte[] kService = sign(kRegion, serviceName);
|
||||
byte[] kSigning = sign(kService, "aws4_request");
|
||||
LOG.info(Hex.encode(kSigning));
|
||||
return kSigning;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate request by comparing Signature from request. Returns true if
|
||||
* aws request is legit else returns false.
|
||||
* Signature = HEX(HMAC_SHA256(key, String to Sign))
|
||||
*
|
||||
* For more details refer to AWS documentation: https://docs.aws.amazon.com
|
||||
* /AmazonS3/latest/API/sigv4-streaming.html
|
||||
*/
|
||||
public static boolean validateRequest(String strToSign, String signature,
|
||||
String userKey) {
|
||||
String expectedSignature = Hex.encode(sign(getSigningKey(userKey,
|
||||
strToSign), strToSign));
|
||||
return expectedSignature.equals(signature);
|
||||
}
|
||||
}
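The class is package-private, so callers live in org.apache.hadoop.ozone.security; the parameterized test added further down exercises it with the values reused here. A minimal standalone call, using the first test vector from this change:

// Sketch: validate a signature against a stored secret (test vector from
// TestAWSV4AuthValidator below).
String strToSign = "AWS4-HMAC-SHA256\n"
    + "20190221T002037Z\n"
    + "20190221/us-west-1/s3/aws4_request\n"
    + "c297c080cce4e0927779823d3fd1f5cae71481a8f7dfc7e18d91851294efc47d";
String signature =
    "56ec73ba1974f8feda8365c3caef89c5d4a688d5f9baccf4765f46a14cd745ad";
String secret = "dbaksbzljandlkandlsd";
boolean valid = AWSV4AuthValidator.validateRequest(strToSign, signature, secret);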
@ -24,6 +24,7 @@ import org.apache.hadoop.hdds.security.x509.SecurityConfig;
|
|||
import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
|
||||
import org.apache.hadoop.hdds.security.x509.exceptions.CertificateException;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.ozone.om.S3SecretManager;
|
||||
import org.apache.hadoop.ozone.om.exceptions.OMException;
|
||||
import org.apache.hadoop.ozone.security.OzoneSecretStore.OzoneManagerSecretState;
|
||||
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier.TokenInfo;
|
||||
|
@ -43,7 +44,9 @@ import java.util.Iterator;
|
|||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.apache.hadoop.ozone.om.exceptions.OMException.ResultCodes.TOKEN_EXPIRED;
|
||||
import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3TOKEN;
|
||||
|
||||
/**
|
||||
* SecretManager for Ozone Master. Responsible for signing identifiers with
|
||||
|
@ -58,6 +61,7 @@ public class OzoneDelegationTokenSecretManager
|
|||
.getLogger(OzoneDelegationTokenSecretManager.class);
|
||||
private final Map<OzoneTokenIdentifier, TokenInfo> currentTokens;
|
||||
private final OzoneSecretStore store;
|
||||
private final S3SecretManager s3SecretManager;
|
||||
private Thread tokenRemoverThread;
|
||||
private final long tokenRemoverScanInterval;
|
||||
private String omCertificateSerialId;
|
||||
|
@ -80,12 +84,14 @@ public class OzoneDelegationTokenSecretManager
|
|||
*/
|
||||
public OzoneDelegationTokenSecretManager(OzoneConfiguration conf,
|
||||
long tokenMaxLifetime, long tokenRenewInterval,
|
||||
long dtRemoverScanInterval, Text service) throws IOException {
|
||||
long dtRemoverScanInterval, Text service,
|
||||
S3SecretManager s3SecretManager) throws IOException {
|
||||
super(new SecurityConfig(conf), tokenMaxLifetime, tokenRenewInterval,
|
||||
service, LOG);
|
||||
currentTokens = new ConcurrentHashMap();
|
||||
this.tokenRemoverScanInterval = dtRemoverScanInterval;
|
||||
this.store = new OzoneSecretStore(conf);
|
||||
this.s3SecretManager = s3SecretManager;
|
||||
loadTokenSecretState(store.loadState());
|
||||
}
|
||||
|
||||
|
@ -279,6 +285,9 @@ public class OzoneDelegationTokenSecretManager
|
|||
@Override
|
||||
public byte[] retrievePassword(OzoneTokenIdentifier identifier)
|
||||
throws InvalidToken {
|
||||
if(identifier.getTokenType().equals(S3TOKEN)) {
|
||||
return validateS3Token(identifier);
|
||||
}
|
||||
return validateToken(identifier).getPassword();
|
||||
}
|
||||
|
||||
|
@ -286,7 +295,7 @@ public class OzoneDelegationTokenSecretManager
|
|||
* Checks if TokenInfo for the given identifier exists in database and if the
|
||||
* token is expired.
|
||||
*/
|
||||
public TokenInfo validateToken(OzoneTokenIdentifier identifier)
|
||||
private TokenInfo validateToken(OzoneTokenIdentifier identifier)
|
||||
throws InvalidToken {
|
||||
TokenInfo info = currentTokens.get(identifier);
|
||||
if (info == null) {
|
||||
|
@ -327,6 +336,37 @@ public class OzoneDelegationTokenSecretManager
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates whether an S3 identifier is valid.
|
||||
* */
|
||||
private byte[] validateS3Token(OzoneTokenIdentifier identifier)
|
||||
throws InvalidToken {
|
||||
LOG.trace("Validating S3Token for identifier:{}", identifier);
|
||||
String awsSecret;
|
||||
try {
|
||||
awsSecret = s3SecretManager.getS3UserSecretString(identifier
|
||||
.getAwsAccessId());
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error while validating S3 identifier:{}",
|
||||
identifier, e);
|
||||
throw new InvalidToken("No S3 secret found for S3 identifier:"
|
||||
+ identifier);
|
||||
}
|
||||
|
||||
if (awsSecret == null) {
|
||||
throw new InvalidToken("No S3 secret found for S3 identifier:"
|
||||
+ identifier);
|
||||
}
|
||||
|
||||
if (AWSV4AuthValidator.validateRequest(identifier.getStrToSign(),
|
||||
identifier.getSignature(), awsSecret)) {
|
||||
return identifier.getSignature().getBytes(UTF_8);
|
||||
}
|
||||
throw new InvalidToken("Invalid S3 identifier:"
|
||||
+ identifier);
|
||||
|
||||
}
|
||||
|
||||
// TODO: handle roll private key/certificate
|
||||
private synchronized void removeExpiredKeys() {
|
||||
long now = Time.now();
|
||||
@ -99,6 +99,7 @@ public class OzoneSecurityException extends IOException {
|
|||
*/
|
||||
public enum ResultCodes {
|
||||
OM_PUBLIC_PRIVATE_KEY_FILE_NOT_EXIST,
|
||||
S3_SECRET_NOT_FOUND,
|
||||
SECRET_MANAGER_HMAC_ERROR
|
||||
}
|
||||
}
|
||||
|
@ -28,8 +28,11 @@ import org.apache.hadoop.classification.InterfaceAudience;
|
|||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto;
|
||||
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type;
|
||||
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
|
||||
|
||||
import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3TOKEN;
|
||||
|
||||
/**
|
||||
* The token identifier for Ozone Master.
|
||||
*/
|
||||
|
@ -40,12 +43,17 @@ public class OzoneTokenIdentifier extends
|
|||
|
||||
public final static Text KIND_NAME = new Text("OzoneToken");
|
||||
private String omCertSerialId;
|
||||
private Type tokenType;
|
||||
private String awsAccessId;
|
||||
private String signature;
|
||||
private String strToSign;
|
||||
|
||||
/**
|
||||
* Create an empty delegation token identifier.
|
||||
*/
|
||||
public OzoneTokenIdentifier() {
|
||||
super();
|
||||
this.tokenType = Type.DELEGATION_TOKEN;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -57,6 +65,7 @@ public class OzoneTokenIdentifier extends
|
|||
*/
|
||||
public OzoneTokenIdentifier(Text owner, Text renewer, Text realUser) {
|
||||
super(owner, renewer, realUser);
|
||||
this.tokenType = Type.DELEGATION_TOKEN;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -75,16 +84,26 @@ public class OzoneTokenIdentifier extends
|
|||
*/
|
||||
@Override
|
||||
public void write(DataOutput out) throws IOException {
|
||||
OMTokenProto token = OMTokenProto.newBuilder()
|
||||
OMTokenProto.Builder builder = OMTokenProto.newBuilder()
|
||||
.setMaxDate(getMaxDate())
|
||||
.setType(getTokenType())
|
||||
.setOwner(getOwner().toString())
|
||||
.setRealUser(getRealUser().toString())
|
||||
.setRenewer(getRenewer().toString())
|
||||
.setIssueDate(getIssueDate())
|
||||
.setMaxDate(getMaxDate())
|
||||
.setSequenceNumber(getSequenceNumber())
|
||||
.setMasterKeyId(getMasterKeyId())
|
||||
.setOmCertSerialId(getOmCertSerialId())
|
||||
.build();
|
||||
.setMasterKeyId(getMasterKeyId());
|
||||
|
||||
// Set s3 specific fields.
|
||||
if (getTokenType().equals(S3TOKEN)) {
|
||||
builder.setAccessKeyId(getAwsAccessId())
|
||||
.setSignature(getSignature())
|
||||
.setStrToSign(getStrToSign());
|
||||
} else {
|
||||
builder.setOmCertSerialId(getOmCertSerialId());
|
||||
}
|
||||
OMTokenProto token = builder.build();
|
||||
out.write(token.toByteArray());
|
||||
}
|
||||
|
||||
|
@ -97,6 +116,8 @@ public class OzoneTokenIdentifier extends
|
|||
@Override
|
||||
public void readFields(DataInput in) throws IOException {
|
||||
OMTokenProto token = OMTokenProto.parseFrom((DataInputStream) in);
|
||||
setTokenType(token.getType());
|
||||
setMaxDate(token.getMaxDate());
|
||||
setOwner(new Text(token.getOwner()));
|
||||
setRealUser(new Text(token.getRealUser()));
|
||||
setRenewer(new Text(token.getRenewer()));
|
||||
|
@ -105,6 +126,13 @@ public class OzoneTokenIdentifier extends
|
|||
setSequenceNumber(token.getSequenceNumber());
|
||||
setMasterKeyId(token.getMasterKeyId());
|
||||
setOmCertSerialId(token.getOmCertSerialId());
|
||||
|
||||
// Set s3 specific fields.
|
||||
if (getTokenType().equals(S3TOKEN)) {
|
||||
setAwsAccessId(token.getAccessKeyId());
|
||||
setSignature(token.getSignature());
|
||||
setStrToSign(token.getStrToSign());
|
||||
}
|
||||
}
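A quick way to see the new S3 fields surviving serialization is to round-trip an identifier through write and readFields. A hypothetical test-style snippet (DataOutputBuffer/DataInputBuffer are the usual org.apache.hadoop.io helpers; field values are placeholders):

// Hypothetical round-trip of an S3-typed identifier.
OzoneTokenIdentifier id = new OzoneTokenIdentifier();
id.setTokenType(S3TOKEN);
id.setAwsAccessId("accessId");
id.setSignature("signature");
id.setStrToSign("strToSign");

DataOutputBuffer out = new DataOutputBuffer();
id.write(out);                 // S3 branch: writes accessKeyId, signature, strToSign

OzoneTokenIdentifier copy = new OzoneTokenIdentifier();
DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());
copy.readFields(in);           // S3 branch: restores the three fields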
|
||||
|
||||
/**
|
||||
|
@ -115,13 +143,22 @@ public class OzoneTokenIdentifier extends
|
|||
throws IOException {
|
||||
OMTokenProto token = OMTokenProto.parseFrom((DataInputStream) in);
|
||||
OzoneTokenIdentifier identifier = new OzoneTokenIdentifier();
|
||||
identifier.setRenewer(new Text(token.getRenewer()));
|
||||
identifier.setOwner(new Text(token.getOwner()));
|
||||
identifier.setRealUser(new Text(token.getRealUser()));
|
||||
identifier.setTokenType(token.getType());
|
||||
identifier.setMaxDate(token.getMaxDate());
|
||||
identifier.setIssueDate(token.getIssueDate());
|
||||
identifier.setSequenceNumber(token.getSequenceNumber());
|
||||
identifier.setMasterKeyId(token.getMasterKeyId());
|
||||
|
||||
// Set type specific fields.
|
||||
if (token.getType().equals(S3TOKEN)) {
|
||||
identifier.setSignature(token.getSignature());
|
||||
identifier.setStrToSign(token.getStrToSign());
|
||||
identifier.setAwsAccessId(token.getAccessKeyId());
|
||||
} else {
|
||||
identifier.setRenewer(new Text(token.getRenewer()));
|
||||
identifier.setOwner(new Text(token.getOwner()));
|
||||
identifier.setRealUser(new Text(token.getRealUser()));
|
||||
identifier.setIssueDate(token.getIssueDate());
|
||||
identifier.setSequenceNumber(token.getSequenceNumber());
|
||||
identifier.setMasterKeyId(token.getMasterKeyId());
|
||||
}
|
||||
identifier.setOmCertSerialId(token.getOmCertSerialId());
|
||||
return identifier;
|
||||
}
|
||||
|
@ -226,4 +263,53 @@ public class OzoneTokenIdentifier extends
|
|||
public void setOmCertSerialId(String omCertSerialId) {
|
||||
this.omCertSerialId = omCertSerialId;
|
||||
}
|
||||
|
||||
public Type getTokenType() {
|
||||
return tokenType;
|
||||
}
|
||||
|
||||
public void setTokenType(Type tokenType) {
|
||||
this.tokenType = tokenType;
|
||||
}
|
||||
|
||||
public String getAwsAccessId() {
|
||||
return awsAccessId;
|
||||
}
|
||||
|
||||
public void setAwsAccessId(String awsAccessId) {
|
||||
this.awsAccessId = awsAccessId;
|
||||
}
|
||||
|
||||
public String getSignature() {
|
||||
return signature;
|
||||
}
|
||||
|
||||
public void setSignature(String signature) {
|
||||
this.signature = signature;
|
||||
}
|
||||
|
||||
public String getStrToSign() {
|
||||
return strToSign;
|
||||
}
|
||||
|
||||
public void setStrToSign(String strToSign) {
|
||||
this.strToSign = strToSign;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder buffer = new StringBuilder();
|
||||
buffer.append(getKind())
|
||||
.append(" owner=").append(getOwner())
|
||||
.append(", renewer=").append(getRenewer())
|
||||
.append(", realUser=").append(getRealUser())
|
||||
.append(", issueDate=").append(getIssueDate())
|
||||
.append(", maxDate=").append(getMaxDate())
|
||||
.append(", sequenceNumber=").append(getSequenceNumber())
|
||||
.append(", masterKeyId=").append(getMasterKeyId())
|
||||
.append(", strToSign=").append(getStrToSign())
|
||||
.append(", signature=").append(getSignature())
|
||||
.append(", awsAccessKeyId=").append(getAwsAccessId());
|
||||
return buffer.toString();
|
||||
}
|
||||
}
|
|
@ -228,6 +228,7 @@ enum Status {
|
|||
BUCKET_ENCRYPTION_KEY_NOT_FOUND = 40;
|
||||
UNKNOWN_CIPHER_SUITE = 41;
|
||||
INVALID_KMS_PROVIDER = 42;
|
||||
TOKEN_CREATION_ERROR = 43;
|
||||
|
||||
}
|
||||
|
||||
|
@ -567,16 +568,24 @@ message DeleteKeyResponse {
|
|||
}
|
||||
|
||||
message OMTokenProto {
|
||||
optional uint32 version = 1;
|
||||
optional string owner = 2;
|
||||
optional string renewer = 3;
|
||||
optional string realUser = 4;
|
||||
optional uint64 issueDate = 5;
|
||||
optional uint64 maxDate = 6;
|
||||
optional uint32 sequenceNumber = 7;
|
||||
optional uint32 masterKeyId = 8;
|
||||
optional uint64 expiryDate = 9;
|
||||
required string omCertSerialId = 10;
|
||||
enum Type {
|
||||
DELEGATION_TOKEN = 1;
|
||||
S3TOKEN = 2;
|
||||
};
|
||||
required Type type = 1;
|
||||
optional uint32 version = 2;
|
||||
optional string owner = 3;
|
||||
optional string renewer = 4;
|
||||
optional string realUser = 5;
|
||||
optional uint64 issueDate = 6;
|
||||
optional uint64 maxDate = 7;
|
||||
optional uint32 sequenceNumber = 8;
|
||||
optional uint32 masterKeyId = 9;
|
||||
optional uint64 expiryDate = 10;
|
||||
optional string omCertSerialId = 11;
|
||||
optional string accessKeyId = 12;
|
||||
optional string signature = 13;
|
||||
optional string strToSign = 14;
|
||||
}
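Only the type field is required in the reshaped message, so an S3 token can be built from just the S3-specific members. A minimal construction with the generated Java classes (values are placeholders):

// Sketch: build the proto for an S3 token; only 'type' is required.
OMTokenProto token = OMTokenProto.newBuilder()
    .setType(OMTokenProto.Type.S3TOKEN)
    .setAccessKeyId("accessKeyId")
    .setSignature("signature")
    .setStrToSign("strToSign")
    .build();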
|
||||
|
||||
message SecretKeyProto {
|
||||
@ -0,0 +1,78 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.ozone.security;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
* Test for {@link AWSV4AuthValidator}.
|
||||
* */
|
||||
@RunWith(Parameterized.class)
|
||||
public class TestAWSV4AuthValidator {
|
||||
|
||||
private String strToSign;
|
||||
private String signature;
|
||||
private String awsAccessKey;
|
||||
|
||||
public TestAWSV4AuthValidator(String strToSign, String signature,
|
||||
String awsAccessKey) {
|
||||
this.strToSign = strToSign;
|
||||
this.signature = signature;
|
||||
this.awsAccessKey = awsAccessKey;
|
||||
}
|
||||
|
||||
@Parameterized.Parameters
|
||||
public static Collection<Object[]> data() {
|
||||
return Arrays.asList(new Object[][]{
|
||||
{
|
||||
"AWS4-HMAC-SHA256\n" +
|
||||
"20190221T002037Z\n" +
|
||||
"20190221/us-west-1/s3/aws4_request\n" +
|
||||
"c297c080cce4e0927779823d3fd1f5cae71481a8f7dfc7e18d" +
|
||||
"91851294efc47d",
|
||||
"56ec73ba1974f8feda8365c3caef89c5d4a688d5f9baccf" +
|
||||
"4765f46a14cd745ad",
|
||||
"dbaksbzljandlkandlsd"
|
||||
},
|
||||
{
|
||||
"AWS4-HMAC-SHA256\n" +
|
||||
"20150830T123600Z\n" +
|
||||
"20150830/us-east-1/iam/aws4_request\n" +
|
||||
"f536975d06c0309214f805bb90ccff089219ecd68b2" +
|
||||
"577efef23edd43b7e1a59",
|
||||
"5d672d79c15b13162d9279b0855cfba" +
|
||||
"6789a8edb4c82c400e06b5924a6f2b5d7",
|
||||
"wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY"
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidateRequest() {
|
||||
assertTrue(AWSV4AuthValidator.validateRequest(strToSign, signature,
|
||||
awsAccessKey));
|
||||
}
|
||||
}
@ -25,8 +25,11 @@ import org.apache.hadoop.hdds.security.x509.SecurityConfig;
|
|||
import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
|
||||
import org.apache.hadoop.hdds.security.x509.certificate.client.OMCertificateClient;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.ozone.om.S3SecretManager;
|
||||
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
|
||||
import org.apache.hadoop.security.AccessControlException;
|
||||
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
|
||||
import org.apache.hadoop.security.token.SecretManager;
|
||||
import org.apache.hadoop.security.token.Token;
|
||||
import org.apache.hadoop.test.GenericTestUtils;
|
||||
import org.apache.hadoop.test.LambdaTestUtils;
|
||||
|
@ -43,6 +46,11 @@ import java.security.PrivateKey;
|
|||
import java.security.PublicKey;
|
||||
import java.security.Signature;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3TOKEN;
|
||||
|
||||
|
||||
/**
|
||||
* Test class for {@link OzoneDelegationTokenSecretManager}.
|
||||
|
@ -60,6 +68,8 @@ public class TestOzoneDelegationTokenSecretManager {
|
|||
private final static Text TEST_USER = new Text("testUser");
|
||||
private long tokenMaxLifetime = 1000 * 20;
|
||||
private long tokenRemoverScanInterval = 1000 * 20;
|
||||
private S3SecretManager s3SecretManager;
|
||||
private String s3Secret = "dbaksbzljandlkandlsd";
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
|
@ -70,6 +80,26 @@ public class TestOzoneDelegationTokenSecretManager {
|
|||
certificateClient.init();
|
||||
expiryTime = Time.monotonicNow() + 60 * 60 * 24;
|
||||
serviceRpcAdd = new Text("localhost");
|
||||
final Map<String, String> s3Secrets = new HashMap<>();
|
||||
s3Secrets.put("testuser1", s3Secret);
|
||||
s3Secrets.put("abc", "djakjahkd");
|
||||
s3SecretManager = new S3SecretManager() {
|
||||
@Override
|
||||
public S3SecretValue getS3Secret(String kerberosID) {
|
||||
if(s3Secrets.containsKey(kerberosID)) {
|
||||
return new S3SecretValue(kerberosID, s3Secrets.get(kerberosID));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getS3UserSecretString(String awsAccessKey) {
|
||||
if(s3Secrets.containsKey(awsAccessKey)) {
|
||||
return s3Secrets.get(awsAccessKey);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -250,6 +280,66 @@ public class TestOzoneDelegationTokenSecretManager {
|
|||
certificateClient.signData(id.getBytes())));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidateS3TOKENSuccess() throws Exception {
|
||||
secretManager = createSecretManager(conf, tokenMaxLifetime,
|
||||
expiryTime, tokenRemoverScanInterval);
|
||||
secretManager.start(certificateClient);
|
||||
|
||||
OzoneTokenIdentifier identifier = new OzoneTokenIdentifier();
|
||||
identifier.setTokenType(S3TOKEN);
|
||||
identifier.setSignature("56ec73ba1974f8feda8365c3caef89c5d4a688d" +
|
||||
"5f9baccf4765f46a14cd745ad");
|
||||
identifier.setStrToSign("AWS4-HMAC-SHA256\n" +
|
||||
"20190221T002037Z\n" +
|
||||
"20190221/us-west-1/s3/aws4_request\n" +
|
||||
"c297c080cce4e0927779823d3fd1f5cae71481a8f7dfc7e18d91851294efc47d");
|
||||
identifier.setAwsAccessId("testuser1");
|
||||
secretManager.retrievePassword(identifier);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidateS3TOKENFailure() throws Exception {
|
||||
secretManager = createSecretManager(conf, tokenMaxLifetime,
|
||||
expiryTime, tokenRemoverScanInterval);
|
||||
secretManager.start(certificateClient);
|
||||
|
||||
OzoneTokenIdentifier identifier = new OzoneTokenIdentifier();
|
||||
identifier.setTokenType(S3TOKEN);
|
||||
identifier.setSignature("56ec73ba1974f8feda8365c3caef89c5d4a688d" +
|
||||
"5f9baccf4765f46a14cd745ad");
|
||||
identifier.setStrToSign("AWS4-HMAC-SHA256\n" +
|
||||
"20190221T002037Z\n" +
|
||||
"20190221/us-west-1/s3/aws4_request\n" +
|
||||
"c297c080cce4e0927779823d3fd1f5cae71481a8f7dfc7e18d91851294efc47d");
|
||||
identifier.setAwsAccessId("testuser2");
|
||||
// Case 1: User doesn't have an AWS secret set.
|
||||
LambdaTestUtils.intercept(SecretManager.InvalidToken.class, " No S3 " +
|
||||
"secret found for S3 identifier",
|
||||
() -> secretManager.retrievePassword(identifier));
|
||||
|
||||
// Case 2: Invalid hash in string to sign.
|
||||
identifier.setStrToSign("AWS4-HMAC-SHA256\n" +
|
||||
"20190221T002037Z\n" +
|
||||
"20190221/us-west-1/s3/aws4_request\n" +
|
||||
"c297c080cce4e0927779823d3fd1f5cae71481a8f7dfc7e18d91851294efc47d" +
|
||||
"+invalidhash");
|
||||
LambdaTestUtils.intercept(SecretManager.InvalidToken.class, " No S3 " +
|
||||
"secret found for S3 identifier",
|
||||
() -> secretManager.retrievePassword(identifier));
|
||||
|
||||
// Case 3: Invalid hash in authorization hmac.
|
||||
identifier.setSignature("56ec73ba1974f8feda8365c3caef89c5d4a688d" +
|
||||
"+invalidhash" + "5f9baccf4765f46a14cd745ad");
|
||||
identifier.setStrToSign("AWS4-HMAC-SHA256\n" +
|
||||
"20190221T002037Z\n" +
|
||||
"20190221/us-west-1/s3/aws4_request\n" +
|
||||
"c297c080cce4e0927779823d3fd1f5cae71481a8f7dfc7e18d91851294efc47d");
|
||||
LambdaTestUtils.intercept(SecretManager.InvalidToken.class, " No S3 " +
|
||||
"secret found for S3 identifier",
|
||||
() -> secretManager.retrievePassword(identifier));
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate hash using public key of KeyPair.
|
||||
*/
|
||||
|
@ -269,6 +359,6 @@ public class TestOzoneDelegationTokenSecretManager {
|
|||
createSecretManager(OzoneConfiguration config, long tokenMaxLife,
|
||||
long expiry, long tokenRemoverScanTime) throws IOException {
|
||||
return new OzoneDelegationTokenSecretManager(config, tokenMaxLife,
|
||||
expiry, tokenRemoverScanTime, serviceRpcAdd);
|
||||
expiry, tokenRemoverScanTime, serviceRpcAdd, s3SecretManager);
|
||||
}
|
||||
}
@ -56,6 +56,16 @@ services:
|
|||
env_file:
|
||||
- docker-config
|
||||
command: ["/opt/hadoop/bin/ozone","om"]
|
||||
s3g:
|
||||
image: apache/hadoop-runner
|
||||
hostname: s3g
|
||||
volumes:
|
||||
- ../..:/opt/hadoop
|
||||
ports:
|
||||
- 9878:9878
|
||||
env_file:
|
||||
- ./docker-config
|
||||
command: ["/opt/hadoop/bin/ozone","s3g"]
|
||||
scm:
|
||||
image: apache/hadoop-runner
|
||||
hostname: scm
|
||||
@ -29,6 +29,9 @@ OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
|
|||
OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
|
||||
OZONE-SITE.XML_ozone.om.kerberos.principal=om/om@EXAMPLE.COM
|
||||
OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
|
||||
OZONE-SITE.XML_ozone.s3g.keytab.file=/etc/security/keytabs/HTTP.keytab
|
||||
OZONE-SITE.XML_ozone.s3g.authentication.kerberos.principal=HTTP/s3g@EXAMPLE.COM
|
||||
|
||||
OZONE-SITE.XML_ozone.security.enabled=true
|
||||
OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/scm@EXAMPLE.COM
|
||||
OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
|
||||
|
@ -61,6 +64,7 @@ LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
|||
LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
|
||||
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
|
||||
LOG4J.PROPERTIES_log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
|
||||
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop=INFO
|
||||
LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR
|
||||
|
||||
#Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation.
|
||||
@ -17,7 +17,7 @@
|
|||
Resource ../commonlib.robot
|
||||
|
||||
*** Variables ***
|
||||
${OZONE_S3_HEADER_VERSION} v2
|
||||
${OZONE_S3_HEADER_VERSION} v4
|
||||
${OZONE_S3_SET_CREDENTIALS} true
|
||||
|
||||
*** Keywords ***
|
||||
@ -16,8 +16,35 @@
|
|||
*** Settings ***
|
||||
Documentation Smoke test to start cluster with docker-compose environments.
|
||||
Library OperatingSystem
|
||||
Library String
|
||||
Resource ../commonlib.robot
|
||||
|
||||
*** Variables ***
|
||||
${ENDPOINT_URL} http://s3g:9878
|
||||
|
||||
*** Keywords ***
|
||||
Install aws cli s3 centos
|
||||
Execute sudo yum install -y awscli
|
||||
Install aws cli s3 debian
|
||||
Execute sudo apt-get install -y awscli
|
||||
|
||||
Install aws cli
|
||||
${rc} ${output} = Run And Return Rc And Output which apt-get
|
||||
Run Keyword if '${rc}' == '0' Install aws cli s3 debian
|
||||
${rc} ${output} = Run And Return Rc And Output yum --help
|
||||
Run Keyword if '${rc}' == '0' Install aws cli s3 centos
|
||||
|
||||
Setup credentials
|
||||
${hostname}= Execute hostname
|
||||
Execute kinit -k testuser/${hostname}@EXAMPLE.COM -t /etc/security/keytabs/testuser.keytab
|
||||
${result} = Execute ozone sh s3 getsecret
|
||||
${accessKey} = Get Regexp Matches ${result} (?<=awsAccessKey=).*
|
||||
${secret} = Get Regexp Matches ${result} (?<=awsSecret=).*
|
||||
Execute aws configure set default.s3.signature_version s3v4
|
||||
Execute aws configure set aws_access_key_id ${accessKey[0]}
|
||||
Execute aws configure set aws_secret_access_key ${secret[0]}
|
||||
Execute aws configure set region us-west-1
|
||||
|
||||
*** Test Cases ***
|
||||
Create volume and bucket
|
||||
${rc} ${output} = Run And Return Rc And Output ozone sh volume create o3://om/fstest --user bilbo --quota 100TB --root
|
||||
|
@ -95,8 +122,11 @@ Run ozoneFS tests
|
|||
|
||||
Execute ozone fs -mkdir -p o3fs://bucket2.fstest/testdir2
|
||||
Execute ozone fs -mkdir -p o3fs://bucket3.fstest2/testdir3
|
||||
|
||||
Execute ozone fs -cp o3fs://bucket1.fstest/testdir1/localdir1 o3fs://bucket2.fstest/testdir2/
|
||||
|
||||
Execute ozone fs -cp o3fs://bucket1.fstest/testdir1/localdir1 o3fs://bucket3.fstest2/testdir3/
|
||||
|
||||
Execute ozone sh key put o3://om/fstest/bucket1/KEY.txt NOTICE.txt
|
||||
${result} = Execute ozone fs -ls o3fs://bucket1.fstest/KEY.txt
|
||||
Should contain ${result} KEY.txt
|
||||
|
@ -108,5 +138,16 @@ Run ozoneFS tests
|
|||
Execute ls -l GET.txt
|
||||
${rc} ${result} = Run And Return Rc And Output ozone fs -ls o3fs://abcde.pqrs/
|
||||
Should Be Equal As Integers ${rc} 1
|
||||
Should contain ${result} not found
|
||||
|
||||
|
||||
Secure S3 test Failure
|
||||
Run Keyword Install aws cli
|
||||
${rc} ${result} = Run And Return Rc And Output aws s3api --endpoint-url ${ENDPOINT_URL} create-bucket --bucket bucket-test123
|
||||
Should Be True ${rc} > 0
|
||||
|
||||
Secure S3 test Success
|
||||
Run Keyword Setup credentials
|
||||
${output} = Execute aws s3api --endpoint-url ${ENDPOINT_URL} create-bucket --bucket bucket-test123
|
||||
Should contain ${result} Volume pqrs is not found
|
||||
|
||||
@ -80,7 +80,7 @@ execute_tests(){
|
|||
OUTPUT_NAME="$COMPOSE_DIR-${TEST//\//_}"
|
||||
docker-compose -f "$COMPOSE_FILE" exec -T om python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
|
||||
set -e
|
||||
docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log"
|
||||
docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log"
|
||||
done
|
||||
if [ "$KEEP_RUNNING" = false ]; then
|
||||
docker-compose -f "$COMPOSE_FILE" down
|
||||
@ -49,5 +49,7 @@ public class TestOzoneConfigurationFields extends TestConfigurationFieldsBase {
|
|||
configurationPropsToSkipCompare.add(HddsConfigKeys.HDDS_SECURITY_PROVIDER);
|
||||
configurationPropsToSkipCompare.add(HddsConfigKeys.HDDS_GRPC_TLS_TEST_CERT);
|
||||
configurationPropsToSkipCompare.add(OMConfigKeys.OZONE_OM_NODES_KEY);
|
||||
configurationPropsToSkipCompare.add(OzoneConfigKeys.
|
||||
OZONE_S3_TOKEN_MAX_LIFETIME_KEY);
|
||||
}
|
||||
}
@ -68,6 +68,7 @@ import org.apache.hadoop.ozone.om.OMConfigKeys;
|
|||
import org.apache.hadoop.ozone.om.OMStorage;
|
||||
import org.apache.hadoop.ozone.om.OzoneManager;
|
||||
import org.apache.hadoop.ozone.om.exceptions.OMException;
|
||||
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
|
||||
import org.apache.hadoop.ozone.om.protocolPB.OzoneManagerProtocolClientSideTranslatorPB;
|
||||
import org.apache.hadoop.ozone.om.protocolPB.OzoneManagerProtocolPB;
|
||||
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
|
||||
|
@ -659,6 +660,50 @@ public final class TestSecureOzoneCluster {
|
|||
om = OzoneManager.createOm(null, config);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetS3Secret() throws Exception {
|
||||
|
||||
// Setup secure OM for start
|
||||
setupOm(conf);
|
||||
long omVersion =
|
||||
RPC.getProtocolVersion(OzoneManagerProtocolPB.class);
|
||||
try {
|
||||
// Start OM
|
||||
om.setCertClient(new CertificateClientTestImpl(conf));
|
||||
om.start();
|
||||
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
|
||||
String username = ugi.getUserName();
|
||||
|
||||
// Get first OM client which will authenticate via Kerberos
|
||||
omClient = new OzoneManagerProtocolClientSideTranslatorPB(
|
||||
RPC.getProxy(OzoneManagerProtocolPB.class, omVersion,
|
||||
OmUtils.getOmAddress(conf), ugi, conf,
|
||||
NetUtils.getDefaultSocketFactory(conf),
|
||||
CLIENT_TIMEOUT), RandomStringUtils.randomAscii(5));
|
||||
|
||||
//Creates a secret since it does not exist
|
||||
S3SecretValue firstAttempt = omClient
|
||||
.getS3Secret("HADOOP/JOHNDOE");
|
||||
|
||||
//Fetches the secret from db since it was created in previous step
|
||||
S3SecretValue secondAttempt = omClient
|
||||
.getS3Secret("HADOOP/JOHNDOE");
|
||||
|
||||
//secret fetched on both attempts must be same
|
||||
Assert.assertTrue(firstAttempt.getAwsSecret()
|
||||
.equals(secondAttempt.getAwsSecret()));
|
||||
|
||||
//access key fetched on both attempts must be same
|
||||
Assert.assertTrue(firstAttempt.getAwsAccessKey()
|
||||
.equals(secondAttempt.getAwsAccessKey()));
|
||||
|
||||
} finally {
|
||||
if(om != null){
|
||||
om.stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test functionality to get SCM signed certificate for OM.
|
||||
*/
|
||||
@ -20,11 +20,8 @@ package org.apache.hadoop.ozone.client.rpc;
|
|||
|
||||
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
|
||||
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
|
||||
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -55,23 +52,4 @@ public class TestOzoneRpcClient extends TestOzoneRpcClientAbstract {
|
|||
public static void shutdown() throws IOException {
|
||||
shutdownCluster();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetS3Secret() throws IOException {
|
||||
//Creates a secret since it does not exist
|
||||
S3SecretValue firstAttempt = TestOzoneRpcClient.getStore()
|
||||
.getS3Secret("HADOOP/JOHNDOE");
|
||||
|
||||
//Fetches the secret from db since it was created in previous step
|
||||
S3SecretValue secondAttempt = TestOzoneRpcClient.getStore()
|
||||
.getS3Secret("HADOOP/JOHNDOE");
|
||||
|
||||
//secret fetched on both attempts must be same
|
||||
Assert.assertTrue(firstAttempt.getAwsSecret()
|
||||
.equals(secondAttempt.getAwsSecret()));
|
||||
|
||||
//access key fetched on both attempts must be same
|
||||
Assert.assertTrue(firstAttempt.getAwsAccessKey()
|
||||
.equals(secondAttempt.getAwsAccessKey()));
|
||||
}
|
||||
}
|
||||
@ -87,6 +87,7 @@ import static org.junit.Assert.assertTrue;
|
|||
import static org.junit.Assert.fail;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.Timeout;
|
||||
|
@ -1213,6 +1214,7 @@ public class TestOzoneShell {
|
|||
}
|
||||
|
||||
@Test
|
||||
@Ignore("Can't run without secure cluster.")
|
||||
public void testS3Secret() throws Exception {
|
||||
String setOmAddress =
|
||||
"--set=" + OZONE_OM_ADDRESS_KEY + "=" + getOmAddress();
|
||||
@ -218,7 +218,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
|
|||
+ StartupOption.HELP.getName() + " ]\n";
|
||||
private static final String OM_DAEMON = "om";
|
||||
private static boolean securityEnabled = false;
|
||||
private static OzoneDelegationTokenSecretManager delegationTokenMgr;
|
||||
private OzoneDelegationTokenSecretManager delegationTokenMgr;
|
||||
private OzoneBlockTokenSecretManager blockTokenMgr;
|
||||
private KeyPair keyPair;
|
||||
private CertificateClient certClient;
|
||||
|
@ -257,7 +257,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
|
|||
private final IAccessAuthorizer accessAuthorizer;
|
||||
private JvmPauseMonitor jvmPauseMonitor;
|
||||
private final SecurityConfig secConfig;
|
||||
private final S3SecretManager s3SecretManager;
|
||||
private S3SecretManager s3SecretManager;
|
||||
private volatile boolean isOmRpcServerRunning = false;
|
||||
private String omComponent;
|
||||
|
||||
|
@ -305,18 +305,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
|
|||
omRpcAddressTxt = new Text(omNodeDetails.getRpcAddressString());
|
||||
|
||||
secConfig = new SecurityConfig(configuration);
|
||||
if (secConfig.isSecurityEnabled()) {
|
||||
omComponent = OM_DAEMON + "-" + omId;
|
||||
certClient = new OMCertificateClient(new SecurityConfig(conf));
|
||||
delegationTokenMgr = createDelegationTokenSecretManager(configuration);
|
||||
}
|
||||
if (secConfig.isBlockTokenEnabled()) {
|
||||
blockTokenMgr = createBlockTokenSecretManager(configuration);
|
||||
}
|
||||
|
||||
omRpcServer = getRpcServer(conf);
|
||||
omRpcAddress = updateRPCListenAddress(configuration,
|
||||
OZONE_OM_ADDRESS_KEY, omNodeRpcAddr, omRpcServer);
|
||||
metadataManager = new OmMetadataManagerImpl(configuration);
|
||||
volumeManager = new VolumeManagerImpl(metadataManager, configuration);
|
||||
|
||||
|
@ -333,9 +322,20 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
|
|||
|
||||
s3BucketManager = new S3BucketManagerImpl(configuration, metadataManager,
|
||||
volumeManager, bucketManager);
|
||||
if (secConfig.isSecurityEnabled()) {
|
||||
omComponent = OM_DAEMON + "-" + omId;
|
||||
certClient = new OMCertificateClient(new SecurityConfig(conf));
|
||||
s3SecretManager = new S3SecretManagerImpl(configuration, metadataManager);
|
||||
delegationTokenMgr = createDelegationTokenSecretManager(configuration);
|
||||
}
|
||||
if (secConfig.isBlockTokenEnabled()) {
|
||||
blockTokenMgr = createBlockTokenSecretManager(configuration);
|
||||
}
|
||||
omRpcServer = getRpcServer(conf);
|
||||
omRpcAddress = updateRPCListenAddress(configuration,
|
||||
OZONE_OM_ADDRESS_KEY, omNodeRpcAddr, omRpcServer);
|
||||
keyManager = new KeyManagerImpl(scmBlockClient, metadataManager,
|
||||
configuration, omStorage.getOmId(), blockTokenMgr, getKmsProvider());
|
||||
s3SecretManager = new S3SecretManagerImpl(configuration, metadataManager);
|
||||
|
||||
shutdownHook = () -> {
|
||||
saveOmMetrics();
|
||||
|
@ -601,7 +601,8 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
|
|||
TimeUnit.MILLISECONDS);
|
||||
|
||||
return new OzoneDelegationTokenSecretManager(conf, tokenMaxLifetime,
|
||||
tokenRenewInterval, tokenRemoverScanInterval, omRpcAddressTxt);
|
||||
tokenRenewInterval, tokenRemoverScanInterval, omRpcAddressTxt,
|
||||
s3SecretManager);
|
||||
}
|
||||
|
||||
private OzoneBlockTokenSecretManager createBlockTokenSecretManager(
|
||||
|
@ -811,7 +812,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
|
|||
* @return RPC server
|
||||
* @throws IOException if there is an I/O error while creating RPC server
|
||||
*/
|
||||
private static RPC.Server startRpcServer(OzoneConfiguration conf,
|
||||
private RPC.Server startRpcServer(OzoneConfiguration conf,
|
||||
InetSocketAddress addr, Class<?> protocol, BlockingService instance,
|
||||
int handlerCount) throws IOException {
|
||||
RPC.Server rpcServer = new RPC.Builder(conf)
|
||||
@ -0,0 +1,78 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.ozone.s3;
|
||||
|
||||
import java.nio.charset.Charset;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
|
||||
/*
|
||||
* Parser to request auth parser for http request.
|
||||
* */
|
||||
interface AWSAuthParser {
|
||||
|
||||
String UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD";
|
||||
String NEWLINE = "\n";
|
||||
String CONTENT_TYPE = "content-type";
|
||||
String X_AMAZ_DATE = "X-Amz-Date";
|
||||
String CONTENT_MD5 = "content-md5";
|
||||
String AUTHORIZATION_HEADER = "Authorization";
|
||||
Charset UTF_8 = Charset.forName("utf-8");
|
||||
String X_AMZ_CONTENT_SHA256 = "X-Amz-Content-SHA256";
|
||||
String HOST = "host";
|
||||
|
||||
String AWS4_TERMINATOR = "aws4_request";
|
||||
|
||||
String AWS4_SIGNING_ALGORITHM = "AWS4-HMAC-SHA256";
|
||||
|
||||
/**
|
||||
* Seconds in a week, which is the max expiration time Sig-v4 accepts.
|
||||
*/
|
||||
long PRESIGN_URL_MAX_EXPIRATION_SECONDS =
|
||||
60 * 60 * 24 * 7;
|
||||
|
||||
String X_AMZ_SECURITY_TOKEN = "X-Amz-Security-Token";
|
||||
|
||||
String X_AMZ_CREDENTIAL = "X-Amz-Credential";
|
||||
|
||||
String X_AMZ_DATE = "X-Amz-Date";
|
||||
|
||||
String X_AMZ_EXPIRES = "X-Amz-Expires";
|
||||
|
||||
String X_AMZ_SIGNED_HEADER = "X-Amz-SignedHeaders";
|
||||
|
||||
String X_AMZ_SIGNATURE = "X-Amz-Signature";
|
||||
|
||||
String X_AMZ_ALGORITHM = "X-Amz-Algorithm";
|
||||
|
||||
String AUTHORIZATION = "Authorization";
|
||||
|
||||
String HOST_HEADER = "Host";
|
||||
|
||||
DateTimeFormatter DATE_FORMATTER =
|
||||
DateTimeFormatter.ofPattern("yyyyMMdd");
|
||||
|
||||
DateTimeFormatter TIME_FORMATTER =
|
||||
DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'")
|
||||
.withZone(ZoneOffset.UTC);
|
||||
|
||||
/**
|
||||
* API to return string to sign.
|
||||
*/
|
||||
String getStringToSign() throws Exception;
|
||||
}
@ -0,0 +1,300 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.ozone.s3;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
|
||||
import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV4;
|
||||
import org.apache.hadoop.ozone.s3.header.Credential;
|
||||
import org.apache.kerby.util.Hex;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.ws.rs.container.ContainerRequestContext;
|
||||
import javax.ws.rs.core.MultivaluedMap;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.InetAddress;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URLEncoder;
|
||||
import java.net.UnknownHostException;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.time.LocalDate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import static java.time.temporal.ChronoUnit.SECONDS;
|
||||
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.S3_TOKEN_CREATION_ERROR;
|
||||
|
||||
/**
|
||||
* Parser to process AWS v4 auth request. Creates string to sign and auth
|
||||
* header. For more details refer to AWS documentation https://docs.aws
|
||||
* .amazon.com/general/latest/gr/sigv4-create-canonical-request.html.
|
||||
**/
|
||||
public class AWSV4AuthParser implements AWSAuthParser {
|
||||
|
||||
private final static Logger LOG =
|
||||
LoggerFactory.getLogger(AWSV4AuthParser.class);
|
||||
private MultivaluedMap<String, String> headerMap;
|
||||
private MultivaluedMap<String, String> queryMap;
|
||||
private String uri;
|
||||
private String method;
|
||||
private AuthorizationHeaderV4 v4Header;
|
||||
private String stringToSign;
|
||||
private String amzContentPayload;
|
||||
|
||||
public AWSV4AuthParser(ContainerRequestContext context)
|
||||
throws OS3Exception {
|
||||
this.headerMap = context.getHeaders();
|
||||
this.queryMap = context.getUriInfo().getQueryParameters();
|
||||
try {
|
||||
this.uri = new URI(context.getUriInfo().getRequestUri()
|
||||
.getPath().replaceAll("\\/+",
|
||||
"/")).normalize().getPath();
|
||||
} catch (URISyntaxException e) {
|
||||
throw S3_TOKEN_CREATION_ERROR;
|
||||
}
|
||||
|
||||
this.method = context.getMethod();
|
||||
v4Header = new AuthorizationHeaderV4(
|
||||
headerMap.getFirst(AUTHORIZATION_HEADER));
|
||||
}
|
||||
|
||||
  public void parse() throws Exception {
    StringBuilder strToSign = new StringBuilder();

    // According to AWS sigv4 documentation, authorization header should be
    // in following format.
    // Authorization: algorithm Credential=access key ID/credential scope,
    // SignedHeaders=SignedHeaders, Signature=signature

    // Construct String to sign in below format.
    // StringToSign =
    // Algorithm + \n +
    // RequestDateTime + \n +
    // CredentialScope + \n +
    // HashedCanonicalRequest
    String algorithm, requestDateTime, credentialScope, canonicalRequest;
    algorithm = v4Header.getAlgorithm();
    requestDateTime = headerMap.getFirst(X_AMAZ_DATE);
    Credential credential = v4Header.getCredentialObj();
    credentialScope = String.format("%s/%s/%s/%s", credential.getDate(),
        credential.getAwsRegion(), credential.getAwsService(),
        credential.getAwsRequest());

    // If the absolute path is empty, use a forward slash (/)
    uri = (uri.trim().length() > 0) ? uri : "/";
    // Encode URI and preserve forward slashes
    strToSign.append(algorithm + NEWLINE);
    strToSign.append(requestDateTime + NEWLINE);
    strToSign.append(credentialScope + NEWLINE);

    canonicalRequest = buildCanonicalRequest();
    strToSign.append(hash(canonicalRequest));
    LOG.debug("canonicalRequest:[{}]", canonicalRequest);

    headerMap.keySet().forEach(k -> LOG.trace("Header:{},value:{}", k,
        headerMap.get(k)));

    LOG.debug("StringToSign:[{}]", strToSign);
    stringToSign = strToSign.toString();
  }

  private String buildCanonicalRequest() throws OS3Exception {
    Iterable<String> parts = split("/", uri);
    List<String> encParts = new ArrayList<>();
    for (String p : parts) {
      encParts.add(urlEncode(p));
    }
    String canonicalUri = join("/", encParts);

    String canonicalQueryStr = getQueryParamString();

    StringBuilder canonicalHeaders = new StringBuilder();

    for (String header : v4Header.getSignedHeaders()) {
      List<String> headerValue = new ArrayList<>();
      canonicalHeaders.append(header.toLowerCase());
      canonicalHeaders.append(":");
      for (String originalHeader : headerMap.keySet()) {
        if (originalHeader.toLowerCase().equals(header)) {
          headerValue.add(headerMap.getFirst(originalHeader).trim());
        }
      }

      if (headerValue.size() == 0) {
        throw new RuntimeException("Header " + header + " not present in " +
            "request");
      }
      if (headerValue.size() > 1) {
        Collections.sort(headerValue);
      }

      // Set for testing purpose only to skip date and host validation.
      validateSignedHeader(header, headerValue.get(0));

      canonicalHeaders.append(join(",", headerValue));
      canonicalHeaders.append(NEWLINE);
    }

    String payloadHash;
    if (UNSIGNED_PAYLOAD.equals(
        headerMap.get(X_AMZ_CONTENT_SHA256))) {
      payloadHash = UNSIGNED_PAYLOAD;
    } else {
      payloadHash = headerMap.getFirst(X_AMZ_CONTENT_SHA256);
    }

    String signedHeaderStr = v4Header.getSignedHeaderString();
    String canonicalRequest = method + NEWLINE
        + canonicalUri + NEWLINE
        + canonicalQueryStr + NEWLINE
        + canonicalHeaders + NEWLINE
        + signedHeaderStr + NEWLINE
        + payloadHash;

    return canonicalRequest;
  }

  @VisibleForTesting
  void validateSignedHeader(String header, String headerValue)
      throws OS3Exception {
    switch (header) {
    case HOST:
      try {
        URI hostUri = new URI(headerValue);
        InetAddress.getByName(hostUri.getHost());
        // TODO: Validate if current request is coming from same host.
      } catch (UnknownHostException | URISyntaxException e) {
        LOG.error("Host value mentioned in signed header is not valid. " +
            "Host:{}", headerValue);
        throw S3_TOKEN_CREATION_ERROR;
      }
      break;
    case X_AMAZ_DATE:
      LocalDate date = LocalDate.parse(headerValue, TIME_FORMATTER);
      LocalDate now = LocalDate.now();
      if (date.isBefore(now.minus(PRESIGN_URL_MAX_EXPIRATION_SECONDS, SECONDS))
          || date.isAfter(now.plus(PRESIGN_URL_MAX_EXPIRATION_SECONDS,
          SECONDS))) {
        LOG.error("AWS date not in valid range. Request timestamp:{} should " +
            "not be older than {} seconds.", headerValue,
            PRESIGN_URL_MAX_EXPIRATION_SECONDS);
        throw S3_TOKEN_CREATION_ERROR;
      }
      break;
    case X_AMZ_CONTENT_SHA256:
      // TODO: Construct request payload and match HEX(SHA256(requestPayload))
      break;
    default:
      break;
    }
  }

  /**
   * String join that also works with empty strings.
   *
   * @return joined string
   */
  private static String join(String glue, List<String> parts) {
    StringBuilder result = new StringBuilder();
    boolean addSeparator = false;
    for (String p : parts) {
      if (addSeparator) {
        result.append(glue);
      }
      result.append(p);
      addSeparator = true;
    }
    return result.toString();
  }

  /**
   * Returns matching strings.
   *
   * @param regex Regular expression to split by
   * @param whole The string to split
   * @return pieces
   */
  private static Iterable<String> split(String regex, String whole) {
    Pattern p = Pattern.compile(regex);
    Matcher m = p.matcher(whole);
    List<String> result = new ArrayList<>();
    int pos = 0;
    while (m.find()) {
      result.add(whole.substring(pos, m.start()));
      pos = m.end();
    }
    result.add(whole.substring(pos));
    return result;
  }

  private String urlEncode(String str) {
    try {
      return URLEncoder.encode(str, UTF_8.name())
          .replaceAll("\\+", "%20")
          .replaceAll("%7E", "~");
    } catch (UnsupportedEncodingException e) {
      throw new RuntimeException(e);
    }
  }

  private String getQueryParamString() {
    List<String> params = new ArrayList<>(queryMap.keySet());

    // Sort by name, then by value
    Collections.sort(params, (o1, o2) -> o1.equals(o2) ?
        queryMap.getFirst(o1).compareTo(queryMap.getFirst(o2)) :
        o1.compareTo(o2));

    StringBuilder result = new StringBuilder();
    for (String p : params) {
      if (result.length() > 0) {
        result.append("&");
      }
      result.append(urlEncode(p));
      result.append('=');
      result.append(urlEncode(queryMap.getFirst(p)));
    }
    return result.toString();
  }

  public static String hash(String payload) throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    md.update(payload.getBytes(UTF_8));
    return Hex.encode(md.digest()).toLowerCase();
  }

  public String getAwsAccessId() {
    return v4Header.getAccessKeyID();
  }

  public String getSignature() {
    return v4Header.getSignature();
  }

  public String getStringToSign() throws Exception {
    return stringToSign;
  }
}
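The parse() and buildCanonicalRequest() pair above follows the SigV4 layout: the string to sign is the algorithm, the request timestamp, the credential scope and the hex SHA-256 of the canonical request, each joined by a newline. A minimal standalone sketch of the same computation, assuming purely illustrative request values and a plain JDK SHA-256 helper (the class and values below are not part of this patch):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public final class StringToSignSketch {

  public static void main(String[] args) throws Exception {
    // Illustrative values; a real request carries these in its headers.
    String algorithm = "AWS4-HMAC-SHA256";
    String requestDateTime = "20150830T123600Z";
    String credentialScope = "20150830/us-east-1/iam/aws4_request";

    // Canonical request: method, URI, query string, canonical headers,
    // signed header list and payload hash, each separated by '\n'.
    String canonicalRequest = "GET\n"
        + "/\n"
        + "Action=ListUsers&Version=2010-05-08\n"
        + "host:iam.amazonaws.com\n"
        + "x-amz-date:20150830T123600Z\n"
        + "\n"
        + "host;x-amz-date\n"
        + sha256Hex("");   // empty payload

    // StringToSign = Algorithm \n RequestDateTime \n CredentialScope \n
    //                HEX(SHA256(CanonicalRequest))
    String stringToSign = algorithm + "\n"
        + requestDateTime + "\n"
        + credentialScope + "\n"
        + sha256Hex(canonicalRequest);

    System.out.println(stringToSign);
  }

  // Lower-case hex SHA-256, equivalent in spirit to the hash() helper above.
  private static String sha256Hex(String payload) throws Exception {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    byte[] digest = md.digest(payload.getBytes(StandardCharsets.UTF_8));
    StringBuilder sb = new StringBuilder();
    for (byte b : digest) {
      sb.append(String.format("%02x", b));
    }
    return sb.toString();
  }
}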
@@ -17,32 +17,104 @@
 */
package org.apache.hadoop.ozone.s3;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
import java.io.IOException;

import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ozone.OzoneSecurityUtil;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientFactory;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.enterprise.context.RequestScoped;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.Context;
import java.io.IOException;
import java.net.URISyntaxException;

import static org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMTokenProto.Type.S3TOKEN;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.AUTHORIZATION_HEADER;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.UTF_8;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.AUTH_PROTOCOL_NOT_SUPPORTED;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.S3_TOKEN_CREATION_ERROR;

/**
 * This class creates the OzoneClient for the Rest endpoints.
 */
@ApplicationScoped
@RequestScoped
public class OzoneClientProducer {

  private final static Logger LOG =
      LoggerFactory.getLogger(OzoneClientProducer.class);

  @Context
  private ContainerRequestContext context;

  @Inject
  private OzoneConfiguration ozoneConfiguration;

  @Inject
  public OzoneClientProducer(
      OzoneConfiguration ozoneConfiguration) {
    this.ozoneConfiguration = ozoneConfiguration;
  }
  private Text omService;

  @Produces
  public OzoneClient createClient() throws IOException {
    return getClient(ozoneConfiguration);
  }

  private OzoneClient getClient(OzoneConfiguration config) throws IOException {
    try {
      if (OzoneSecurityUtil.isSecurityEnabled(config)) {
        LOG.debug("Creating s3 token for client.");
        if (context.getHeaderString(AUTHORIZATION_HEADER).startsWith("AWS4")) {
          try {
            AWSV4AuthParser v4RequestParser = new AWSV4AuthParser(context);
            v4RequestParser.parse();

            OzoneTokenIdentifier identifier = new OzoneTokenIdentifier();
            identifier.setTokenType(S3TOKEN);
            identifier.setStrToSign(v4RequestParser.getStringToSign());
            identifier.setSignature(v4RequestParser.getSignature());
            identifier.setAwsAccessId(v4RequestParser.getAwsAccessId());
            identifier.setOwner(new Text(v4RequestParser.getAwsAccessId()));

            LOG.trace("Adding token for service:{}", omService);
            Token<OzoneTokenIdentifier> token = new Token(identifier.getBytes(),
                identifier.getSignature().getBytes(UTF_8),
                identifier.getKind(),
                omService);
            UserGroupInformation remoteUser =
                UserGroupInformation.createRemoteUser(
                    v4RequestParser.getAwsAccessId());
            remoteUser.addToken(token);
            UserGroupInformation.setLoginUser(remoteUser);
          } catch (OS3Exception | URISyntaxException ex) {
            LOG.error("S3 token creation failed.");
            throw S3_TOKEN_CREATION_ERROR;
          }
        } else {
          throw AUTH_PROTOCOL_NOT_SUPPORTED;
        }
      }
    } catch (Exception e) {
      LOG.error("Error: ", e);
    }
    return OzoneClientFactory.getClient(ozoneConfiguration);
  }

  @VisibleForTesting
  public void setContext(ContainerRequestContext context) {
    this.context = context;
  }

  @VisibleForTesting
  public void setOzoneConfiguration(OzoneConfiguration config) {
    this.ozoneConfiguration = config;
  }
}
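Since createClient() is a CDI @Produces method, any request-scoped REST endpoint can simply inject the resulting OzoneClient. A minimal sketch of such a consumer, assuming a hypothetical endpoint class that is not part of this patch:

package org.apache.hadoop.ozone.s3;

import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;

import org.apache.hadoop.ozone.client.OzoneClient;

/**
 * Hypothetical endpoint; shows only how the produced client is injected.
 */
@Path("/example")
public class ExampleEndpoint {

  // Resolved per request through OzoneClientProducer#createClient().
  @Inject
  private OzoneClient client;

  @GET
  public String info() {
    // The injected client already carries the S3 token that the producer
    // built from the Authorization header when security is enabled.
    return "client ready: " + (client != null);
  }
}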
@@ -0,0 +1,52 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.ozone.s3;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.security.SecurityUtil;

import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
/**
 * This class creates the OM service.
 */
@ApplicationScoped
public class OzoneServiceProvider {

  private Text omServiceAdd;

  @Inject
  private OzoneConfiguration conf;

  @PostConstruct
  public void init() {
    omServiceAdd = SecurityUtil.buildTokenService(OmUtils.
        getOmAddressForClients(conf));
  }

  @Produces
  public Text getService() {
    return omServiceAdd;
  }

}
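The provider turns the OM client address into the Text service name that gets stamped onto the S3 tokens above. A minimal sketch of what SecurityUtil.buildTokenService yields, assuming a local address instead of the one OmUtils resolves from configuration (everything below is illustrative, not part of the patch):

import java.net.InetSocketAddress;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.SecurityUtil;

public final class TokenServiceSketch {

  public static void main(String[] args) {
    // Illustrative address; in the gateway it comes from
    // OmUtils.getOmAddressForClients(conf).
    InetSocketAddress omAddress = new InetSocketAddress("localhost", 9862);

    // buildTokenService renders the address in the "ip:port" (or
    // "host:port", depending on hadoop.security.token.service.use_ip)
    // form used as Token#service.
    Text service = SecurityUtil.buildTokenService(omAddress);
    System.out.println(service);
  }
}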
@@ -42,6 +42,14 @@ public final class S3ErrorTable {
  public static final OS3Exception NO_SUCH_BUCKET = new OS3Exception(
      "NoSuchBucket", "The specified bucket does not exist", HTTP_NOT_FOUND);

  public static final OS3Exception AUTH_PROTOCOL_NOT_SUPPORTED =
      new OS3Exception("AuthProtocolNotSupported", "Auth protocol used for" +
          " this request is not supported.", HTTP_BAD_REQUEST);

  public static final OS3Exception S3_TOKEN_CREATION_ERROR =
      new OS3Exception("InvalidRequest", "Error while creating s3 token.",
          HTTP_BAD_REQUEST);

  public static final OS3Exception BUCKET_NOT_EMPTY = new OS3Exception(
      "BucketNotEmpty", "The bucket you tried to delete is not empty.",
      HTTP_CONFLICT);
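These constants are thrown directly, as OzoneClientProducer does above, and surface as S3-style error responses with the given code and HTTP status. A small hedged sketch of a guard a gateway component might apply before doing real work; the class and method are hypothetical, only the constants and the three-argument OS3Exception constructor come from this patch:

import javax.ws.rs.container.ContainerRequestContext;

import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;

public final class AuthGuardSketch {

  /**
   * Hypothetical guard: reject any request whose Authorization header is
   * not SigV4. Propagates as HTTP 400 with code "AuthProtocolNotSupported".
   */
  public static void requireSigV4(ContainerRequestContext context)
      throws OS3Exception {
    String authHeader = context.getHeaderString("Authorization");
    if (authHeader == null || !authHeader.startsWith("AWS4-HMAC-SHA256")) {
      throw S3ErrorTable.AUTH_PROTOCOL_NOT_SUPPORTED;
    }
  }
}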
@@ -1,67 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.hadoop.ozone.s3.header;

import org.apache.hadoop.classification.InterfaceAudience;

import java.time.format.DateTimeFormatter;

/**
 * AWS constants.
 */
@InterfaceAudience.Private
public final class AWSConstants {

  private AWSConstants() {
  }

  public static final String LINE_SEPARATOR = "\n";

  public static final String AWS4_TERMINATOR = "aws4_request";

  public static final String AWS4_SIGNING_ALGORITHM = "AWS4-HMAC-SHA256";

  /**
   * Seconds in a week, which is the max expiration time Sig-v4 accepts.
   */
  public static final long PRESIGN_URL_MAX_EXPIRATION_SECONDS =
      60 * 60 * 24 * 7;

  public static final String X_AMZ_SECURITY_TOKEN = "X-Amz-Security-Token";

  public static final String X_AMZ_CREDENTIAL = "X-Amz-Credential";

  public static final String X_AMZ_DATE = "X-Amz-Date";

  public static final String X_AMZ_EXPIRES = "X-Amz-Expires";

  public static final String X_AMZ_SIGNED_HEADER = "X-Amz-SignedHeaders";

  public static final String X_AMZ_CONTENT_SHA256 = "x-amz-content-sha256";

  public static final String X_AMZ_SIGNATURE = "X-Amz-Signature";

  public static final String X_AMZ_ALGORITHM = "X-Amz-Algorithm";

  public static final String AUTHORIZATION = "Authorization";

  public static final String HOST = "Host";

  public static final DateTimeFormatter DATE_FORMATTER =
      DateTimeFormatter.ofPattern("yyyyMMdd");

}
@@ -1,19 +1,18 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.apache.hadoop.ozone.s3.header;

@@ -35,8 +34,8 @@ import static java.time.temporal.ChronoUnit.DAYS;
import static org.apache.commons.lang3.StringUtils.isAllEmpty;
import static org.apache.commons.lang3.StringUtils.isNoneEmpty;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.MALFORMED_HEADER;
import static org.apache.hadoop.ozone.s3.header.AWSConstants.AWS4_SIGNING_ALGORITHM;
import static org.apache.hadoop.ozone.s3.header.AWSConstants.DATE_FORMATTER;
import static org.apache.hadoop.ozone.s3.AWSV4AuthParser.AWS4_SIGNING_ALGORITHM;
import static org.apache.hadoop.ozone.s3.AWSV4AuthParser.DATE_FORMATTER;

/**
 * S3 Authorization header.
@@ -44,11 +43,12 @@ import static org.apache.hadoop.ozone.s3.header.AWSConstants.DATE_FORMATTER;
 * -authorization-header.html
 */
public class AuthorizationHeaderV4 {

  private final static Logger LOG = LoggerFactory.getLogger(
      AuthorizationHeaderV4.class);

  private final static String CREDENTIAL = "Credential=";
  private final static String SIGNEDHEADERS= "SignedHeaders=";
  private final static String SIGNEDHEADERS = "SignedHeaders=";
  private final static String SIGNATURE = "Signature=";

  private String authHeader;

@@ -243,4 +243,11 @@ public class AuthorizationHeaderV4 {
    return credentialObj.getAwsRequest();
  }

  public Collection<String> getSignedHeaders() {
    return signedHeaders;
  }

  public Credential getCredentialObj() {
    return credentialObj;
  }
}
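AuthorizationHeaderV4 splits a SigV4 Authorization header on the Credential=, SignedHeaders= and Signature= markers defined above. A standalone sketch of that split, assuming the sample header used in the test further down; the class and parsing helpers below are illustrative and not the class's real methods:

public final class AuthHeaderSplitSketch {

  public static void main(String[] args) {
    // Sample header, same shape as the one used in TestOzoneClientProducer.
    String header = "AWS4-HMAC-SHA256 "
        + "Credential=AKIDEXAMPLE/20150830/us-east-1/iam/aws4_request, "
        + "SignedHeaders=content-type;host;x-amz-date, "
        + "Signature=5d672d79c15b13162d9279b0855cfba6789a8edb4c82c400"
        + "e06b5924a6f2b5d7";

    // The algorithm is the token before the first space; the remainder is a
    // comma separated list of Credential=, SignedHeaders= and Signature=.
    String algorithm = header.substring(0, header.indexOf(' '));
    String rest = header.substring(header.indexOf(' ') + 1);

    String credential = null;
    String signedHeaders = null;
    String signature = null;
    for (String part : rest.split(",")) {
      part = part.trim();
      if (part.startsWith("Credential=")) {
        credential = part.substring("Credential=".length());
      } else if (part.startsWith("SignedHeaders=")) {
        signedHeaders = part.substring("SignedHeaders=".length());
      } else if (part.startsWith("Signature=")) {
        signature = part.substring("Signature=".length());
      }
    }

    System.out.println("algorithm     = " + algorithm);
    System.out.println("credential    = " + credential);
    System.out.println("signedHeaders = " + signedHeaders);
    System.out.println("signature     = " + signature);
  }
}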
@@ -0,0 +1,144 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.ozone.s3;

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.Mockito;

import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;

import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.Collection;

import static org.apache.hadoop.ozone.s3.AWSAuthParser.AUTHORIZATION_HEADER;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.CONTENT_MD5;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.CONTENT_TYPE;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.HOST_HEADER;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.X_AMAZ_DATE;
import static org.apache.hadoop.ozone.s3.AWSAuthParser.X_AMZ_CONTENT_SHA256;

/**
 * Test class for @{@link OzoneClientProducer}.
 * */
@RunWith(Parameterized.class)
public class TestOzoneClientProducer {

  private OzoneClientProducer producer;
  private MultivaluedMap<String, String> headerMap;
  private MultivaluedMap<String, String> queryMap;
  private String authHeader;
  private String contentMd5;
  private String host;
  private String amzContentSha256;
  private String date;
  private String contentType;

  private ContainerRequestContext context;
  private UriInfo uriInfo;

  public TestOzoneClientProducer(String authHeader, String contentMd5,
      String host, String amzContentSha256, String date, String contentType)
      throws Exception {
    this.authHeader = authHeader;
    this.contentMd5 = contentMd5;
    this.host = host;
    this.amzContentSha256 = amzContentSha256;
    this.date = date;
    this.contentType = contentType;
    producer = new OzoneClientProducer();
    headerMap = new MultivaluedHashMap<>();
    queryMap = new MultivaluedHashMap<>();
    uriInfo = Mockito.mock(UriInfo.class);
    context = Mockito.mock(ContainerRequestContext.class);
    OzoneConfiguration config = new OzoneConfiguration();
    config.setBoolean(OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY, true);
    setupContext();
    producer.setContext(context);
    producer.setOzoneConfiguration(config);
  }

  @Test
  public void testGetClientFailure() throws Exception {
    LambdaTestUtils.intercept(IOException.class, "Couldn't create" +
        " protocol ", () -> producer.createClient());
  }

  private void setupContext() throws Exception {
    headerMap.putSingle(AUTHORIZATION_HEADER, authHeader);
    headerMap.putSingle(CONTENT_MD5, contentMd5);
    headerMap.putSingle(HOST_HEADER, host);
    headerMap.putSingle(X_AMZ_CONTENT_SHA256, amzContentSha256);
    headerMap.putSingle(X_AMAZ_DATE, date);
    headerMap.putSingle(CONTENT_TYPE, contentType);

    Mockito.when(uriInfo.getQueryParameters()).thenReturn(queryMap);
    Mockito.when(uriInfo.getRequestUri()).thenReturn(new URI(""));

    Mockito.when(context.getUriInfo()).thenReturn(uriInfo);
    Mockito.when(context.getHeaders()).thenReturn(headerMap);
    Mockito.when(context.getHeaderString(AUTHORIZATION_HEADER))
        .thenReturn(authHeader);
    Mockito.when(context.getUriInfo().getQueryParameters())
        .thenReturn(queryMap);
  }

  @Parameterized.Parameters
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][]{
        {
            "AWS4-HMAC-SHA256 Credential=testuser1/20190221/us-west-1/s3" +
                "/aws4_request, SignedHeaders=content-md5;host;" +
                "x-amz-content-sha256;x-amz-date, " +
                "Signature" +
                "=56ec73ba1974f8feda8365c3caef89c5d4a688d5f9baccf47" +
                "65f46a14cd745ad",
            "Zi68x2nPDDXv5qfDC+ZWTg==",
            "s3g:9878",
            "e2bd43f11c97cde3465e0e8d1aad77af7ec7aa2ed8e213cd0e24" +
                "1e28375860c6",
            "20190221T002037Z",
            ""
        },
        {
            "AWS4-HMAC-SHA256 " +
                "Credential=AKIDEXAMPLE/20150830/us-east-1/iam/aws4_request," +
                " SignedHeaders=content-type;host;x-amz-date, " +
                "Signature=" +
                "5d672d79c15b13162d9279b0855cfba6789a8edb4c82c400" +
                "e06b5924a6f2b5d7",
            "",
            "iam.amazonaws.com",
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            "20150830T123600Z",
            "application/x-www-form-urlencoded; charset=utf-8"
        }
    });
  }

}
@@ -26,7 +26,7 @@ import org.junit.Test;
import java.time.LocalDate;

import static java.time.temporal.ChronoUnit.DAYS;
import static org.apache.hadoop.ozone.s3.header.AWSConstants.DATE_FORMATTER;
import static org.apache.hadoop.ozone.s3.AWSV4AuthParser.DATE_FORMATTER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;