HDFS-14611. Move handshake secret field from Token to BlockAccessToken. Contributed by Chen Liang.
commit d8a6098a96 (parent f507bc059d)

org/apache/hadoop/security/token/Token.java

@@ -56,7 +56,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
   private Text kind;
   private Text service;
   private TokenRenewer renewer;
-  private byte[] dnHandshakeSecret;
 
   /**
    * Construct a token given a token identifier and a secret manager for the
@@ -69,7 +68,14 @@ public class Token<T extends TokenIdentifier> implements Writable {
     identifier = id.getBytes();
     kind = id.getKind();
     service = new Text();
-    dnHandshakeSecret = new byte[0];
   }
 
+  public void setID(byte[] bytes) {
+    identifier = bytes;
+  }
+
+  public void setPassword(byte[] newPassword) {
+    password = newPassword;
+  }
+
   /**
@@ -84,7 +90,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     this.password = (password == null)? new byte[0] : password;
     this.kind = (kind == null)? new Text() : kind;
     this.service = (service == null)? new Text() : service;
-    this.dnHandshakeSecret = new byte[0];
   }
 
   /**
@@ -95,7 +100,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     password = new byte[0];
     kind = new Text();
     service = new Text();
-    dnHandshakeSecret = new byte[0];
   }
 
   /**
@@ -107,7 +111,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     this.password = other.password.clone();
     this.kind = new Text(other.kind);
     this.service = new Text(other.service);
-    this.dnHandshakeSecret = other.dnHandshakeSecret.clone();
   }
 
   public Token<T> copyToken() {
@@ -123,7 +126,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     this.password = tokenPB.getPassword().toByteArray();
     this.kind = new Text(tokenPB.getKindBytes().toByteArray());
     this.service = new Text(tokenPB.getServiceBytes().toByteArray());
-    this.dnHandshakeSecret = new byte[0];
   }
 
   /**
@@ -149,14 +151,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     return identifier;
   }
 
-  public byte[] getDnHandshakeSecret() {
-    return dnHandshakeSecret;
-  }
-
-  public void setDNHandshakeSecret(byte[] secret) {
-    this.dnHandshakeSecret = secret;
-  }
-
   private static Class<? extends TokenIdentifier>
       getClassForIdentifier(Text kind) {
     Class<? extends TokenIdentifier> cls = null;
@@ -351,11 +345,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     in.readFully(password);
     kind.readFields(in);
     service.readFields(in);
-    len = WritableUtils.readVInt(in);
-    if (dnHandshakeSecret == null || dnHandshakeSecret.length != len) {
-      dnHandshakeSecret = new byte[len];
-    }
-    in.readFully(dnHandshakeSecret);
   }
 
   @Override
@@ -366,8 +355,6 @@ public class Token<T extends TokenIdentifier> implements Writable {
     out.write(password);
     kind.write(out);
     service.write(out);
-    WritableUtils.writeVInt(out, dnHandshakeSecret.length);
-    out.write(dnHandshakeSecret);
   }
 
   /**

Security.proto (message TokenProto)

@@ -36,7 +36,6 @@ message TokenProto {
   required bytes password = 2;
   required string kind = 3;
   required string service = 4;
-  optional bytes handshakeSecret = 5;
 }
 
 message CredentialsKVProto {

org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java

@@ -329,9 +329,7 @@ public class SaslDataTransferClient {
       if (secretKey != null) {
         LOG.debug("DataNode overwriting downstream QOP" +
             saslProps.get(Sasl.QOP));
-        byte[] newSecret = SecretManager.createPassword(saslProps.get(Sasl.QOP)
-            .getBytes(Charsets.UTF_8), secretKey);
-        accessToken.setDNHandshakeSecret(newSecret);
+        updateToken(accessToken, secretKey, saslProps);
       }
 
       LOG.debug("Client using encryption algorithm {}",
@@ -447,9 +445,7 @@ public class SaslDataTransferClient {
       }
       LOG.debug("DataNode overwriting downstream QOP " +
           saslProps.get(Sasl.QOP));
-      byte[] newSecret = SecretManager.createPassword(
-          saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8), secretKey);
-      accessToken.setDNHandshakeSecret(newSecret);
+      updateToken(accessToken, secretKey, saslProps);
     }
     targetQOP = saslProps.get(Sasl.QOP);
     String userName = buildUserName(accessToken);
@@ -460,6 +456,18 @@ public class SaslDataTransferClient {
         saslProps, callbackHandler, accessToken);
   }
 
+  private void updateToken(Token<BlockTokenIdentifier> accessToken,
+      SecretKey secretKey, Map<String, String> saslProps)
+      throws IOException {
+    byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8);
+    BlockTokenIdentifier bkid = accessToken.decodeIdentifier();
+    bkid.setHandshakeMsg(newSecret);
+    byte[] bkidBytes = bkid.getBytes();
+    accessToken.setPassword(
+        SecretManager.createPassword(bkidBytes, secretKey));
+    accessToken.setID(bkidBytes);
+  }
+
   /**
    * Builds the client's user name for the general-purpose handshake, consisting
    * of the base64-encoded serialized block access token identifier. Note that
@@ -516,12 +524,16 @@ public class SaslDataTransferClient {
     try {
       // Start of handshake - "initial response" in SASL terminology.
       // The handshake secret can be null, this happens when client is running
-      // a new version but the cluster does not have this feature. In which case
-      // there will be no encrypted secret sent from NN.
-      byte[] handshakeSecret = accessToken.getDnHandshakeSecret();
+      // a new version but the cluster does not have this feature.
+      // In which case there will be no encrypted secret sent from NN.
+      BlockTokenIdentifier blockTokenIdentifier =
+          accessToken.decodeIdentifier();
+      if (blockTokenIdentifier != null) {
+        byte[] handshakeSecret =
+            accessToken.decodeIdentifier().getHandshakeMsg();
       if (handshakeSecret == null || handshakeSecret.length == 0) {
-        LOG.debug("Handshake secret is null, sending without "
-            + "handshake secret.");
+        LOG.debug("Handshake secret is null, "
+            + "sending without handshake secret.");
         sendSaslMessage(out, new byte[0]);
       } else {
         LOG.debug("Sending handshake secret.");
@@ -529,7 +541,12 @@ public class SaslDataTransferClient {
         identifier.readFields(new DataInputStream(
             new ByteArrayInputStream(accessToken.getIdentifier())));
         String bpid = identifier.getBlockPoolId();
-        sendSaslMessageHandshakeSecret(out, new byte[0], handshakeSecret, bpid);
+        sendSaslMessageHandshakeSecret(out, new byte[0],
+            handshakeSecret, bpid);
+        }
+      } else {
+        LOG.debug("Block token id is null, sending without handshake secret.");
+        sendSaslMessage(out, new byte[0]);
       }
 
       // step 1

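Note (not part of the commit): taken together, the SaslDataTransferClient hunks above replace the old Token-level secret with one that rides inside the BlockTokenIdentifier. The new updateToken() stores the negotiated QOP in the identifier, re-signs the token with the block key, and the handshake path reads it back through decodeIdentifier(). The following is a hedged, self-contained sketch of that round trip; the class name, the SAMPLE_QOP value, and the null handling are illustrative only.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import javax.crypto.SecretKey;

import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;

public class HandshakeSecretRoundTrip {
  // Illustrative value; in the commit the client uses saslProps.get(Sasl.QOP).
  private static final String SAMPLE_QOP = "auth-conf";

  // Sender side: fold the QOP into the identifier and re-sign the token,
  // mirroring the updateToken() helper added above.
  static void embed(Token<BlockTokenIdentifier> accessToken, SecretKey key)
      throws IOException {
    BlockTokenIdentifier id = accessToken.decodeIdentifier();
    if (id == null) {
      return; // unknown token kind; nothing to wrap
    }
    id.setHandshakeMsg(SAMPLE_QOP.getBytes(StandardCharsets.UTF_8));
    byte[] idBytes = id.getBytes();
    accessToken.setPassword(SecretManager.createPassword(idBytes, key));
    accessToken.setID(idBytes);
  }

  // Reading side: the secret now travels inside the identifier, so it is
  // recovered with decodeIdentifier().getHandshakeMsg() rather than the
  // removed Token#getDnHandshakeSecret().
  static byte[] read(Token<BlockTokenIdentifier> accessToken)
      throws IOException {
    BlockTokenIdentifier id = accessToken.decodeIdentifier();
    return id == null ? new byte[0] : id.getHandshakeMsg();
  }
}

Because nothing secret-specific is left on Token itself, the dnHandshakeSecret field, its accessors, and the TokenProto handshakeSecret field can all be deleted in the earlier hunks.
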
org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java

@@ -354,10 +354,6 @@ public class PBHelperClient {
         setPassword(getByteString(tok.getPassword())).
         setKindBytes(getFixedByteString(tok.getKind())).
         setServiceBytes(getFixedByteString(tok.getService()));
-    if (tok.getDnHandshakeSecret() != null) {
-      builder.setHandshakeSecret(
-          ByteString.copyFrom(tok.getDnHandshakeSecret()));
-    }
     return builder.build();
   }
 
@@ -779,6 +775,11 @@ public class PBHelperClient {
     for (String storageId : blockTokenSecret.getStorageIds()) {
       builder.addStorageIds(storageId);
     }
+
+    byte[] handshake = blockTokenSecret.getHandshakeMsg();
+    if (handshake != null && handshake.length > 0) {
+      builder.setHandshakeSecret(getByteString(handshake));
+    }
     return builder.build();
   }
 
@@ -835,9 +836,6 @@ public class PBHelperClient {
         new Token<>(blockToken.getIdentifier()
             .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
             blockToken.getKind()), new Text(blockToken.getService()));
-    if (blockToken.hasHandshakeSecret()) {
-      token.setDNHandshakeSecret(blockToken.getHandshakeSecret().toByteArray());
-    }
     return token;
   }
 

org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.security.token.block;
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
+import java.io.EOFException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -55,6 +56,7 @@ public class BlockTokenIdentifier extends TokenIdentifier {
   private StorageType[] storageTypes;
   private String[] storageIds;
   private boolean useProto;
+  private byte[] handshakeMsg;
 
   private byte [] cache;
 
@@ -76,6 +78,7 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     this.storageIds = Optional.ofNullable(storageIds)
         .orElse(new String[0]);
     this.useProto = useProto;
+    this.handshakeMsg = new byte[0];
   }
 
   @Override
@@ -134,6 +137,14 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     return storageIds;
   }
 
+  public byte[] getHandshakeMsg() {
+    return handshakeMsg;
+  }
+
+  public void setHandshakeMsg(byte[] bytes) {
+    handshakeMsg = bytes;
+  }
+
   @Override
   public String toString() {
     return "block_token_identifier (expiryDate=" + this.getExpiryDate()
@@ -241,6 +252,16 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     storageIds = readStorageIds;
 
     useProto = false;
+
+    try {
+      int handshakeMsgLen = WritableUtils.readVInt(in);
+      if (handshakeMsgLen != 0) {
+        handshakeMsg = new byte[handshakeMsgLen];
+        in.readFully(handshakeMsg);
+      }
+    } catch (EOFException eof) {
+
+    }
   }
 
   @VisibleForTesting
@@ -271,6 +292,13 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     storageIds = blockTokenSecretProto.getStorageIdsList().stream()
         .toArray(String[]::new);
     useProto = true;
+
+    if(blockTokenSecretProto.hasHandshakeSecret()) {
+      handshakeMsg = blockTokenSecretProto
+          .getHandshakeSecret().toByteArray();
+    } else {
+      handshakeMsg = new byte[0];
+    }
   }
 
   @Override
@@ -301,6 +329,10 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     for (String id: storageIds) {
       WritableUtils.writeString(out, id);
     }
+    if (handshakeMsg != null && handshakeMsg.length > 0) {
+      WritableUtils.writeVInt(out, handshakeMsg.length);
+      out.write(handshakeMsg);
+    }
   }
 
   @VisibleForTesting

hdfs.proto (message BlockTokenSecretProto)

@@ -671,4 +671,5 @@ message BlockTokenSecretProto {
   repeated AccessModeProto modes = 6;
   repeated StorageTypeProto storageTypes = 7;
   repeated string storageIds = 8;
+  optional bytes handshakeSecret = 9;
 }

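Note (not part of the commit): the new handshakeMsg field is serialized defensively on both paths. The legacy writable format only appends a vint-prefixed blob when the message is non-empty, readFieldsLegacy() tolerates streams from older writers by swallowing the EOFException, and the protobuf path round-trips it through the new optional handshakeSecret = 9 field. Below is a hedged sketch of the legacy round trip; the class name and constructor arguments are illustrative.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.EnumSet;

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier.AccessMode;

public class HandshakeMsgRoundTrip {
  public static void main(String[] args) throws IOException {
    // Arguments are illustrative; useProto=false exercises the legacy
    // writable path changed in readFieldsLegacy()/writeLegacy() above.
    BlockTokenIdentifier id = new BlockTokenIdentifier("user", "bp-1", 1L,
        EnumSet.of(AccessMode.READ), new StorageType[0], new String[0], false);
    id.setHandshakeMsg("auth".getBytes(StandardCharsets.UTF_8));

    BlockTokenIdentifier copy = new BlockTokenIdentifier();
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(id.getBytes())));

    // A writer without the field simply ends the stream early; the reader
    // swallows the EOFException and leaves handshakeMsg empty.
    System.out.println(new String(copy.getHandshakeMsg(),
        StandardCharsets.UTF_8));
  }
}
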
org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java

@@ -28,11 +28,9 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import javax.crypto.SecretKey;
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
 import javax.security.auth.callback.NameCallback;
@@ -52,15 +50,12 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto.DataTransferEncryptorStatus;
-import org.apache.hadoop.hdfs.security.token.block.BlockKey;
 import org.apache.hadoop.hdfs.security.token.block.BlockPoolTokenSecretManager;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.server.datanode.DNConf;
 import org.apache.hadoop.security.SaslPropertiesResolver;
-import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.SecretManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -348,21 +343,6 @@ public class SaslDataTransferServer {
     return identifier;
   }
 
-  private String examineSecret(byte[] secret, String bpid) {
-    BlockKey blockKey = blockPoolTokenSecretManager.get(bpid).getCurrentKey();
-    SecretKey secretKey = blockKey.getKey();
-    for (SaslRpcServer.QualityOfProtection qop :
-        SaslRpcServer.QualityOfProtection.values()) {
-      String qopString = qop.getSaslQop();
-      byte[] data = qopString.getBytes(Charsets.UTF_8);
-      byte[] encryptedData = SecretManager.createPassword(data, secretKey);
-      if (Arrays.equals(encryptedData, secret)) {
-        return qopString;
-      }
-    }
-    return null;
-  }
-
   @VisibleForTesting
   public String getNegotiatedQOP() {
     return negotiatedQOP;
@@ -399,12 +379,8 @@ public class SaslDataTransferServer {
     if (secret != null || bpid != null) {
       // sanity check, if one is null, the other must also not be null
       assert(secret != null && bpid != null);
-      String qop = examineSecret(secret, bpid);
-      if (qop != null) {
-        saslProps.put(Sasl.QOP, qop);
-      } else {
-        LOG.error("Unable to match secret to a QOP!");
-      }
+      String qop = new String(secret, Charsets.UTF_8);
+      saslProps.put(Sasl.QOP, qop);
     }
     SaslParticipant sasl = SaslParticipant.createServerSaslParticipant(
         saslProps, callbackHandler);

org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfs.security.token.block;
 
+import com.google.common.base.Charsets;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
@@ -29,6 +30,7 @@ import java.util.Iterator;
 import java.util.Map;
 
 import org.apache.commons.lang3.ArrayUtils;
+import org.apache.hadoop.ipc.Server;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -94,6 +96,8 @@ public class BlockTokenSecretManager extends
 
   private final boolean useProto;
 
+  private final boolean shouldWrapQOP;
+
   private final SecureRandom nonceGenerator = new SecureRandom();
 
   /**
@@ -112,7 +116,25 @@ public class BlockTokenSecretManager extends
       long tokenLifetime, String blockPoolId, String encryptionAlgorithm,
       boolean useProto) {
     this(false, keyUpdateInterval, tokenLifetime, blockPoolId,
-        encryptionAlgorithm, 0, 1, useProto);
+        encryptionAlgorithm, 0, 1, useProto, false);
+  }
+
+  public BlockTokenSecretManager(long keyUpdateInterval,
+      long tokenLifetime, int nnIndex, int numNNs, String blockPoolId,
+      String encryptionAlgorithm, boolean useProto) {
+    this(keyUpdateInterval, tokenLifetime, nnIndex, numNNs,
+        blockPoolId, encryptionAlgorithm, useProto, false);
+  }
+
+  public BlockTokenSecretManager(long keyUpdateInterval,
+      long tokenLifetime, int nnIndex, int numNNs, String blockPoolId,
+      String encryptionAlgorithm, boolean useProto, boolean shouldWrapQOP) {
+    this(true, keyUpdateInterval, tokenLifetime, blockPoolId,
+        encryptionAlgorithm, nnIndex, numNNs, useProto, shouldWrapQOP);
+    Preconditions.checkArgument(nnIndex >= 0);
+    Preconditions.checkArgument(numNNs > 0);
+    setSerialNo(new SecureRandom().nextInt());
+    generateKeys();
   }
 
   /**
@@ -125,21 +147,11 @@ public class BlockTokenSecretManager extends
    * @param encryptionAlgorithm encryption algorithm to use
    * @param numNNs number of namenodes possible
    * @param useProto should we use new protobuf style tokens
+   * @param shouldWrapQOP should wrap QOP in the block access token
    */
-  public BlockTokenSecretManager(long keyUpdateInterval,
-      long tokenLifetime, int nnIndex, int numNNs, String blockPoolId,
-      String encryptionAlgorithm, boolean useProto) {
-    this(true, keyUpdateInterval, tokenLifetime, blockPoolId,
-        encryptionAlgorithm, nnIndex, numNNs, useProto);
-    Preconditions.checkArgument(nnIndex >= 0);
-    Preconditions.checkArgument(numNNs > 0);
-    setSerialNo(new SecureRandom().nextInt());
-    generateKeys();
-  }
-
   private BlockTokenSecretManager(boolean isMaster, long keyUpdateInterval,
       long tokenLifetime, String blockPoolId, String encryptionAlgorithm,
-      int nnIndex, int numNNs, boolean useProto) {
+      int nnIndex, int numNNs, boolean useProto, boolean shouldWrapQOP) {
     this.nnIndex = nnIndex;
     this.isMaster = isMaster;
     this.keyUpdateInterval = keyUpdateInterval;
@@ -148,6 +160,7 @@ public class BlockTokenSecretManager extends
     this.blockPoolId = blockPoolId;
     this.encryptionAlgorithm = encryptionAlgorithm;
     this.useProto = useProto;
+    this.shouldWrapQOP = shouldWrapQOP;
     this.timer = new Timer();
     generateKeys();
   }
@@ -277,10 +290,16 @@ public class BlockTokenSecretManager extends
   /** Generate a block token for a specified user */
   public Token<BlockTokenIdentifier> generateToken(String userId,
       ExtendedBlock block, EnumSet<BlockTokenIdentifier.AccessMode> modes,
-      StorageType[] storageTypes, String[] storageIds) throws IOException {
+      StorageType[] storageTypes, String[] storageIds) {
     BlockTokenIdentifier id = new BlockTokenIdentifier(userId, block
         .getBlockPoolId(), block.getBlockId(), modes, storageTypes,
         storageIds, useProto);
+    if (shouldWrapQOP) {
+      String qop = Server.getEstablishedQOP();
+      if (qop != null) {
+        id.setHandshakeMsg(qop.getBytes(Charsets.UTF_8));
+      }
+    }
     return new Token<BlockTokenIdentifier>(id, this);
   }
 
@@ -523,18 +542,6 @@ public class BlockTokenSecretManager extends
     return createPassword(nonce, key.getKey());
   }
 
-  /**
-   * Encrypt the given message with the current block key, using the current
-   * block key.
-   *
-   * @param message the message to be encrypted.
-   * @return the secret created by encrypting the given message.
-   */
-  public byte[] secretGen(byte[] message) {
-    return createPassword(message, currentKey.getKey());
-  }
-
-  @VisibleForTesting
   public BlockKey getCurrentKey() {
     return currentKey;
   }

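Note (not part of the commit): with shouldWrapQOP enabled, generateToken() now copies Server.getEstablishedQOP() into the identifier instead of encrypting it separately, which is why secretGen() above and the DataNode-side examineSecret() matching loop can be removed. The following hedged sketch shows the new wiring; the class name, constructor arguments, and block values are illustrative, and outside an RPC handler no QOP is established so nothing is wrapped.

import java.nio.charset.StandardCharsets;
import java.util.EnumSet;

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
import org.apache.hadoop.security.token.Token;

public class QopWrappingSketch {
  public static void main(String[] args) throws Exception {
    // New 8-argument constructor added above; shouldWrapQOP=true makes
    // generateToken() copy Server.getEstablishedQOP() into the identifier.
    BlockTokenSecretManager btsm = new BlockTokenSecretManager(
        10 * 60 * 1000L, 10 * 60 * 1000L, 0, 1, "bp-1", "HmacSHA1",
        true /* useProto */, true /* shouldWrapQOP */);

    Token<BlockTokenIdentifier> token = btsm.generateToken("user",
        new ExtendedBlock("bp-1", 1L),
        EnumSet.of(BlockTokenIdentifier.AccessMode.READ),
        new StorageType[0], new String[0]);

    // A DataNode receiving this token now reads the QOP back as plain UTF-8
    // from the identifier, as SaslDataTransferServer does above.
    BlockTokenIdentifier id = token.decodeIdentifier();
    byte[] msg = id == null ? new byte[0] : id.getHandshakeMsg();
    System.out.println(msg.length == 0
        ? "no QOP wrapped" : new String(msg, StandardCharsets.UTF_8));
  }
}
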
org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 import static org.apache.hadoop.hdfs.protocol.BlockType.CONTIGUOUS;
 import static org.apache.hadoop.hdfs.protocol.BlockType.STRIPED;
 import static org.apache.hadoop.util.ExitUtil.terminate;
@@ -624,6 +625,9 @@ public class BlockManager implements BlockStatsMXBean {
         DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_PROTOBUF_ENABLE,
         DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_PROTOBUF_ENABLE_DEFAULT);
 
+    boolean shouldWrapQOP = conf.getBoolean(
+        DFS_NAMENODE_SEND_QOP_ENABLED, DFS_NAMENODE_SEND_QOP_ENABLED_DEFAULT);
+
     if (isHaEnabled) {
       // figure out which index we are of the nns
       Collection<String> nnIds = DFSUtilClient.getNameNodeIds(conf, nsId);
@@ -637,11 +641,11 @@ public class BlockManager implements BlockStatsMXBean {
       }
       return new BlockTokenSecretManager(updateMin * 60 * 1000L,
           lifetimeMin * 60 * 1000L, nnIndex, nnIds.size(), null,
-          encryptionAlgorithm, shouldWriteProtobufToken);
+          encryptionAlgorithm, shouldWriteProtobufToken, shouldWrapQOP);
     } else {
       return new BlockTokenSecretManager(updateMin*60*1000L,
           lifetimeMin*60*1000L, 0, 1, null, encryptionAlgorithm,
-          shouldWriteProtobufToken);
+          shouldWriteProtobufToken, shouldWrapQOP);
     }
   }
 

org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java

@@ -27,13 +27,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_HANDLER
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_HANDLER_COUNT_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_AUXILIARY_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SEND_QOP_ENABLED;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SEND_QOP_ENABLED_DEFAULT;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_PATH_DEPTH;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_PATH_LENGTH;
 import static org.apache.hadoop.util.Time.now;
 
-import com.google.common.base.Charsets;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -147,8 +144,6 @@ import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.ReconfigurationProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.ReconfigurationProtocolServerSideTranslatorPB;
-import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
-import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -269,8 +264,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
 
   private final String defaultECPolicyName;
 
-  private final boolean shouldSendQOP;
-
   public NameNodeRpcServer(Configuration conf, NameNode nn)
       throws IOException {
     this.nn = nn;
@@ -553,8 +546,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
         this.clientRpcServer.addAuxiliaryListener(auxiliaryPort);
       }
     }
-    this.shouldSendQOP = conf.getBoolean(
-        DFS_NAMENODE_SEND_QOP_ENABLED, DFS_NAMENODE_SEND_QOP_ENABLED_DEFAULT);
   }
 
   /** Allow access to the lifeline RPC server for testing */
@@ -762,11 +753,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     metrics.incrGetBlockLocations();
     LocatedBlocks locatedBlocks =
         namesystem.getBlockLocations(getClientMachine(), src, offset, length);
-    if (shouldSendQOP) {
-      for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
-        wrapEstablishedQOP(lb, getEstablishedClientQOP());
-      }
-    }
     return locatedBlocks;
   }
 
@@ -840,9 +826,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
       RetryCache.setState(cacheEntry, success, info);
     }
     metrics.incrFilesAppended();
-    if (shouldSendQOP) {
-      wrapEstablishedQOP(info.getLastBlock(), getEstablishedClientQOP());
-    }
     return info;
   }
 
@@ -911,9 +894,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     if (locatedBlock != null) {
      metrics.incrAddBlockOps();
    }
-    if (shouldSendQOP) {
-      wrapEstablishedQOP(locatedBlock, getEstablishedClientQOP());
-    }
     return locatedBlock;
   }
 
@@ -947,9 +927,6 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     LocatedBlock locatedBlock = namesystem.getAdditionalDatanode(src, fileId,
         blk, existings, existingStorageIDs, excludeSet, numAdditionalNodes,
         clientName);
-    if (shouldSendQOP) {
-      wrapEstablishedQOP(locatedBlock, getEstablishedClientQOP());
-    }
     return locatedBlock;
   }
   /**
@@ -1877,7 +1854,7 @@ public class NameNodeRpcServer implements NamenodeProtocols {
    *
    * @return the established QOP of this client.
    */
-  private static String getEstablishedClientQOP() {
+  public static String getEstablishedClientQOP() {
     return Server.getEstablishedQOP();
   }
 
@@ -2631,26 +2608,4 @@ public class NameNodeRpcServer implements NamenodeProtocols {
     }
     return namesystem.getBlockManager().getSPSManager().getNextPathId();
   }
-
-
-  /**
-   * Wrapping the QOP information into the LocatedBlock instance.
-   * The wrapped QOP will be used by DataNode, i.e. DataNode will simply use
-   * this QOP to accept client calls, because this this QOP is viewed
-   * as the QOP that NameNode has accepted.
-   *
-   * @param locatedBlock the LocatedBlock instance
-   * @param qop the QOP to wrap in
-   * @throws RuntimeException
-   */
-  private void wrapEstablishedQOP(LocatedBlock locatedBlock, String qop) {
-    if (qop == null || locatedBlock == null) {
-      return;
-    }
-    BlockTokenSecretManager btsm = namesystem.getBlockManager()
-        .getBlockTokenSecretManager();
-    Token<BlockTokenIdentifier> token = locatedBlock.getBlockToken();
-    byte[] secret = btsm.secretGen(qop.getBytes(Charsets.UTF_8));
-    token.setDNHandshakeSecret(secret);
-  }
 }

org/apache/hadoop/hdfs/TestBlockTokenWrappingQOP.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
-import javax.crypto.Mac;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CreateFlag;
@@ -32,7 +31,6 @@ import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferTestCase;
-import org.apache.hadoop.hdfs.security.token.block.BlockKey;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.security.TestPermission;
 import org.junit.After;
@@ -55,7 +53,6 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
 
   private HdfsConfiguration conf;
   private MiniDFSCluster cluster;
-  private String encryptionAlgorithm;
   private DistributedFileSystem dfs;
 
   private String configKey;
@@ -84,7 +81,6 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
     conf.setBoolean(DFS_NAMENODE_SEND_QOP_ENABLED, true);
     conf.set(HADOOP_RPC_PROTECTION, this.configKey);
     cluster = null;
-    encryptionAlgorithm = "HmacSHA1";
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
     cluster.waitActive();
   }
@@ -109,12 +105,8 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
 
     LocatedBlock lb = client.namenode.addBlock(src, clientName, null, null,
         HdfsConstants.GRANDFATHER_INODE_ID, null, null);
-    byte[] secret = lb.getBlockToken().getDnHandshakeSecret();
-    BlockKey currentKey = cluster.getNamesystem().getBlockManager()
-        .getBlockTokenSecretManager().getCurrentKey();
-    String decrypted = decryptMessage(secret, currentKey,
-        encryptionAlgorithm);
-    assertEquals(this.qopValue, decrypted);
+    byte[] secret = lb.getBlockToken().decodeIdentifier().getHandshakeMsg();
+    assertEquals(this.qopValue, new String(secret));
   }
 
   @Test
@@ -137,12 +129,8 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
         new EnumSetWritable<>(EnumSet.of(CreateFlag.APPEND)));
 
     byte[] secret = lastBlock.getLastBlock().getBlockToken()
-        .getDnHandshakeSecret();
-    BlockKey currentKey = cluster.getNamesystem().getBlockManager()
-        .getBlockTokenSecretManager().getCurrentKey();
-    String decrypted = decryptMessage(secret, currentKey,
-        encryptionAlgorithm);
-    assertEquals(this.qopValue, decrypted);
+        .decodeIdentifier().getHandshakeMsg();
+    assertEquals(this.qopValue, new String(secret));
   }
 
   @Test
@@ -164,27 +152,10 @@ public class TestBlockTokenWrappingQOP extends SaslDataTransferTestCase {
 
     assertTrue(lbs.getLocatedBlocks().size() > 0);
 
-    BlockKey currentKey = cluster.getNamesystem().getBlockManager()
-        .getBlockTokenSecretManager().getCurrentKey();
     for (LocatedBlock lb : lbs.getLocatedBlocks()) {
-      byte[] secret = lb.getBlockToken().getDnHandshakeSecret();
-      String decrypted = decryptMessage(secret, currentKey,
-          encryptionAlgorithm);
-      assertEquals(this.qopValue, decrypted);
+      byte[] secret = lb.getBlockToken()
+          .decodeIdentifier().getHandshakeMsg();
+      assertEquals(this.qopValue, new String(secret));
     }
   }
-
-  private String decryptMessage(byte[] secret, BlockKey key,
-      String algorithm) throws Exception {
-    String[] qops = {"auth", "auth-conf", "auth-int"};
-    Mac mac = Mac.getInstance(algorithm);
-    mac.init(key.getKey());
-    for (String qop : qops) {
-      byte[] encrypted = mac.doFinal(qop.getBytes());
-      if (Arrays.equals(encrypted, secret)) {
-        return qop;
-      }
-    }
-    return null;
-  }
 }