HDFS-7077. Separate CipherSuite from crypto protocol version. (wang)

(cherry picked from commit e96ce6f3e3)

Conflicts:
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
Andrew Wang 2014-09-25 18:40:40 -07:00
parent b923c291b4
commit 3f71a15813
24 changed files with 514 additions and 194 deletions
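At a high level, this change splits what used to be a single concept — the client advertising a List<CipherSuite> on create() — into two independent axes: the CipherSuite (how file data is encrypted) and the CryptoProtocolVersion (which client/NameNode encryption protocol is spoken). A minimal sketch of the distinction, using only classes introduced or modified in the diff below:

```java
import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.CryptoProtocolVersion;

public class CryptoVersionDemo {
  public static void main(String[] args) {
    // The client advertises the protocol versions it can speak to the
    // NameNode (currently just ENCRYPTION_ZONES)...
    for (CryptoProtocolVersion v : CryptoProtocolVersion.supported()) {
      System.out.println("client supports: " + v);
    }
    // ...while the cipher suite remains a per-zone / per-file property.
    CipherSuite suite = CipherSuite.AES_CTR_NOPADDING;
    System.out.println("suite: " + suite.getName());
  }
}
```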

View File

@@ -82,14 +82,9 @@ public abstract class CryptoCodec implements Configurable {
       }
     }
-    if (codec != null) {
     return codec;
-    }
-
-    throw new RuntimeException("No available crypto codec which meets " +
-        "the cipher suite " + cipherSuite.getName() + ".");
   }
 
   /**
    * Get crypto codec for algorithm/mode/padding in config value
    * hadoop.security.crypto.cipher.suite

View File

@@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.crypto;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Versions of the client/server protocol used for HDFS encryption.
*/
@InterfaceAudience.Private
public enum CryptoProtocolVersion {
UNKNOWN("Unknown", 1),
ENCRYPTION_ZONES("Encryption zones", 2);
private final String description;
private final int version;
private Integer unknownValue = null;
private static CryptoProtocolVersion[] supported = {ENCRYPTION_ZONES};
/**
* @return Array of supported protocol versions.
*/
public static CryptoProtocolVersion[] supported() {
return supported;
}
CryptoProtocolVersion(String description, int version) {
this.description = description;
this.version = version;
}
/**
* Returns if a given protocol version is supported.
*
* @param version version number
* @return true if the version is supported, else false
*/
public static boolean supports(CryptoProtocolVersion version) {
if (version.getVersion() == UNKNOWN.getVersion()) {
return false;
}
for (CryptoProtocolVersion v : CryptoProtocolVersion.values()) {
if (v.getVersion() == version.getVersion()) {
return true;
}
}
return false;
}
public void setUnknownValue(int unknown) {
this.unknownValue = unknown;
}
public int getUnknownValue() {
return unknownValue;
}
public String getDescription() {
return description;
}
public int getVersion() {
return version;
}
@Override
public String toString() {
return "CryptoProtocolVersion{" +
"description='" + description + '\'' +
", version=" + version +
", unknownValue=" + unknownValue +
'}';
}
}
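As a quick illustration of the semantics above: UNKNOWN is excluded from supports(), and setUnknownValue() lets the wire layer stash an unrecognized enum number. Note that because setUnknownValue() mutates the shared UNKNOWN constant, the stashed value is effectively global rather than per-instance. A small hedged sketch:

```java
import org.apache.hadoop.crypto.CryptoProtocolVersion;

public class VersionSupportDemo {
  public static void main(String[] args) {
    // ENCRYPTION_ZONES (version 2) is the only supported version today.
    System.out.println(CryptoProtocolVersion.supports(
        CryptoProtocolVersion.ENCRYPTION_ZONES)); // true
    // UNKNOWN (version 1) is explicitly rejected by supports().
    System.out.println(CryptoProtocolVersion.supports(
        CryptoProtocolVersion.UNKNOWN));          // false

    // A newer peer may send an enum number this class does not know; the
    // PB layer (see PBHelper below) maps it to UNKNOWN and stashes the raw
    // value for error reporting.
    CryptoProtocolVersion u = CryptoProtocolVersion.UNKNOWN;
    u.setUnknownValue(42);
    System.out.println(u.getUnknownValue());      // 42
  }
}
```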

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
@@ -32,6 +33,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
 public class FileEncryptionInfo {
 
   private final CipherSuite cipherSuite;
+  private final CryptoProtocolVersion version;
   private final byte[] edek;
   private final byte[] iv;
   private final String keyName;
@@ -47,9 +49,11 @@ public class FileEncryptionInfo {
    * @param ezKeyVersionName name of the KeyVersion used to encrypt the
    *                         encrypted data encryption key.
    */
-  public FileEncryptionInfo(final CipherSuite suite, final byte[] edek,
+  public FileEncryptionInfo(final CipherSuite suite,
+      final CryptoProtocolVersion version, final byte[] edek,
       final byte[] iv, final String keyName, final String ezKeyVersionName) {
     checkNotNull(suite);
+    checkNotNull(version);
     checkNotNull(edek);
     checkNotNull(iv);
     checkNotNull(keyName);
@@ -59,6 +63,7 @@ public class FileEncryptionInfo {
     checkArgument(iv.length == suite.getAlgorithmBlockSize(),
         "Unexpected IV length");
     this.cipherSuite = suite;
+    this.version = version;
     this.edek = edek;
     this.iv = iv;
     this.keyName = keyName;
@@ -73,6 +78,14 @@ public class FileEncryptionInfo {
     return cipherSuite;
   }
 
+  /**
+   * @return {@link org.apache.hadoop.crypto.CryptoProtocolVersion} to use
+   * to access the file.
+   */
+  public CryptoProtocolVersion getCryptoProtocolVersion() {
+    return version;
+  }
+
   /**
    * @return encrypted data encryption key (EDEK) for the file
    */
@@ -102,6 +115,7 @@ public class FileEncryptionInfo {
   public String toString() {
     StringBuilder builder = new StringBuilder("{");
     builder.append("cipherSuite: " + cipherSuite);
+    builder.append(", cryptoProtocolVersion: " + version);
     builder.append(", edek: " + Hex.encodeHexString(edek));
     builder.append(", iv: " + Hex.encodeHexString(iv));
     builder.append(", keyName: " + keyName);

View File

@@ -249,6 +249,8 @@ Release 2.6.0 - UNRELEASED
     HDFS-7119. Split error checks in AtomicFileOutputStream#close into separate
     conditions to improve diagnostics. (cnauroth)
 
+    HDFS-7077. Separate CipherSuite from crypto protocol version. (wang)
+
   OPTIMIZATIONS
 
     HDFS-6690. Deduplicate xattr names in memory. (wang)

View File

@@ -21,7 +21,6 @@ import static org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 import static org.apache.hadoop.crypto.key.KeyProviderCryptoExtension
     .EncryptedKeyVersion;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT;
@@ -104,6 +103,7 @@ import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.CryptoCodec;
 import org.apache.hadoop.crypto.CryptoInputStream;
 import org.apache.hadoop.crypto.CryptoOutputStream;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.BlockStorageLocation;
@@ -263,9 +263,6 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
   private static final DFSHedgedReadMetrics HEDGED_READ_METRIC =
       new DFSHedgedReadMetrics();
   private static ThreadPoolExecutor HEDGED_READ_THREAD_POOL;
-  private final CryptoCodec codec;
-  @VisibleForTesting
-  List<CipherSuite> cipherSuites;
   @VisibleForTesting
   KeyProviderCryptoExtension provider;
   /**
@@ -599,11 +596,6 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
     this.authority = nameNodeUri == null? "null": nameNodeUri.getAuthority();
     this.clientName = "DFSClient_" + dfsClientConf.taskId + "_" +
         DFSUtil.getRandom().nextInt()  + "_" + Thread.currentThread().getId();
-    this.codec = CryptoCodec.getInstance(conf);
-    this.cipherSuites = Lists.newArrayListWithCapacity(1);
-    if (codec != null) {
-      cipherSuites.add(codec.getCipherSuite());
-    }
     provider = DFSUtil.createKeyProviderCryptoExtension(conf);
     if (LOG.isDebugEnabled()) {
       if (provider == null) {
@@ -1329,6 +1321,55 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
     }
   }
 
+  /**
+   * Obtain the crypto protocol version from the provided FileEncryptionInfo,
+   * checking to see if this version is supported by.
+   *
+   * @param feInfo FileEncryptionInfo
+   * @return CryptoProtocolVersion from the feInfo
+   * @throws IOException if the protocol version is unsupported.
+   */
+  private static CryptoProtocolVersion getCryptoProtocolVersion
+      (FileEncryptionInfo feInfo) throws IOException {
+    final CryptoProtocolVersion version = feInfo.getCryptoProtocolVersion();
+    if (!CryptoProtocolVersion.supports(version)) {
+      throw new IOException("Client does not support specified " +
+          "CryptoProtocolVersion " + version.getDescription() + " version " +
+          "number" + version.getVersion());
+    }
+    return version;
+  }
+
+  /**
+   * Obtain a CryptoCodec based on the CipherSuite set in a FileEncryptionInfo
+   * and the available CryptoCodecs configured in the Configuration.
+   *
+   * @param conf Configuration
+   * @param feInfo FileEncryptionInfo
+   * @return CryptoCodec
+   * @throws IOException if no suitable CryptoCodec for the CipherSuite is
+   *                     available.
+   */
+  private static CryptoCodec getCryptoCodec(Configuration conf,
+      FileEncryptionInfo feInfo) throws IOException {
+    final CipherSuite suite = feInfo.getCipherSuite();
+    if (suite.equals(CipherSuite.UNKNOWN)) {
+      throw new IOException("NameNode specified unknown CipherSuite with ID "
+          + suite.getUnknownValue() + ", cannot instantiate CryptoCodec.");
+    }
+    final CryptoCodec codec = CryptoCodec.getInstance(conf, suite);
+    if (codec == null) {
+      throw new UnknownCipherSuiteException(
+          "No configuration found for the cipher suite "
+          + suite.getConfigSuffix() + " prefixed with "
+          + HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
+          + ". Please see the example configuration "
+          + "hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE "
+          + "at core-default.xml for details.");
+    }
+    return codec;
+  }
+
   /**
    * Wraps the stream in a CryptoInputStream if the underlying file is
    * encrypted.
@@ -1338,17 +1379,10 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
     final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
     if (feInfo != null) {
       // File is encrypted, wrap the stream in a crypto stream.
-      KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
-      CryptoCodec codec = CryptoCodec
-          .getInstance(conf, feInfo.getCipherSuite());
-      if (codec == null) {
-        throw new IOException("No configuration found for the cipher suite "
-            + feInfo.getCipherSuite().getConfigSuffix() + " prefixed with "
-            + HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
-            + ". Please see the example configuration "
-            + "hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE "
-            + "at core-default.xml for details.");
-      }
+      // Currently only one version, so no special logic based on the version #
+      getCryptoProtocolVersion(feInfo);
+      final CryptoCodec codec = getCryptoCodec(conf, feInfo);
+      final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
       final CryptoInputStream cryptoIn =
           new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
               feInfo.getIV());
@@ -1376,15 +1410,10 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
       FileSystem.Statistics statistics, long startPos) throws IOException {
     final FileEncryptionInfo feInfo = dfsos.getFileEncryptionInfo();
     if (feInfo != null) {
-      if (codec == null) {
-        throw new IOException("No configuration found for the cipher suite "
-            + HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY + " value prefixed with "
-            + HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
-            + ". Please see the example configuration "
-            + "hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE "
-            + "at core-default.xml for details.");
-      }
       // File is encrypted, wrap the stream in a crypto stream.
+      // Currently only one version, so no special logic based on the version #
+      getCryptoProtocolVersion(feInfo);
+      final CryptoCodec codec = getCryptoCodec(conf, feInfo);
       KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
       final CryptoOutputStream cryptoOut =
           new CryptoOutputStream(dfsos, codec,
@@ -1599,7 +1628,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
     final DFSOutputStream result = DFSOutputStream.newStreamForCreate(this,
         src, masked, flag, createParent, replication, blockSize, progress,
         buffersize, dfsClientConf.createChecksum(checksumOpt),
-        favoredNodeStrs, cipherSuites);
+        favoredNodeStrs);
     beginFileLease(result.getFileId(), result);
     return result;
   }
@@ -1646,7 +1675,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
       DataChecksum checksum = dfsClientConf.createChecksum(checksumOpt);
       result = DFSOutputStream.newStreamForCreate(this, src, absPermission,
           flag, createParent, replication, blockSize, progress, buffersize,
-          checksum, null, cipherSuites);
+          checksum, null);
     }
     beginFileLease(result.getFileId(), result);
     return result;
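The net effect on the client read/write paths is a fixed order of operations: verify the protocol version, resolve a codec for the suite, then decrypt the EDEK. A simplified, hedged sketch of the wrapping logic consolidated above (error handling and the final wrapping elided):

```java
// Simplified from createWrappedInputStream after this change.
FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
if (feInfo != null) {
  getCryptoProtocolVersion(feInfo);                 // IOException if unsupported
  CryptoCodec codec = getCryptoCodec(conf, feInfo); // IOException if no codec
  KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
  CryptoInputStream cryptoIn = new CryptoInputStream(
      dfsis, codec, decrypted.getMaterial(), feInfo.getIV());
  // ... wrap and return cryptoIn
}
```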

View File

@@ -43,7 +43,6 @@ import java.util.concurrent.atomic.AtomicReference;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.fs.CanSetDropBehind;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSOutputSummer;
@@ -54,6 +53,7 @@ import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
@@ -134,7 +134,10 @@ public class DFSOutputStream extends FSOutputSummer
    * errors (typically related to encryption zones and KeyProvider operations).
    */
   @VisibleForTesting
-  public static final int CREATE_RETRY_COUNT = 10;
+  static final int CREATE_RETRY_COUNT = 10;
+  @VisibleForTesting
+  static CryptoProtocolVersion[] SUPPORTED_CRYPTO_VERSIONS =
+      CryptoProtocolVersion.supported();
 
   private final DFSClient dfsClient;
   private Socket s;
@@ -1655,8 +1658,7 @@ public class DFSOutputStream extends FSOutputSummer
   static DFSOutputStream newStreamForCreate(DFSClient dfsClient, String src,
       FsPermission masked, EnumSet<CreateFlag> flag, boolean createParent,
       short replication, long blockSize, Progressable progress, int buffersize,
-      DataChecksum checksum, String[] favoredNodes,
-      List<CipherSuite> cipherSuites) throws IOException {
+      DataChecksum checksum, String[] favoredNodes) throws IOException {
     HdfsFileStatus stat = null;
 
     // Retry the create if we get a RetryStartFileException up to a maximum
@@ -1668,7 +1670,7 @@ public class DFSOutputStream extends FSOutputSummer
       try {
         stat = dfsClient.namenode.create(src, masked, dfsClient.clientName,
             new EnumSetWritable<CreateFlag>(flag), createParent, replication,
-            blockSize, cipherSuites);
+            blockSize, SUPPORTED_CRYPTO_VERSIONS);
         break;
       } catch (RemoteException re) {
         IOException e = re.unwrapRemoteException(
@@ -1682,7 +1684,7 @@ public class DFSOutputStream extends FSOutputSummer
             SafeModeException.class,
             UnresolvedPathException.class,
             SnapshotAccessControlException.class,
-            UnknownCipherSuiteException.class);
+            UnknownCryptoProtocolVersionException.class);
         if (e instanceof RetryStartFileException) {
           if (retryCount > 0) {
             shouldRetry = true;

View File

@@ -23,15 +23,12 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * Thrown when an unknown cipher suite is encountered.
+ */
 @InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceStability.Stable
 public class UnknownCipherSuiteException extends IOException {
-  private static final long serialVersionUID = 8957192l;
-
-  public UnknownCipherSuiteException() {
-    super();
-  }
-
   public UnknownCipherSuiteException(String msg) {
     super(msg);
   }

View File

@@ -0,0 +1,38 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class UnknownCryptoProtocolVersionException extends IOException {
private static final long serialVersionUID = 8957192l;
public UnknownCryptoProtocolVersionException() {
super();
}
public UnknownCryptoProtocolVersionException(String msg) {
super(msg);
}
}

View File

@@ -24,7 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
 import org.apache.hadoop.fs.CacheFlag;
 import org.apache.hadoop.fs.ContentSummary;
@@ -163,6 +163,7 @@ public interface ClientProtocol {
    * @param createParent create missing parent directory if true
    * @param replication block replication factor.
    * @param blockSize maximum block size.
+   * @param supportedVersions CryptoProtocolVersions supported by the client
    *
    * @return the status of the created file, it could be null if the server
    *           doesn't support returning the file status
@@ -191,7 +192,7 @@ public interface ClientProtocol {
   public HdfsFileStatus create(String src, FsPermission masked,
       String clientName, EnumSetWritable<CreateFlag> flag,
       boolean createParent, short replication, long blockSize,
-      List<CipherSuite> cipherSuites)
+      CryptoProtocolVersion[] supportedVersions)
       throws AccessControlException, AlreadyBeingCreatedException,
       DSQuotaExceededException, FileAlreadyExistsException,
       FileNotFoundException, NSQuotaExceededException,
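A caller's-eye sketch of the revised signature (fragment with placeholder values; namenode and clientName are assumed in-scope DFSClient fields, and in practice DFSOutputStream passes its SUPPORTED_CRYPTO_VERSIONS array, which is CryptoProtocolVersion.supported()):

```java
HdfsFileStatus stat = namenode.create(
    "/ez/file", FsPermission.getFileDefault(), clientName,
    new EnumSetWritable<CreateFlag>(EnumSet.of(CreateFlag.CREATE)),
    true,                               // createParent
    (short) 3, 128 * 1024 * 1024L,      // replication, blockSize
    CryptoProtocolVersion.supported()); // supportedVersions
```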

View File

@@ -22,6 +22,7 @@ import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 
 /**
  * A simple class for representing an encryption zone. Presently an encryption
@@ -35,13 +36,15 @@ public class EncryptionZone {
   private final long id;
   private final String path;
   private final CipherSuite suite;
+  private final CryptoProtocolVersion version;
   private final String keyName;
 
-  public EncryptionZone(long id, String path,
-      CipherSuite suite, String keyName) {
+  public EncryptionZone(long id, String path, CipherSuite suite,
+      CryptoProtocolVersion version, String keyName) {
     this.id = id;
     this.path = path;
     this.suite = suite;
+    this.version = version;
     this.keyName = keyName;
   }
@@ -57,15 +60,20 @@ public class EncryptionZone {
     return suite;
   }
 
+  public CryptoProtocolVersion getVersion() { return version; }
+
   public String getKeyName() {
     return keyName;
   }
 
   @Override
   public int hashCode() {
-    return new HashCodeBuilder(13, 31).
-        append(id).append(path).
-        append(suite).append(keyName).
+    return new HashCodeBuilder(13, 31)
+        .append(id)
+        .append(path)
+        .append(suite)
+        .append(version)
+        .append(keyName).
         toHashCode();
   }
@@ -86,6 +94,7 @@ public class EncryptionZone {
       append(id, rhs.id).
       append(path, rhs.path).
       append(suite, rhs.suite).
+      append(version, rhs.version).
       append(keyName, rhs.keyName).
       isEquals();
   }
@@ -95,6 +104,7 @@ public class EncryptionZone {
     return "EncryptionZone [id=" + id +
         ", path=" + path +
         ", suite=" + suite +
+        ", version=" + version +
         ", keyName=" + keyName + "]";
   }
 }
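Since version now participates in equals() and hashCode(), two zones differing only in protocol version compare unequal; a small hedged illustration:

```java
EncryptionZone a = new EncryptionZone(1, "/ez",
    CipherSuite.AES_CTR_NOPADDING,
    CryptoProtocolVersion.ENCRYPTION_ZONES, "key1");
EncryptionZone b = new EncryptionZone(1, "/ez",
    CipherSuite.AES_CTR_NOPADDING,
    CryptoProtocolVersion.UNKNOWN, "key1");
System.out.println(a.equals(b)); // false: version is part of equality now
```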

View File

@@ -395,7 +395,8 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
           PBHelper.convert(req.getMasked()), req.getClientName(),
           PBHelper.convertCreateFlag(req.getCreateFlag()), req.getCreateParent(),
           (short) req.getReplication(), req.getBlockSize(),
-          PBHelper.convertCipherSuiteProtos(req.getCipherSuitesList()));
+          PBHelper.convertCryptoProtocolVersions(
+              req.getCryptoProtocolVersionList()));
 
       if (result != null) {
         return CreateResponseProto.newBuilder().setFs(PBHelper.convert(result))

View File

@@ -28,6 +28,7 @@ import com.google.common.collect.Lists;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
 import org.apache.hadoop.fs.CacheFlag;
 import org.apache.hadoop.fs.ContentSummary;
@@ -274,7 +275,7 @@ public class ClientNamenodeProtocolTranslatorPB implements
   public HdfsFileStatus create(String src, FsPermission masked,
       String clientName, EnumSetWritable<CreateFlag> flag,
       boolean createParent, short replication, long blockSize,
-      List<CipherSuite> cipherSuites)
+      CryptoProtocolVersion[] supportedVersions)
       throws AccessControlException, AlreadyBeingCreatedException,
       DSQuotaExceededException, FileAlreadyExistsException,
       FileNotFoundException, NSQuotaExceededException,
@@ -288,9 +289,7 @@ public class ClientNamenodeProtocolTranslatorPB implements
         .setCreateParent(createParent)
         .setReplication(replication)
         .setBlockSize(blockSize);
-    if (cipherSuites != null) {
-      builder.addAllCipherSuites(PBHelper.convertCipherSuites(cipherSuites));
-    }
+    builder.addAllCryptoProtocolVersion(PBHelper.convert(supportedVersions));
     CreateRequestProto req = builder.build();
     try {
       CreateResponseProto res = rpcProxy.create(null, req);

View File

@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.protocolPB;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos
     .EncryptionZoneProto;
+import static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CipherSuiteProto;
+import static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CryptoProtocolVersionProto;
 
 import java.io.EOFException;
 import java.io.IOException;
@@ -59,6 +61,7 @@ import org.apache.hadoop.hdfs.protocol.CachePoolStats;
 import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
@@ -2392,15 +2395,17 @@ public class PBHelper {
   public static EncryptionZoneProto convert(EncryptionZone zone) {
     return EncryptionZoneProto.newBuilder()
         .setId(zone.getId())
-        .setKeyName(zone.getKeyName())
         .setPath(zone.getPath())
         .setSuite(convert(zone.getSuite()))
+        .setCryptoProtocolVersion(convert(zone.getVersion()))
+        .setKeyName(zone.getKeyName())
         .build();
   }
 
   public static EncryptionZone convert(EncryptionZoneProto proto) {
     return new EncryptionZone(proto.getId(), proto.getPath(),
-        convert(proto.getSuite()), proto.getKeyName());
+        convert(proto.getSuite()), convert(proto.getCryptoProtocolVersion()),
+        proto.getKeyName());
   }
 
   public static ShortCircuitShmSlotProto convert(SlotId slotId) {
@@ -2670,18 +2675,18 @@ public class PBHelper {
         builder.build()).build();
   }
 
-  public static HdfsProtos.CipherSuite convert(CipherSuite suite) {
+  public static CipherSuiteProto convert(CipherSuite suite) {
     switch (suite) {
     case UNKNOWN:
-      return HdfsProtos.CipherSuite.UNKNOWN;
+      return CipherSuiteProto.UNKNOWN;
     case AES_CTR_NOPADDING:
-      return HdfsProtos.CipherSuite.AES_CTR_NOPADDING;
+      return CipherSuiteProto.AES_CTR_NOPADDING;
     default:
       return null;
     }
   }
 
-  public static CipherSuite convert(HdfsProtos.CipherSuite proto) {
+  public static CipherSuite convert(CipherSuiteProto proto) {
     switch (proto) {
     case AES_CTR_NOPADDING:
       return CipherSuite.AES_CTR_NOPADDING;
@@ -2693,26 +2698,49 @@ public class PBHelper {
     }
   }
 
-  public static List<HdfsProtos.CipherSuite> convertCipherSuites
-      (List<CipherSuite> suites) {
-    if (suites == null) {
-      return null;
-    }
-    List<HdfsProtos.CipherSuite> protos =
-        Lists.newArrayListWithCapacity(suites.size());
-    for (CipherSuite suite : suites) {
-      protos.add(convert(suite));
+  public static List<CryptoProtocolVersionProto> convert(
+      CryptoProtocolVersion[] versions) {
+    List<CryptoProtocolVersionProto> protos =
+        Lists.newArrayListWithCapacity(versions.length);
+    for (CryptoProtocolVersion v: versions) {
+      protos.add(convert(v));
     }
     return protos;
   }
 
-  public static List<CipherSuite> convertCipherSuiteProtos(
-      List<HdfsProtos.CipherSuite> protos) {
-    List<CipherSuite> suites = Lists.newArrayListWithCapacity(protos.size());
-    for (HdfsProtos.CipherSuite proto : protos) {
-      suites.add(convert(proto));
-    }
-    return suites;
+  public static CryptoProtocolVersion[] convertCryptoProtocolVersions(
+      List<CryptoProtocolVersionProto> protos) {
+    List<CryptoProtocolVersion> versions =
+        Lists.newArrayListWithCapacity(protos.size());
+    for (CryptoProtocolVersionProto p: protos) {
+      versions.add(convert(p));
+    }
+    return versions.toArray(new CryptoProtocolVersion[] {});
+  }
+
+  public static CryptoProtocolVersion convert(CryptoProtocolVersionProto
+      proto) {
+    switch(proto) {
+    case ENCRYPTION_ZONES:
+      return CryptoProtocolVersion.ENCRYPTION_ZONES;
+    default:
+      // Set to UNKNOWN and stash the unknown enum value
+      CryptoProtocolVersion version = CryptoProtocolVersion.UNKNOWN;
+      version.setUnknownValue(proto.getNumber());
+      return version;
+    }
+  }
+
+  public static CryptoProtocolVersionProto convert(CryptoProtocolVersion
+      version) {
+    switch(version) {
+    case UNKNOWN:
+      return CryptoProtocolVersionProto.UNKNOWN_PROTOCOL_VERSION;
+    case ENCRYPTION_ZONES:
+      return CryptoProtocolVersionProto.ENCRYPTION_ZONES;
+    default:
+      return null;
+    }
   }
 
   public static HdfsProtos.FileEncryptionInfoProto convert(
@@ -2722,6 +2750,7 @@ public class PBHelper {
     }
     return HdfsProtos.FileEncryptionInfoProto.newBuilder()
         .setSuite(convert(info.getCipherSuite()))
+        .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
         .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
         .setIv(getByteString(info.getIV()))
         .setEzKeyVersionName(info.getEzKeyVersionName())
@@ -2742,12 +2771,13 @@ public class PBHelper {
   }
 
   public static HdfsProtos.ZoneEncryptionInfoProto convert(
-      CipherSuite suite, String keyName) {
-    if (suite == null || keyName == null) {
+      CipherSuite suite, CryptoProtocolVersion version, String keyName) {
+    if (suite == null || version == null || keyName == null) {
       return null;
     }
     return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
         .setSuite(convert(suite))
+        .setCryptoProtocolVersion(convert(version))
         .setKeyName(keyName)
         .build();
   }
@@ -2758,23 +2788,27 @@ public class PBHelper {
       return null;
     }
     CipherSuite suite = convert(proto.getSuite());
+    CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
    byte[] key = proto.getKey().toByteArray();
     byte[] iv = proto.getIv().toByteArray();
     String ezKeyVersionName = proto.getEzKeyVersionName();
     String keyName = proto.getKeyName();
-    return new FileEncryptionInfo(suite, key, iv, keyName, ezKeyVersionName);
+    return new FileEncryptionInfo(suite, version, key, iv, keyName,
+        ezKeyVersionName);
   }
 
   public static FileEncryptionInfo convert(
       HdfsProtos.PerFileEncryptionInfoProto fileProto,
-      CipherSuite suite, String keyName) {
-    if (fileProto == null || suite == null || keyName == null) {
+      CipherSuite suite, CryptoProtocolVersion version, String keyName) {
+    if (fileProto == null || suite == null || version == null ||
+        keyName == null) {
       return null;
     }
     byte[] key = fileProto.getKey().toByteArray();
     byte[] iv = fileProto.getIv().toByteArray();
     String ezKeyVersionName = fileProto.getEzKeyVersionName();
-    return new FileEncryptionInfo(suite, key, iv, keyName, ezKeyVersionName);
+    return new FileEncryptionInfo(suite, version, key, iv, keyName,
+        ezKeyVersionName);
   }
 }
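A hedged round-trip sketch of the new converters; the forward-compatibility path is the interesting part, since an enum number from a newer peer lands on UNKNOWN with the raw value stashed:

```java
// Known version: round-trips exactly (the switch returns the enum constant).
CryptoProtocolVersionProto p =
    PBHelper.convert(CryptoProtocolVersion.ENCRYPTION_ZONES);
System.out.println(
    PBHelper.convert(p) == CryptoProtocolVersion.ENCRYPTION_ZONES); // true

// Array <-> repeated-field helpers used by the create RPC.
List<CryptoProtocolVersionProto> protos =
    PBHelper.convert(CryptoProtocolVersion.supported());
CryptoProtocolVersion[] back = PBHelper.convertCryptoProtocolVersions(protos);
System.out.println(back[0]); // ENCRYPTION_ZONES
```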

View File

@@ -27,6 +27,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttrSetFlag;
@@ -57,7 +58,8 @@ public class EncryptionZoneManager {
       .class);
 
   public static final EncryptionZone NULL_EZ =
-      new EncryptionZone(-1, "", CipherSuite.UNKNOWN, "");
+      new EncryptionZone(-1, "", CipherSuite.UNKNOWN,
+          CryptoProtocolVersion.UNKNOWN, "");
 
 /**
  * EncryptionZoneInt is the internal representation of an encryption zone. The
@@ -67,11 +69,16 @@ public class EncryptionZoneManager {
   private static class EncryptionZoneInt {
     private final long inodeId;
     private final CipherSuite suite;
+    private final CryptoProtocolVersion version;
     private final String keyName;
 
-    EncryptionZoneInt(long inodeId, CipherSuite suite, String keyName) {
+    EncryptionZoneInt(long inodeId, CipherSuite suite,
+        CryptoProtocolVersion version, String keyName) {
+      Preconditions.checkArgument(suite != CipherSuite.UNKNOWN);
+      Preconditions.checkArgument(version != CryptoProtocolVersion.UNKNOWN);
      this.inodeId = inodeId;
      this.suite = suite;
+      this.version = version;
      this.keyName = keyName;
    }
@@ -83,6 +90,8 @@ public class EncryptionZoneManager {
       return suite;
     }
 
+    CryptoProtocolVersion getVersion() { return version; }
+
     String getKeyName() {
       return keyName;
     }
@@ -118,9 +127,10 @@ public class EncryptionZoneManager {
    * @param inodeId of the encryption zone
    * @param keyName encryption zone key name
    */
-  void addEncryptionZone(Long inodeId, CipherSuite suite, String keyName) {
+  void addEncryptionZone(Long inodeId, CipherSuite suite,
+      CryptoProtocolVersion version, String keyName) {
     assert dir.hasWriteLock();
-    unprotectedAddEncryptionZone(inodeId, suite, keyName);
+    unprotectedAddEncryptionZone(inodeId, suite, version, keyName);
   }
 
   /**
@@ -132,9 +142,9 @@ public class EncryptionZoneManager {
    * @param keyName encryption zone key name
    */
   void unprotectedAddEncryptionZone(Long inodeId,
-      CipherSuite suite, String keyName) {
+      CipherSuite suite, CryptoProtocolVersion version, String keyName) {
     final EncryptionZoneInt ez = new EncryptionZoneInt(
-        inodeId, suite, keyName);
+        inodeId, suite, version, keyName);
     encryptionZones.put(inodeId, ez);
   }
@@ -219,7 +229,7 @@ public class EncryptionZoneManager {
       return NULL_EZ;
     } else {
       return new EncryptionZone(ezi.getINodeId(), getFullPathName(ezi),
-          ezi.getSuite(), ezi.getKeyName());
+          ezi.getSuite(), ezi.getVersion(), ezi.getKeyName());
     }
   }
@@ -275,7 +285,8 @@ public class EncryptionZoneManager {
    * <p/>
    * Called while holding the FSDirectory lock.
    */
-  XAttr createEncryptionZone(String src, CipherSuite suite, String keyName)
+  XAttr createEncryptionZone(String src, CipherSuite suite,
+      CryptoProtocolVersion version, String keyName)
       throws IOException {
     assert dir.hasWriteLock();
     if (dir.isNonEmptyDirectory(src)) {
@@ -296,7 +307,7 @@ public class EncryptionZoneManager {
     }
 
     final HdfsProtos.ZoneEncryptionInfoProto proto =
-        PBHelper.convert(suite, keyName);
+        PBHelper.convert(suite, version, keyName);
     final XAttr ezXAttr = XAttrHelper
         .buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, proto.toByteArray());
@@ -341,7 +352,7 @@ public class EncryptionZoneManager {
       }
       // Add the EZ to the result list
       zones.add(new EncryptionZone(ezi.getINodeId(), pathName,
-          ezi.getSuite(), ezi.getKeyName()));
+          ezi.getSuite(), ezi.getVersion(), ezi.getKeyName()));
       count++;
       if (count >= numResponses) {
         break;

View File

@@ -38,6 +38,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileEncryptionInfo;
@@ -2173,6 +2174,7 @@ public class FSDirectory implements Closeable {
               xattr.getValue());
           ezManager.unprotectedAddEncryptionZone(inode.getId(),
               PBHelper.convert(ezProto.getSuite()),
+              PBHelper.convert(ezProto.getCryptoProtocolVersion()),
               ezProto.getKeyName());
         } catch (InvalidProtocolBufferException e) {
           NameNode.LOG.warn("Error parsing protocol buffer of " +
@@ -2766,11 +2768,12 @@ public class FSDirectory implements Closeable {
     }
   }
 
-  XAttr createEncryptionZone(String src, CipherSuite suite, String keyName)
+  XAttr createEncryptionZone(String src, CipherSuite suite,
+      CryptoProtocolVersion version, String keyName)
       throws IOException {
     writeLock();
     try {
-      return ezManager.createEncryptionZone(src, suite, keyName);
+      return ezManager.createEncryptionZone(src, suite, version, keyName);
     } finally {
       writeUnlock();
     }
@@ -2854,8 +2857,9 @@ public class FSDirectory implements Closeable {
       }
     }
 
-    CipherSuite suite = encryptionZone.getSuite();
-    String keyName = encryptionZone.getKeyName();
+    final CryptoProtocolVersion version = encryptionZone.getVersion();
+    final CipherSuite suite = encryptionZone.getSuite();
+    final String keyName = encryptionZone.getKeyName();
 
     XAttr fileXAttr = unprotectedGetXAttrByName(inode, snapshotId,
         CRYPTO_XATTR_FILE_ENCRYPTION_INFO);
@@ -2871,7 +2875,7 @@ public class FSDirectory implements Closeable {
       HdfsProtos.PerFileEncryptionInfoProto fileProto =
           HdfsProtos.PerFileEncryptionInfoProto.parseFrom(
               fileXAttr.getValue());
-      return PBHelper.convert(fileProto, suite, keyName);
+      return PBHelper.convert(fileProto, suite, version, keyName);
     } catch (InvalidProtocolBufferException e) {
       throw new IOException("Could not parse file encryption info for " +
           "inode " + inode, e);
@@ -2914,6 +2918,7 @@ public class FSDirectory implements Closeable {
           HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
       ezManager.addEncryptionZone(inode.getId(),
           PBHelper.convert(ezProto.getSuite()),
+          PBHelper.convert(ezProto.getCryptoProtocolVersion()),
           ezProto.getKeyName());
     }

View File

@ -136,6 +136,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CipherSuite; import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.crypto.key.KeyProvider; import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.CryptoCodec; import org.apache.hadoop.crypto.CryptoCodec;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension; import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
@ -170,9 +171,8 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.StorageType; import org.apache.hadoop.hdfs.UnknownCryptoProtocolVersionException;
import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.UnknownCipherSuiteException;
import org.apache.hadoop.hdfs.protocol.AclException; import org.apache.hadoop.hdfs.protocol.AclException;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException; import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.Block;
@ -2358,46 +2358,41 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
/** /**
* If the file is within an encryption zone, select the appropriate * If the file is within an encryption zone, select the appropriate
* CipherSuite from the list provided by the client. Since the client may * CryptoProtocolVersion from the list provided by the client. Since the
* be newer, need to handle unknown CipherSuites. * client may be newer, we need to handle unknown versions.
* *
* @param srcIIP path of the file * @param zone EncryptionZone of the file
* @param cipherSuites client-provided list of supported CipherSuites, * @param supportedVersions List of supported protocol versions
* in desired order. * @return chosen protocol version
* @return chosen CipherSuite, or null if file is not in an EncryptionZone
* @throws IOException * @throws IOException
*/ */
private CipherSuite chooseCipherSuite(INodesInPath srcIIP, List<CipherSuite> private CryptoProtocolVersion chooseProtocolVersion(EncryptionZone zone,
cipherSuites) CryptoProtocolVersion[] supportedVersions)
throws UnknownCipherSuiteException, UnresolvedLinkException, throws UnknownCryptoProtocolVersionException, UnresolvedLinkException,
SnapshotAccessControlException { SnapshotAccessControlException {
// Not in an EZ Preconditions.checkNotNull(zone);
if (!dir.isInAnEZ(srcIIP)) { Preconditions.checkNotNull(supportedVersions);
return null; // Right now, we only support a single protocol version,
} // so simply look for it in the list of provided options
CipherSuite chosen = null; final CryptoProtocolVersion required = zone.getVersion();
for (CipherSuite c : cipherSuites) {
if (c.equals(CipherSuite.UNKNOWN)) { for (CryptoProtocolVersion c : supportedVersions) {
if (c.equals(CryptoProtocolVersion.UNKNOWN)) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("Ignoring unknown CipherSuite provided by client: " LOG.debug("Ignoring unknown CryptoProtocolVersion provided by " +
+ c.getUnknownValue()); "client: " + c.getUnknownValue());
} }
continue; continue;
} }
for (CipherSuite supported : CipherSuite.values()) { if (c.equals(required)) {
if (supported.equals(c)) { return c;
chosen = c;
break;
} }
} }
} throw new UnknownCryptoProtocolVersionException(
if (chosen == null) { "No crypto protocol versions provided by the client are supported."
throw new UnknownCipherSuiteException( + " Client provided: " + Arrays.toString(supportedVersions)
"No cipher suites provided by the client are supported." + " NameNode supports: " + Arrays.toString(CryptoProtocolVersion
+ " Client provided: " + Arrays.toString(cipherSuites.toArray()) .values()));
+ " NameNode supports: " + Arrays.toString(CipherSuite.values()));
}
return chosen;
} }
/** /**
@ -2433,7 +2428,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
HdfsFileStatus startFile(String src, PermissionStatus permissions, HdfsFileStatus startFile(String src, PermissionStatus permissions,
String holder, String clientMachine, EnumSet<CreateFlag> flag, String holder, String clientMachine, EnumSet<CreateFlag> flag,
boolean createParent, short replication, long blockSize, boolean createParent, short replication, long blockSize,
List<CipherSuite> cipherSuites) CryptoProtocolVersion[] supportedVersions)
throws AccessControlException, SafeModeException, throws AccessControlException, SafeModeException,
FileAlreadyExistsException, UnresolvedLinkException, FileAlreadyExistsException, UnresolvedLinkException,
FileNotFoundException, ParentNotDirectoryException, IOException { FileNotFoundException, ParentNotDirectoryException, IOException {
@ -2446,7 +2441,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
try { try {
status = startFileInt(src, permissions, holder, clientMachine, flag, status = startFileInt(src, permissions, holder, clientMachine, flag,
createParent, replication, blockSize, cipherSuites, createParent, replication, blockSize, supportedVersions,
cacheEntry != null); cacheEntry != null);
} catch (AccessControlException e) { } catch (AccessControlException e) {
logAuditEvent(false, "create", src); logAuditEvent(false, "create", src);
@ -2460,7 +2455,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
private HdfsFileStatus startFileInt(final String srcArg, private HdfsFileStatus startFileInt(final String srcArg,
PermissionStatus permissions, String holder, String clientMachine, PermissionStatus permissions, String holder, String clientMachine,
EnumSet<CreateFlag> flag, boolean createParent, short replication, EnumSet<CreateFlag> flag, boolean createParent, short replication,
-      long blockSize, List<CipherSuite> cipherSuites, boolean logRetryCache)
+      long blockSize, CryptoProtocolVersion[] supportedVersions,
+      boolean logRetryCache)
       throws AccessControlException, SafeModeException,
       FileAlreadyExistsException, UnresolvedLinkException,
       FileNotFoundException, ParentNotDirectoryException, IOException {
@@ -2474,9 +2470,9 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
           + ", replication=" + replication
           + ", createFlag=" + flag.toString()
           + ", blockSize=" + blockSize);
-      builder.append(", cipherSuites=");
-      if (cipherSuites != null) {
-        builder.append(Arrays.toString(cipherSuites.toArray()));
+      builder.append(", supportedVersions=");
+      if (supportedVersions != null) {
+        builder.append(Arrays.toString(supportedVersions));
       } else {
         builder.append("null");
       }
@@ -2514,6 +2510,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
      * special RetryStartFileException to ask the DFSClient to try the create
      * again later.
      */
+    CryptoProtocolVersion protocolVersion = null;
     CipherSuite suite = null;
     String ezKeyName = null;
     readLock();
@@ -2522,13 +2519,16 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       INodesInPath iip = dir.getINodesInPath4Write(src);
       // Nothing to do if the path is not within an EZ
       if (dir.isInAnEZ(iip)) {
-        suite = chooseCipherSuite(iip, cipherSuites);
-        if (suite != null) {
-          Preconditions.checkArgument(!suite.equals(CipherSuite.UNKNOWN),
-              "Chose an UNKNOWN CipherSuite!");
-        }
-        ezKeyName = dir.getKeyName(iip);
-        Preconditions.checkState(ezKeyName != null);
+        EncryptionZone zone = dir.getEZForPath(iip);
+        protocolVersion = chooseProtocolVersion(zone, supportedVersions);
+        suite = zone.getSuite();
+        ezKeyName = dir.getKeyName(iip);
+
+        Preconditions.checkNotNull(protocolVersion);
+        Preconditions.checkNotNull(suite);
+        Preconditions.checkArgument(!suite.equals(CipherSuite.UNKNOWN),
+            "Chose an UNKNOWN CipherSuite!");
+        Preconditions.checkNotNull(ezKeyName);
       }
     } finally {
       readUnlock();
@@ -2554,7 +2554,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       src = resolvePath(src, pathComponents);
       toRemoveBlocks = startFileInternal(pc, src, permissions, holder,
           clientMachine, create, overwrite, createParent, replication,
-          blockSize, suite, edek, logRetryCache);
+          blockSize, suite, protocolVersion, edek, logRetryCache);
       stat = dir.getFileInfo(src, false,
           FSDirectory.isReservedRawName(srcArg), false);
     } catch (StandbyException se) {
@@ -2590,7 +2590,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       String src, PermissionStatus permissions, String holder,
       String clientMachine, boolean create, boolean overwrite,
       boolean createParent, short replication, long blockSize,
-      CipherSuite suite, EncryptedKeyVersion edek, boolean logRetryEntry)
+      CipherSuite suite, CryptoProtocolVersion version,
+      EncryptedKeyVersion edek, boolean logRetryEntry)
       throws FileAlreadyExistsException, AccessControlException,
       UnresolvedLinkException, FileNotFoundException,
       ParentNotDirectoryException, RetryStartFileException, IOException {
@@ -2615,7 +2616,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       if (!ezKeyName.equals(edek.getEncryptionKeyName())) {
         throw new RetryStartFileException();
       }
-      feInfo = new FileEncryptionInfo(suite,
+      feInfo = new FileEncryptionInfo(suite, version,
           edek.getEncryptedKeyVersion().getMaterial(),
           edek.getEncryptedKeyIv(),
           ezKeyName, edek.getEncryptionKeyVersionName());
@@ -8683,7 +8684,11 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
       src = resolvePath(src, pathComponents);
       final CipherSuite suite = CipherSuite.convert(cipher);
-      final XAttr ezXAttr = dir.createEncryptionZone(src, suite, keyName);
+      // For now this is hardcoded, as we only support one method.
+      final CryptoProtocolVersion version =
+          CryptoProtocolVersion.ENCRYPTION_ZONES;
+      final XAttr ezXAttr = dir.createEncryptionZone(src, suite,
+          version, keyName);
       List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
       xAttrs.add(ezXAttr);
       getEditLog().logSetXAttrs(src, xAttrs, logRetryCache);
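Note: chooseProtocolVersion() is called above but defined outside this hunk.
A minimal sketch of the negotiation it presumably performs; the method shape,
the zone.getVersion() accessor, and the exception wiring are assumptions
rather than code from this commit (only the error text matches what the
tests below assert):

    // Sketch: pick the zone's required version if the client advertises it.
    private static CryptoProtocolVersion chooseProtocolVersion(
        EncryptionZone zone, CryptoProtocolVersion[] supportedVersions)
        throws UnknownCryptoProtocolVersionException {
      final CryptoProtocolVersion required = zone.getVersion();
      for (CryptoProtocolVersion c : supportedVersions) {
        // Skip versions the server itself cannot interpret.
        if (c.equals(CryptoProtocolVersion.UNKNOWN)) {
          continue;
        }
        if (c.equals(required)) {
          return c;
        }
      }
      throw new UnknownCryptoProtocolVersionException(
          "No crypto protocol versions provided by the client are supported");
    }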
View File
@@ -38,7 +38,7 @@ import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
 import org.apache.hadoop.fs.CacheFlag;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -537,7 +537,7 @@ class NameNodeRpcServer implements NamenodeProtocols {
   public HdfsFileStatus create(String src, FsPermission masked,
       String clientName, EnumSetWritable<CreateFlag> flag,
       boolean createParent, short replication, long blockSize,
-      List<CipherSuite> cipherSuites)
+      CryptoProtocolVersion[] supportedVersions)
       throws IOException {
     String clientMachine = getClientMachine();
     if (stateChangeLog.isDebugEnabled()) {
@@ -551,7 +551,7 @@ class NameNodeRpcServer implements NamenodeProtocols {
     HdfsFileStatus fileStatus = namesystem.startFile(src, new PermissionStatus(
         getRemoteUser().getShortUserName(), null, masked),
         clientName, clientMachine, flag.get(), createParent, replication,
-        blockSize, cipherSuites);
+        blockSize, supportedVersions);
     metrics.incrFilesCreated();
     metrics.incrCreateFileOps();
     return fileStatus;
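Note: for orientation, a hypothetical client-side invocation of the updated
ClientProtocol.create() signature; argument values are illustrative, and
only the parameter list is taken from the diff (CryptoProtocolVersion
.supported() is the client's advertised version list):

    // Illustrative call shape only; "namenode" and "clientName" are assumed
    // to be in scope.
    HdfsFileStatus stat = namenode.create(
        "/zone/file",                       // src
        FsPermission.getFileDefault(),      // masked
        clientName,                         // clientName
        new EnumSetWritable<CreateFlag>(EnumSet.of(CreateFlag.CREATE)),
        true,                               // createParent
        (short) 3,                          // replication
        128L * 1024 * 1024,                 // blockSize
        CryptoProtocolVersion.supported()); // supportedVersions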
View File
@@ -75,7 +75,7 @@ message CreateRequestProto {
   required bool createParent = 5;
   required uint32 replication = 6; // Short: Only 16 bits used
   required uint64 blockSize = 7;
-  repeated CipherSuite cipherSuites = 8;
+  repeated CryptoProtocolVersionProto cryptoProtocolVersion = 8;
 }

 message CreateResponseProto {
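Note: field number 8 is reused with a new element type, which is presumably
acceptable only because the old repeated cipherSuites field had not shipped
in a release. A sketch of the client-side translation into this field; the
helper name is assumed, not necessarily the commit's actual PBHelper method:

    // Map the client's CryptoProtocolVersion[] onto the repeated proto enum.
    // Java and proto enums share numeric values (UNKNOWN = 1,
    // ENCRYPTION_ZONES = 2), so valueOf(int) can translate directly.
    static List<CryptoProtocolVersionProto> convert(
        CryptoProtocolVersion[] versions) {
      List<CryptoProtocolVersionProto> protos =
          new ArrayList<CryptoProtocolVersionProto>(versions.length);
      for (CryptoProtocolVersion v : versions) {
        protos.add(CryptoProtocolVersionProto.valueOf(v.getVersion()));
      }
      return protos;
    }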
View File
@@ -48,8 +48,9 @@ message ListEncryptionZonesRequestProto {
 message EncryptionZoneProto {
   required int64 id = 1;
   required string path = 2;
-  required CipherSuite suite = 3;
-  required string keyName = 4;
+  required CipherSuiteProto suite = 3;
+  required CryptoProtocolVersionProto cryptoProtocolVersion = 4;
+  required string keyName = 5;
 }

 message ListEncryptionZonesResponseProto {
View File
@@ -218,20 +218,29 @@ message DataEncryptionKeyProto {
 /**
  * Cipher suite.
  */
-enum CipherSuite {
+enum CipherSuiteProto {
   UNKNOWN = 1;
   AES_CTR_NOPADDING = 2;
 }

+/**
+ * Crypto protocol version used to access encrypted files.
+ */
+enum CryptoProtocolVersionProto {
+  UNKNOWN_PROTOCOL_VERSION = 1;
+  ENCRYPTION_ZONES = 2;
+}
+
 /**
  * Encryption information for a file.
  */
 message FileEncryptionInfoProto {
-  required CipherSuite suite = 1;
-  required bytes key = 2;
-  required bytes iv = 3;
-  required string keyName = 4;
-  required string ezKeyVersionName = 5;
+  required CipherSuiteProto suite = 1;
+  required CryptoProtocolVersionProto cryptoProtocolVersion = 2;
+  required bytes key = 3;
+  required bytes iv = 4;
+  required string keyName = 5;
+  required string ezKeyVersionName = 6;
 }

 /**
@@ -249,8 +258,9 @@ message PerFileEncryptionInfoProto {
  * zone
  */
 message ZoneEncryptionInfoProto {
-  required CipherSuite suite = 1;
-  required string keyName = 2;
+  required CipherSuiteProto suite = 1;
+  required CryptoProtocolVersionProto cryptoProtocolVersion = 2;
+  required string keyName = 3;
 }

 /**
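Note: the Java-side CryptoProtocolVersion enum can record the raw numeric
value of a version it does not recognize, so a newer peer's version survives
round-tripping into error messages. A conversion sketch under that
assumption; the helper name is illustrative, not the commit's actual
PBHelper code:

    // Proto -> Java: preserve an unrecognized wire value in UNKNOWN's
    // unknownValue so it can be reported later.
    static CryptoProtocolVersion convert(CryptoProtocolVersionProto proto) {
      switch (proto) {
      case ENCRYPTION_ZONES:
        return CryptoProtocolVersion.ENCRYPTION_ZONES;
      default:
        CryptoProtocolVersion version = CryptoProtocolVersion.UNKNOWN;
        version.setUnknownValue(proto.getNumber());
        return version;
      }
    }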
View File
@@ -53,6 +53,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -264,7 +265,7 @@ public class TestDFSClientRetries {
         .when(mockNN)
         .create(anyString(), (FsPermission) anyObject(), anyString(),
             (EnumSetWritable<CreateFlag>) anyObject(), anyBoolean(),
-            anyShort(), anyLong(), (List<CipherSuite>) anyList());
+            anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());
     final DFSClient client = new DFSClient(null, mockNN, conf, null);
     OutputStream os = client.create("testfile", true);
View File
@@ -40,10 +40,13 @@ import java.util.concurrent.Future;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FSTestWrapper;
 import org.apache.hadoop.fs.FileContext;
@@ -57,7 +60,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.client.HdfsAdmin;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.EncryptionFaultInjector;
@@ -68,6 +73,7 @@ import org.apache.hadoop.hdfs.tools.DFSck;
 import org.apache.hadoop.hdfs.tools.offlineImageViewer.PBImageXmlWriter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
+import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -83,6 +89,11 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
+
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Matchers.anyShort;
 import static org.mockito.Mockito.withSettings;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.anyString;
@@ -628,7 +639,7 @@ public class TestEncryptionZones {
   }

   @Test(timeout = 60000)
-  public void testCipherSuiteNegotiation() throws Exception {
+  public void testVersionAndSuiteNegotiation() throws Exception {
     final HdfsAdmin dfsAdmin =
         new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
     final Path zone = new Path("/zone");
@@ -637,43 +648,44 @@ public class TestEncryptionZones {
     // Create a file in an EZ, which should succeed
     DFSTestUtil
         .createFile(fs, new Path(zone, "success1"), 0, (short) 1, 0xFEED);
-    // Pass no cipherSuites, fail
-    fs.getClient().cipherSuites = Lists.newArrayListWithCapacity(0);
+    // Pass no supported versions, fail
+    DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] {};
     try {
       DFSTestUtil.createFile(fs, new Path(zone, "fail"), 0, (short) 1, 0xFEED);
-      fail("Created a file without specifying a CipherSuite!");
-    } catch (UnknownCipherSuiteException e) {
-      assertExceptionContains("No cipher suites", e);
+      fail("Created a file without specifying a crypto protocol version");
+    } catch (UnknownCryptoProtocolVersionException e) {
+      assertExceptionContains("No crypto protocol versions", e);
     }
-    // Pass some unknown cipherSuites, fail
-    fs.getClient().cipherSuites = Lists.newArrayListWithCapacity(3);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
+    // Pass some unknown versions, fail
+    DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[]
+        { CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.UNKNOWN };
     try {
       DFSTestUtil.createFile(fs, new Path(zone, "fail"), 0, (short) 1, 0xFEED);
-      fail("Created a file without specifying a CipherSuite!");
-    } catch (UnknownCipherSuiteException e) {
-      assertExceptionContains("No cipher suites", e);
+      fail("Created a file without specifying a known crypto protocol version");
+    } catch (UnknownCryptoProtocolVersionException e) {
+      assertExceptionContains("No crypto protocol versions", e);
     }
-    // Pass some unknown and a good cipherSuites, success
-    fs.getClient().cipherSuites = Lists.newArrayListWithCapacity(3);
-    fs.getClient().cipherSuites.add(CipherSuite.AES_CTR_NOPADDING);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
+    // Pass some unknown versions and a good version, success
+    DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =
+        new CryptoProtocolVersion[] {
+            CryptoProtocolVersion.UNKNOWN,
+            CryptoProtocolVersion.UNKNOWN,
+            CryptoProtocolVersion.ENCRYPTION_ZONES };
     DFSTestUtil
         .createFile(fs, new Path(zone, "success2"), 0, (short) 1, 0xFEED);
-    fs.getClient().cipherSuites = Lists.newArrayListWithCapacity(3);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
-    fs.getClient().cipherSuites.add(CipherSuite.UNKNOWN);
-    fs.getClient().cipherSuites.add(CipherSuite.AES_CTR_NOPADDING);
+    DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =
+        new CryptoProtocolVersion[] {
+            CryptoProtocolVersion.ENCRYPTION_ZONES,
+            CryptoProtocolVersion.UNKNOWN,
+            CryptoProtocolVersion.UNKNOWN };
     DFSTestUtil
         .createFile(fs, new Path(zone, "success3"), 4096, (short) 1, 0xFEED);
     // Check KeyProvider state
     // Flushing the KP on the NN, since it caches, and init a test one
     cluster.getNamesystem().getProvider().flush();
     KeyProvider provider = KeyProviderFactory
-        .get(new URI(conf.get(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI)), conf);
+        .get(new URI(conf.get(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI)),
+            conf);
     List<String> keys = provider.getKeys();
     assertEquals("Expected NN to have created one key per zone", 1,
         keys.size());
@@ -691,6 +703,66 @@ public class TestEncryptionZones {
           "/success" + i));
       assertEquals(feInfo.getCipherSuite(), CipherSuite.AES_CTR_NOPADDING);
     }
+
+    DFSClient old = fs.dfs;
+    try {
+      testCipherSuiteNegotiation(fs, conf);
+    } finally {
+      fs.dfs = old;
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private static void mockCreate(ClientProtocol mcp,
+      CipherSuite suite, CryptoProtocolVersion version) throws Exception {
+    Mockito.doReturn(
+        new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission(
+            (short) 777), "owner", "group", new byte[0], new byte[0],
+            1010, 0, new FileEncryptionInfo(suite,
+            version, new byte[suite.getAlgorithmBlockSize()],
+            new byte[suite.getAlgorithmBlockSize()],
+            "fakeKey", "fakeVersion"),
+            (byte) 0))
+        .when(mcp)
+        .create(anyString(), (FsPermission) anyObject(), anyString(),
+            (EnumSetWritable<CreateFlag>) anyObject(), anyBoolean(),
+            anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());
+  }
+
+  // This test only uses mocks. Called from the end of an existing test to
+  // avoid an extra mini cluster.
+  private static void testCipherSuiteNegotiation(DistributedFileSystem fs,
+      Configuration conf) throws Exception {
+    // Set up mock ClientProtocol to test client-side CipherSuite negotiation
+    final ClientProtocol mcp = Mockito.mock(ClientProtocol.class);
+    // Try with an empty conf
+    final Configuration noCodecConf = new Configuration(conf);
+    final CipherSuite suite = CipherSuite.AES_CTR_NOPADDING;
+    final String confKey = CommonConfigurationKeysPublic
+        .HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX + suite
+        .getConfigSuffix();
+    noCodecConf.set(confKey, "");
+    fs.dfs = new DFSClient(null, mcp, noCodecConf, null);
+    mockCreate(mcp, suite, CryptoProtocolVersion.ENCRYPTION_ZONES);
+    try {
+      fs.create(new Path("/mock"));
+      fail("Created with no configured codecs!");
+    } catch (UnknownCipherSuiteException e) {
+      assertExceptionContains("No configuration found for the cipher", e);
+    }
+    // Try create with an UNKNOWN CipherSuite
+    fs.dfs = new DFSClient(null, mcp, conf, null);
+    CipherSuite unknown = CipherSuite.UNKNOWN;
+    unknown.setUnknownValue(989);
+    mockCreate(mcp, unknown, CryptoProtocolVersion.ENCRYPTION_ZONES);
+    try {
+      fs.create(new Path("/mock"));
+      fail("Created with unknown cipher!");
+    } catch (IOException e) {
+      assertExceptionContains("unknown CipherSuite with ID 989", e);
+    }
+  }
+
   @Test(timeout = 120000)
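Note: testCipherSuiteNegotiation() above expects create() to fail when the
NameNode hands back a FileEncryptionInfo carrying an unrecognized suite. A
sketch of the client-side guard it exercises; the method shape is assumed,
and only the error text is grounded in the test's assertion:

    // Assumed DFSClient-side guard: reject a suite the client cannot decode.
    private static void checkCipherSuite(FileEncryptionInfo feInfo)
        throws IOException {
      final CipherSuite suite = feInfo.getCipherSuite();
      if (suite.equals(CipherSuite.UNKNOWN)) {
        throw new IOException("NameNode specified unknown CipherSuite with ID "
            + suite.getUnknownValue() + ", cannot instantiate CryptoCodec.");
      }
    }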
View File
@@ -38,6 +38,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherSuite;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -352,7 +353,7 @@ public class TestLease {
         .when(mcp)
         .create(anyString(), (FsPermission) anyObject(), anyString(),
             (EnumSetWritable<CreateFlag>) anyObject(), anyBoolean(),
-            anyShort(), anyLong(), (List<CipherSuite>) anyList());
+            anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());
     final Configuration conf = new Configuration();
     final DFSClient c1 = createDFSClientAs(ugi[0], conf);
View File
@@ -39,6 +39,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.CacheFlag;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -395,7 +396,8 @@ public class TestRetryCacheWithHA {
       this.status = client.getNamenode().create(fileName,
           FsPermission.getFileDefault(), client.getClientName(),
           new EnumSetWritable<CreateFlag>(createFlag), false, DataNodes,
-          BlockSize, null);
+          BlockSize,
+          new CryptoProtocolVersion[] {CryptoProtocolVersion.ENCRYPTION_ZONES});
     }

     @Override