HDFS-10489. Deprecate dfs.encryption.key.provider.uri for HDFS encryption zones. Contributed by Xiao Chen.
(cherry picked from commit ea839bd48e)

Conflicts:
	hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsConfiguration.java
	hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

(cherry picked from commit 20585ade1d9d7fbd8b9df8b1e3db5a74fd4441a6)
parent 8c24388af0
commit 900dc70a01
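For context (this note is not part of the patch): the change deprecates the HDFS-specific key dfs.encryption.key.provider.uri in favour of hadoop.security.key.provider.path, which is now defined in CommonConfigurationKeysPublic and documented in core-default.xml. A minimal sketch of the preferred configuration after this change; the KMS address is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class KeyProviderConfigExample {
      public static void main(String[] args) {
        // HdfsConfiguration registers the deprecation mapping added by this patch,
        // so configurations that still use the old key keep resolving.
        Configuration conf = new HdfsConfiguration();

        // Preferred: set the new, non-HDFS-specific key (normally in core-site.xml).
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
            "kms://http@localhost:9600/kms");

        System.out.println(conf.get(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH));
      }
    }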
@@ -29,6 +29,7 @@ import java.util.ServiceLoader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * A factory to create a list of KeyProvider based on the path given in a
@@ -39,7 +40,7 @@ import org.apache.hadoop.conf.Configuration;
 @InterfaceStability.Unstable
 public abstract class KeyProviderFactory {
   public static final String KEY_PROVIDER_PATH =
-    "hadoop.security.key.provider.path";
+    CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH;
 
   public abstract KeyProvider createProvider(URI providerName,
                                              Configuration conf
@@ -625,6 +625,14 @@ public class CommonConfigurationKeysPublic {
   public static final String HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS =
     "hadoop.security.impersonation.provider.class";
 
+  /**
+   * @see
+   * <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">
+   * core-default.xml</a>
+   */
+  public static final String HADOOP_SECURITY_KEY_PROVIDER_PATH =
+      "hadoop.security.key.provider.path";
+
   //  <!-- KMSClientProvider configurations -->
   /**
    * @see
@@ -1976,6 +1976,14 @@
   </description>
 </property>
 
+<property>
+  <name>hadoop.security.key.provider.path</name>
+  <description>
+    The KeyProvider to use when managing zone keys, and interacting with
+    encryption keys when reading and writing to an encryption zone.
+  </description>
+</property>
+
 <property>
   <name>fs.har.impl.disable.cache</name>
   <value>true</value>
@@ -28,6 +28,7 @@ The following table lists the configuration property names that are deprecated i
 | dfs.data.dir | dfs.datanode.data.dir |
 | dfs.datanode.max.xcievers | dfs.datanode.max.transfer.threads |
 | dfs.df.interval | fs.df.interval |
+| dfs.encryption.key.provider.uri | hadoop.security.key.provider.path |
 | dfs.federation.nameservice.id | dfs.nameservice.id |
 | dfs.federation.nameservices | dfs.nameservices |
 | dfs.http.address | dfs.namenode.http-address |
@@ -37,10 +37,10 @@ KMS Client Configuration
 The KMS client `KeyProvider` uses the **kms** scheme, and the embedded URL must be the URL of the KMS. For example, for a KMS running on `http://localhost:16000/kms`, the KeyProvider URI is `kms://http@localhost:16000/kms`. And, for a KMS running on `https://localhost:16000/kms`, the KeyProvider URI is `kms://https@localhost:16000/kms`
 
 The following is an example to configure HDFS NameNode as a KMS client in
-`hdfs-site.xml`:
+`core-site.xml`:
 
     <property>
-      <name>dfs.encryption.key.provider.uri</name>
+      <name>hadoop.security.key.provider.path</name>
       <value>kms://http@localhost:9600/kms</value>
       <description>
         The KeyProvider to use when interacting with encryption keys used
@@ -664,15 +664,15 @@ is to use LoadBalancingKMSClientProvider. Using this approach, a KMS client
 (for example, a HDFS NameNode) is aware of multiple KMS instances, and it sends
 requests to them in a round-robin fashion. LoadBalancingKMSClientProvider is
 implicitly used when more than one URI is specified in
-`dfs.encryption.key.provider.uri`.
+`hadoop.security.key.provider.path`.
 
-The following example in `hdfs-site.xml` configures two KMS
+The following example in `core-site.xml` configures two KMS
 instances, `kms01.example.com` and `kms02.example.com`.
 The hostnames are separated by semi-colons, and all KMS instances must run
 on the same port.
 
     <property>
-      <name>dfs.encryption.key.provider.uri</name>
+      <name>hadoop.security.key.provider.path</name>
       <value>kms://https@kms01.example.com;kms02.example.com:9600/kms</value>
       <description>
         The KeyProvider to use when interacting with encryption keys used
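Aside (not part of the patch): a hedged Java sketch of how a client could resolve the provider that the documentation above configures, mirroring the KeyProviderFactory lookup used in the tests further down; the KMS hostnames and port are illustrative:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProviderFactory;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    public class ResolveKeyProviderExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Two KMS instances; more than one URI implies LoadBalancingKMSClientProvider.
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
            "kms://http@kms01.example.com;kms02.example.com:9600/kms");

        // Build the client-side provider from the configured path
        // (the same lookup the tests below use).
        KeyProvider provider = KeyProviderFactory.get(
            new URI(conf.getTrimmed(
                CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH)),
            conf);
        System.out.println(provider);
      }
    }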
@@ -509,7 +509,7 @@ public class DFSUtilClient {
   }
 
   private static String keyProviderUriKeyName =
-      HdfsClientConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI;
+      CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH;
 
   /**
    * Set the key provider uri configuration key name for creating key providers.
@@ -599,16 +599,17 @@ public class DFSUtilClient {
   }
 
   /**
-   * Probe for HDFS Encryption being enabled; this uses the value of
-   * the option {@link HdfsClientConfigKeys#DFS_ENCRYPTION_KEY_PROVIDER_URI},
-   * returning true if that property contains a non-empty, non-whitespace
+   * Probe for HDFS Encryption being enabled; this uses the value of the option
+   * {@link CommonConfigurationKeysPublic#HADOOP_SECURITY_KEY_PROVIDER_PATH}
+   * , returning true if that property contains a non-empty, non-whitespace
    * string.
    * @param conf configuration to probe
    * @return true if encryption is considered enabled.
    */
   public static boolean isHDFSEncryptionEnabled(Configuration conf) {
-    return !conf.getTrimmed(
-        HdfsClientConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, "").isEmpty();
+    return !(conf.getTrimmed(
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH, "")
+        .isEmpty());
   }
 
   public static InetSocketAddress getNNAddress(String address) {
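Aside (not part of the patch): the probe above only checks that the trimmed value of hadoop.security.key.provider.path is non-empty, exactly as the updated TestDFSUtil case below exercises. A small usage sketch with an illustrative provider URI:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.hdfs.DFSUtilClient;

    public class EncryptionProbeExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // No provider configured: encryption is reported as disabled.
        System.out.println(DFSUtilClient.isHDFSEncryptionEnabled(conf)); // false

        // Whitespace-only values are trimmed away and still count as unset.
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
            "\n\t\n");
        System.out.println(DFSUtilClient.isHDFSEncryptionEnabled(conf)); // false

        // Any non-empty provider URI makes the probe return true.
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
            "kms://http@localhost:9600/kms");
        System.out.println(DFSUtilClient.isHDFSEncryptionEnabled(conf)); // true
      }
    }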
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DeprecatedKeys;
@@ -142,6 +143,8 @@ public class HdfsConfiguration extends Configuration {
       new DeprecationDelta("dfs.client.file-block-storage-locations.timeout",
           HdfsClientConfigKeys.
               DFS_CLIENT_FILE_BLOCK_STORAGE_LOCATIONS_TIMEOUT_MS),
+      new DeprecationDelta("dfs.encryption.key.provider.uri",
+          CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH),
     });
   }
 
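Aside (not part of the patch): the DeprecationDelta above keeps existing deployments working; once HdfsConfiguration has registered it, a value set under the old name is readable under the new one and Hadoop logs a deprecation warning. A minimal sketch, with an illustrative value:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class DeprecatedKeyExample {
      public static void main(String[] args) {
        // Creating an HdfsConfiguration loads the deprecation deltas above.
        Configuration conf = new HdfsConfiguration();

        // A site file or caller that still uses the deprecated name...
        conf.set("dfs.encryption.key.provider.uri",
            "kms://http@localhost:9600/kms");

        // ...is visible under hadoop.security.key.provider.path as well.
        System.out.println(conf.get(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH));
      }
    }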
@@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
-import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.cache.Cache;
@@ -86,11 +86,11 @@ public class KeyProviderCache {
 
   private URI createKeyProviderURI(Configuration conf) {
     final String providerUriStr = conf.getTrimmed(
-        HdfsClientConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, "");
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH, "");
     // No provider set in conf
     if (providerUriStr.isEmpty()) {
       LOG.error("Could not find uri with key ["
-          + HdfsClientConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI
+          + CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH
          + "] to create a keyProvider !!");
       return null;
     }
@@ -152,7 +152,6 @@ public interface HdfsClientConfigKeys {
       "dfs.datanode.kerberos.principal";
   String DFS_DATANODE_READAHEAD_BYTES_KEY = "dfs.datanode.readahead.bytes";
   long DFS_DATANODE_READAHEAD_BYTES_DEFAULT = 4 * 1024 * 1024; // 4MB
-  String DFS_ENCRYPTION_KEY_PROVIDER_URI = "dfs.encryption.key.provider.uri";
 
   String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY =
       "dfs.encrypt.data.transfer.cipher.suites";
@@ -22,6 +22,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
@@ -158,7 +159,8 @@ public class TestHdfsHelper extends TestDirHelper {
     FileSystemTestHelper helper = new FileSystemTestHelper();
     final String jceksPath = JavaKeyStoreProvider.SCHEME_NAME + "://file" +
         new Path(helper.getTestRootDir(), "test.jks").toUri();
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, jceksPath);
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        jceksPath);
     MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
     builder.numDataNodes(2);
     MiniDFSCluster miniHdfs = builder.build();
@@ -30,10 +30,10 @@ import java.util.EnumSet;
 
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -135,7 +135,7 @@ public class TestRpcProgramNfs3 {
     String testRoot = fsHelper.getTestRootDir();
     testRootDir = new File(testRoot).getAbsoluteFile();
     final Path jksPath = new Path(testRootDir.toString(), "test.jks");
-    config.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
     ProxyUsers.refreshSuperUserGroupsConfiguration(config);
 
@@ -743,8 +743,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
       HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_PROPS_RESOLVER_CLASS_KEY;
   public static final int DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES_DEFAULT = 100;
   public static final String DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES = "dfs.namenode.list.encryption.zones.num.responses";
-  public static final String DFS_ENCRYPTION_KEY_PROVIDER_URI =
-      HdfsClientConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI;
   public static final String DFS_NAMENODE_EDEKCACHELOADER_INTERVAL_MS_KEY = "dfs.namenode.edekcacheloader.interval.ms";
   public static final int DFS_NAMENODE_EDEKCACHELOADER_INTERVAL_MS_DEFAULT = 1000;
   public static final String DFS_NAMENODE_EDEKCACHELOADER_INITIAL_DELAY_MS_KEY = "dfs.namenode.edekcacheloader.initial.delay.ms";
@@ -2716,14 +2716,6 @@
     block layout (see HDFS-6482 for details on the layout).</description>
 </property>
 
-<property>
-  <name>dfs.encryption.key.provider.uri</name>
-  <description>
-    The KeyProvider to use when interacting with encryption keys used
-    when reading and writing to an encryption zone.
-  </description>
-</property>
-
 <property>
   <name>dfs.storage.policy.enabled</name>
   <value>true</value>
@@ -117,7 +117,7 @@ Once a KMS has been set up and the NameNode and HDFS clients have been correctly
 
 ### <a name="Configuring_the_cluster_KeyProvider"></a>Configuring the cluster KeyProvider
 
-#### dfs.encryption.key.provider.uri
+#### hadoop.security.key.provider.path
 
 The KeyProvider to use when interacting with encryption keys used when reading and writing to an encryption zone.
 
@@ -35,7 +35,7 @@ import org.apache.hadoop.cli.util.CommandExecutor.Result;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
-import org.apache.hadoop.crypto.key.KeyProviderFactory;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -66,7 +66,7 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
     tmpDir = new File(System.getProperty("test.build.data", "target"),
        UUID.randomUUID().toString()).getAbsoluteFile();
     final Path jksPath = new Path(tmpDir.toString(), "test.jks");
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
 
     dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
@@ -36,6 +36,7 @@ import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
 import org.apache.hadoop.crypto.key.kms.server.KMSConfiguration;
 import org.apache.hadoop.crypto.key.kms.server.KeyAuthorizationKeyProvider;
 import org.apache.hadoop.crypto.key.kms.server.MiniKMS;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystemTestHelper;
@@ -190,7 +191,7 @@ public class TestAclsEndToEnd {
         "keyadmin,hdfs,user");
     conf.set(ProxyUsers.CONF_HADOOP_PROXYUSER + "." + realUser + ".hosts",
         "*");
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         getKeyProviderURI());
     conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY,
         true);
@@ -58,6 +58,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -1033,16 +1034,19 @@ public class TestDFSUtil {
   @Test
   public void testEncryptionProbe() throws Throwable {
     Configuration conf = new Configuration(false);
-    conf.unset(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI);
+    conf.unset(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH);
     assertFalse("encryption enabled on no provider key",
         DFSUtilClient.isHDFSEncryptionEnabled(conf));
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, "");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        "");
     assertFalse("encryption enabled on empty provider key",
         DFSUtilClient.isHDFSEncryptionEnabled(conf));
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, "\n\t\n");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        "\n\t\n");
     assertFalse("encryption enabled on whitespace provider key",
         DFSUtilClient.isHDFSEncryptionEnabled(conf));
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, "http://hadoop.apache.org");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        "http://hadoop.apache.org");
     assertTrue("encryption disabled on valid provider key",
         DFSUtilClient.isHDFSEncryptionEnabled(conf));
 
@@ -150,7 +150,8 @@ public class TestEncryptionZones {
     // Set up java key store
     String testRoot = fsHelper.getTestRootDir();
     testRootDir = new File(testRoot).getAbsoluteFile();
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, getKeyProviderURI());
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        getKeyProviderURI());
     conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
     // Lower the batch size for testing
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
@@ -845,8 +846,8 @@ public class TestEncryptionZones {
     // Check KeyProvider state
     // Flushing the KP on the NN, since it caches, and init a test one
     cluster.getNamesystem().getProvider().flush();
-    KeyProvider provider = KeyProviderFactory
-        .get(new URI(conf.getTrimmed(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI)),
+    KeyProvider provider = KeyProviderFactory.get(new URI(conf.getTrimmed(
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH)),
         conf);
     List<String> keys = provider.getKeys();
     assertEquals("Expected NN to have created one key per zone", 1,
@@ -931,7 +932,8 @@ public class TestEncryptionZones {
   public void testCreateEZWithNoProvider() throws Exception {
     // Unset the key provider and make sure EZ ops don't work
     final Configuration clusterConf = cluster.getConfiguration(0);
-    clusterConf.unset(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI);
+    clusterConf
+        .unset(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH);
     cluster.restartNameNode(true);
     cluster.waitActive();
     final Path zone1 = new Path("/zone1");
@@ -943,7 +945,8 @@ public class TestEncryptionZones {
       assertExceptionContains("since no key provider is available", e);
     }
     final Path jksPath = new Path(testRootDir.toString(), "test.jks");
-    clusterConf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    clusterConf
+        .set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri()
     );
     // Try listing EZs as well
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
@@ -62,7 +63,7 @@ public class TestEncryptionZonesWithHA {
     fsHelper = new FileSystemTestHelper();
     String testRoot = fsHelper.getTestRootDir();
     testRootDir = new File(testRoot).getAbsoluteFile();
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" +
         new Path(testRootDir.toString(), "test.jks").toUri()
     );
@@ -24,7 +24,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
-import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.junit.Assert;
 import org.junit.Test;
 
@@ -94,26 +94,26 @@ public class TestKeyProviderCache {
   public void testCache() throws Exception {
     KeyProviderCache kpCache = new KeyProviderCache(10000);
     Configuration conf = new Configuration();
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         "dummy://foo:bar@test_provider1");
     KeyProvider keyProvider1 = kpCache.get(conf);
     Assert.assertNotNull("Returned Key Provider is null !!", keyProvider1);
 
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         "dummy://foo:bar@test_provider1");
     KeyProvider keyProvider2 = kpCache.get(conf);
 
     Assert.assertTrue("Different KeyProviders returned !!",
         keyProvider1 == keyProvider2);
 
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         "dummy://test_provider3");
     KeyProvider keyProvider3 = kpCache.get(conf);
 
     Assert.assertFalse("Same KeyProviders returned !!",
         keyProvider1 == keyProvider3);
 
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         "dummy://hello:there@test_provider1");
     KeyProvider keyProvider4 = kpCache.get(conf);
 
@@ -25,6 +25,7 @@ import java.util.EnumSet;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileContextTestWrapper;
 import org.apache.hadoop.fs.FileStatus;
@@ -77,7 +78,7 @@ public class TestReservedRawPaths {
     String testRoot = fsHelper.getTestRootDir();
     File testRootDir = new File(testRoot).getAbsoluteFile();
     final Path jksPath = new Path(testRootDir.toString(), "test.jks");
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri()
     );
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
@@ -49,6 +49,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
 import org.apache.hadoop.crypto.key.kms.server.KMSConfiguration;
 import org.apache.hadoop.crypto.key.kms.server.MiniKMS;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestWrapper;
 import org.apache.hadoop.fs.FileUtil;
@@ -237,7 +238,8 @@ public class TestSecureEncryptionZoneWithKMS {
   @Before
   public void setup() throws Exception {
     // Start MiniDFS Cluster
-    baseConf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    baseConf
+        .set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         getKeyProviderURI());
     baseConf.setBoolean(DFSConfigKeys
         .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -85,7 +86,8 @@ public class TestNestedEncryptionZones {
     // Set up java key store
     String testRoot = fsHelper.getTestRootDir();
     testRootDir = new File(testRoot).getAbsoluteFile();
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, getKeyProviderURI());
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        getKeyProviderURI());
     conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
     // Lower the batch size for testing
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.namenode.metrics;
 
 import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.FileSystemTestWrapper;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -643,7 +644,7 @@ public class TestNameNodeMetrics {
     // Set up java key store
     String testRoot = fsHelper.getTestRootDir();
     File testRootDir = new File(testRoot).getAbsoluteFile();
-    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" +
         new Path(testRootDir.toString(), "test.jks").toUri());
     conf.setBoolean(DFSConfigKeys