diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
index 08a3d93d2fa..4e421da2219 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
@@ -60,14 +60,14 @@ public class TestLoadBalancingKMSClientProvider {
             providers[2].getKMSUrl()));
 
     kp = new KMSClientProvider.Factory().createProvider(new URI(
-        "kms://http@host1;host2;host3:16000/kms/foo"), conf);
+        "kms://http@host1;host2;host3:9600/kms/foo"), conf);
     assertTrue(kp instanceof LoadBalancingKMSClientProvider);
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:16000/kms/foo/v1/",
-        "http://host2:16000/kms/foo/v1/",
-        "http://host3:16000/kms/foo/v1/"),
+    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
+        "http://host2:9600/kms/foo/v1/",
+        "http://host3:9600/kms/foo/v1/"),
         Sets.newHashSet(providers[0].getKMSUrl(),
             providers[1].getKMSUrl(),
             providers[2].getKMSUrl()));
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index 7044fa86704..729e63a1218 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -24,7 +24,7 @@
 
 # The HTTP port used by KMS
 #
-# export KMS_HTTP_PORT=16000
+# export KMS_HTTP_PORT=9600
 
 # The Admin port used by KMS
 #
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
index 5e1ffa40c9d..927b4af1fc4 100644
--- a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
@@ -37,7 +37,7 @@ function hadoop_subproject_init
   export HADOOP_CATALINA_CONFIG="${HADOOP_CONF_DIR}"
   export HADOOP_CATALINA_LOG="${HADOOP_LOG_DIR}"
 
-  export HADOOP_CATALINA_HTTP_PORT="${KMS_HTTP_PORT:-16000}"
+  export HADOOP_CATALINA_HTTP_PORT="${KMS_HTTP_PORT:-9600}"
   export HADOOP_CATALINA_ADMIN_PORT="${KMS_ADMIN_PORT:-$((HADOOP_CATALINA_HTTP_PORT+1))}"
   export HADOOP_CATALINA_MAX_THREADS="${KMS_MAX_THREADS:-1000}"
   export HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE="${KMS_MAX_HTTP_HEADER_SIZE:-65536}"
diff --git a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm
index 65854cf1105..68663672b23 100644
--- a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm
+++ b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm
@@ -32,7 +32,7 @@ KMS is a Java web-application and it runs using a pre-configured Tomcat bundled
 KMS Client Configuration
 ------------------------
 
-The KMS client `KeyProvider` uses the **kms** scheme, and the embedded URL must be the URL of the KMS. For example, for a KMS running on `http://localhost:16000/kms`, the KeyProvider URI is `kms://http@localhost:16000/kms`. And, for a KMS running on `https://localhost:16000/kms`, the KeyProvider URI is `kms://https@localhost:16000/kms`
+The KMS client `KeyProvider` uses the **kms** scheme, and the embedded URL must be the URL of the KMS. For example, for a KMS running on `http://localhost:9600/kms`, the KeyProvider URI is `kms://http@localhost:9600/kms`. And, for a KMS running on `https://localhost:9600/kms`, the KeyProvider URI is `kms://https@localhost:9600/kms`
 
 KMS
 ---
@@ -178,7 +178,7 @@ $H3 Embedded Tomcat Configuration
 
 To configure the embedded Tomcat go to the `share/hadoop/kms/tomcat/conf`.
 
-KMS pre-configures the HTTP and Admin ports in Tomcat's `server.xml` to 16000 and 16001.
+KMS pre-configures the HTTP and Admin ports in Tomcat's `server.xml` to 9600 and 9601.
 
 Tomcat logs are also preconfigured to go to Hadoop's `logs/` directory.
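For reviewers who want to sanity-check the new default locally, here is a minimal sketch (not part of this patch) that mirrors the updated test: it builds a client-side `KeyProvider` through the same `KMSClientProvider.Factory` API, pointing at the new default port 9600. The host `localhost`, the path `/kms`, and the class name `KmsPortSmokeCheck` are illustrative assumptions, not code from this change.

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.kms.KMSClientProvider;

public class KmsPortSmokeCheck {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // kms:// scheme embedding the KMS URL; the only difference after this
    // patch is the default HTTP port: 9600 instead of 16000.
    URI kmsUri = new URI("kms://http@localhost:9600/kms");
    KeyProvider provider =
        new KMSClientProvider.Factory().createProvider(kmsUri, conf);
    System.out.println("Created KeyProvider for " + kmsUri + ": " + provider);
  }
}
```

Nothing else changes on the client side; a deployment that overrides `KMS_HTTP_PORT` in `kms-env.sh` keeps its configured port, since the scripts above only replace the fallback default.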