diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
index 8658e448c12..8035f5722f5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
@@ -51,6 +51,19 @@
       <scope>test</scope>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java
index ac4afa845cd..b5e7ef66105 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/amfilter/AmIpFilter.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.webproxy.amfilter;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.server.webproxy.ProxyUtils;
import org.apache.hadoop.yarn.server.webproxy.WebAppProxyServlet;
import org.slf4j.Logger;
@@ -214,15 +215,25 @@ public class AmIpFilter implements Filter {
return addr;
}
- private boolean isValidUrl(String url) {
+ @VisibleForTesting
+ public boolean isValidUrl(String url) {
boolean isValid = false;
try {
- HttpURLConnection conn =
- (HttpURLConnection) new URL(url).openConnection();
+ HttpURLConnection conn = (HttpURLConnection) new URL(url)
+ .openConnection();
conn.connect();
isValid = conn.getResponseCode() == HttpURLConnection.HTTP_OK;
+      // If security is enabled, any valid RM that answers with 401 Unauthorized
+      // is good enough to access: the AM lacks the credentials to complete
+      // authentication, so a 401 (or 403) response is acceptable in that case.
+ if (!isValid && UserGroupInformation.isSecurityEnabled()) {
+ isValid = (conn
+ .getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED)
+ || (conn.getResponseCode() == HttpURLConnection.HTTP_FORBIDDEN);
+ return isValid;
+ }
} catch (Exception e) {
- LOG.debug("Failed to connect to " + url + ": " + e.toString());
+ LOG.warn("Failed to connect to " + url + ": " + e.toString());
}
return isValid;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestSecureAmFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestSecureAmFilter.java
new file mode 100644
index 00000000000..e87b76541e5
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestSecureAmFilter.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.webproxy.amfilter;
+
+import java.io.File;
+import java.net.URI;
+import java.net.URL;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.HashMap;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.KerberosTestUtils;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilterInitializer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * Tests {@link AmIpFilter} behavior in a secure (Kerberos) environment.
+ * Requests to hosts not declared as proxies should pass through the
+ * filter; other requests can be filtered with or without a user name.
+ */
+public class TestSecureAmFilter {
+
+ private String proxyHost = "localhost";
+ private static final File TEST_ROOT_DIR = new File("target",
+ TestSecureAmFilter.class.getName() + "-root");
+ private static File httpSpnegoKeytabFile = new File(
+ KerberosTestUtils.getKeytabFile());
+ private static Configuration rmconf = new Configuration();
+ private static String httpSpnegoPrincipal = KerberosTestUtils
+ .getServerPrincipal();
+ private static boolean miniKDCStarted = false;
+ private static MiniKdc testMiniKDC;
+
+ @BeforeClass
+ public static void setUp() {
+ rmconf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+ rmconf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
+ true);
+ rmconf.set("hadoop.http.filter.initializers",
+ RMAuthenticationFilterInitializer.class.getName());
+ rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY,
+ httpSpnegoPrincipal);
+ rmconf.set(YarnConfiguration.RM_KEYTAB,
+ httpSpnegoKeytabFile.getAbsolutePath());
+ rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
+ httpSpnegoKeytabFile.getAbsolutePath());
+ UserGroupInformation.setConfiguration(rmconf);
+ try {
+ testMiniKDC = new MiniKdc(MiniKdc.createConf(), TEST_ROOT_DIR);
+ setupKDC();
+ } catch (Exception e) {
+ assertTrue("Couldn't create MiniKDC", false);
+ }
+ }
+
+ @AfterClass
+ public static void tearDown() {
+ if (testMiniKDC != null) {
+ testMiniKDC.stop();
+ }
+ }
+
+ private static void setupKDC() throws Exception {
+ if (!miniKDCStarted) {
+ testMiniKDC.start();
+ getKdc().createPrincipal(httpSpnegoKeytabFile, "HTTP/localhost");
+ miniKDCStarted = true;
+ }
+ }
+
+ private static MiniKdc getKdc() {
+ return testMiniKDC;
+ }
+
+ private class TestAmIpFilter extends AmIpFilter {
+
+ private Set proxyAddresses = null;
+
+ protected Set getProxyAddresses() {
+ if (proxyAddresses == null) {
+ proxyAddresses = new HashSet();
+ }
+ proxyAddresses.add(proxyHost);
+ return proxyAddresses;
+ }
+ }
+
+ @Test
+ public void testFindRedirectUrl() throws Exception {
+ final String rm1 = "rm1";
+ final String rm2 = "rm2";
+ // generate a valid URL
+ final String rm1Url = startSecureHttpServer();
+ // invalid url
+ final String rm2Url = "host2:8088";
+
+ TestAmIpFilter filter = new TestAmIpFilter();
+ TestAmIpFilter spy = Mockito.spy(filter);
+ // make sure findRedirectUrl() go to HA branch
+ spy.proxyUriBases = new HashMap<>();
+ spy.proxyUriBases.put(rm1, rm1Url);
+ spy.proxyUriBases.put(rm2, rm2Url);
+ spy.rmUrls = new String[] {rm1, rm2};
+
+ assertTrue(spy.isValidUrl(rm1Url));
+ assertFalse(spy.isValidUrl(rm2Url));
+ assertEquals(spy.findRedirectUrl(), rm1Url);
+ }
+
+ private String startSecureHttpServer() throws Exception {
+ HttpServer2.Builder builder = new HttpServer2.Builder()
+ .setName("test").setConf(rmconf)
+ .addEndpoint(new URI("http://localhost")).setACL(
+ new AccessControlList(rmconf.get(YarnConfiguration.YARN_ADMIN_ACL,
+ YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)));
+
+ builder.setUsernameConfKey(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY)
+ .setKeytabConfKey(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
+ .setSecurityEnabled(UserGroupInformation.isSecurityEnabled());
+ HttpServer2 server = builder.build();
+ server.start();
+ URL baseUrl = new URL(
+ "http://" + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+ return baseUrl.toString();
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/resources/krb5.conf b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/resources/krb5.conf
new file mode 100644
index 00000000000..6cdd3d6923f
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/resources/krb5.conf
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+ default_realm = APACHE.ORG
+ extra_addresses = 127.0.0.1
+ kdc_realm = _REALM_
+ udp_preference_limit = _UDP_LIMIT_
+ #_KDC_TCP_PORT_
+ #_KDC_UDP_PORT_
+
+[realms]
+ _REALM_ = {
+ admin_server = localhost:_KDC_PORT_
+ kdc = localhost:_KDC_PORT_
+ }
+[domain_realm]
+ localhost = _REALM_