diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index 887380e920d..d3176cabad0 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -61,6 +61,7 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.util.Shell;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
@@ -127,6 +128,10 @@ public class HttpServer implements FilterContainer {
   static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX = "kerberos.name.rules";
   public static final String HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_KEY =
     HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_KRB_NAME_SUFFIX;
+  static final String HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_SUFFIX = "kerberos.proxyuser.enable";
+  public static final String HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY =
+    HTTP_SPNEGO_AUTHENTICATION_PREFIX + HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_SUFFIX;
+  public static final boolean HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_DEFAULT = false;
   static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX =
     "signature.secret.file";
   public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY =
@@ -145,6 +150,7 @@ public class HttpServer implements FilterContainer {
   public static final String ADMINS_ACL = "admins.acl";
   public static final String BIND_ADDRESS = "bind.address";
   public static final String SPNEGO_FILTER = "SpnegoFilter";
+  public static final String SPNEGO_PROXYUSER_FILTER = "SpnegoProxyUserFilter";
   public static final String NO_CACHE_FILTER = "NoCacheFilter";
 
   public static final String APP_DIR = "webapps";
@@ -1026,7 +1032,18 @@ public class HttpServer implements FilterContainer {
           + "to enable SPNEGO/Kerberos authentication for the Web UI");
     }
 
-    addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(), params);
+    if (conf.getBoolean(HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY,
+        HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_DEFAULT)) {
+      // Copy/rename standard hadoop proxyuser settings to filter init parameters
+      for (Map.Entry<String, String> proxyEntry :
+          conf.getPropsWithPrefix(ProxyUsers.CONF_HADOOP_PROXYUSER).entrySet()) {
+        params.put(ProxyUserAuthenticationFilter.PROXYUSER_PREFIX + proxyEntry.getKey(),
+          proxyEntry.getValue());
+      }
+      addGlobalFilter(SPNEGO_PROXYUSER_FILTER, ProxyUserAuthenticationFilter.class.getName(), params);
+    } else {
+      addGlobalFilter(SPNEGO_FILTER, AuthenticationFilter.class.getName(), params);
+    }
   }
 
   /**
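For orientation: the hunk above only installs the new filter when the enable key is set, and it forwards every hadoop.proxyuser.* property to the filter as an init parameter under the "proxyuser" prefix. A rough sketch of the configuration a deployment would supply is below; it is illustrative only, and the proxying user name ("wheel"), the allowed users, and the principal/keytab values are assumptions, not values taken from this patch.

// Illustrative configuration sketch only -- not part of the patch. The proxy user "wheel",
// the allowed users, and the principal/keytab values are assumptions for the example.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpServer;

public final class ProxyUserUiConfigSketch {
  public static Configuration exampleConf() {
    Configuration conf = new Configuration();
    // SPNEGO for the web UI is a prerequisite (same keys the test below uses).
    conf.set("hbase.security.authentication", "kerberos");
    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, "HTTP/_HOST@EXAMPLE.COM");
    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, "/etc/security/keytabs/spnego.keytab");
    // New switch added by this patch; it defaults to false.
    conf.setBoolean(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY, true);
    // Standard Hadoop proxyuser rules; the hunk above copies every hadoop.proxyuser.* entry
    // into the filter's init parameters.
    conf.set("hadoop.proxyuser.wheel.hosts", "*");
    conf.set("hadoop.proxyuser.wheel.users", "alice,bob");
    return conf;
  }
}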
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProxyUserAuthenticationFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProxyUserAuthenticationFilter.java
new file mode 100644
index 00000000000..5fb17c97cf5
--- /dev/null
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProxyUserAuthenticationFilter.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+/**
+ * This file has been copied directly (changing only the package name and the ASF license
+ * text format, and adding the Yetus annotations) from Hadoop, as the Hadoop version that HBase
+ * depends on doesn't have it yet
+ * (as of 2020 Apr 24, there is no Hadoop release that has it either).
+ *
+ * Hadoop version:
+ * unreleased, master branch commit 4ea6c2f457496461afc63f38ef4cef3ab0efce49
+ *
+ * Hadoop path:
+ * hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authentication/
+ * server/ProxyUserAuthenticationFilter.java
+ *
+ * AuthenticationFilter which adds support to perform operations
+ * using end user instead of proxy user. Fetches the end user from
+ * doAs Query Parameter.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class ProxyUserAuthenticationFilter extends AuthenticationFilter {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      ProxyUserAuthenticationFilter.class);
+
+  private static final String DO_AS = "doas";
+  public static final String PROXYUSER_PREFIX = "proxyuser";
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    Configuration conf = getProxyuserConfiguration(filterConfig);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf, PROXYUSER_PREFIX);
+    super.init(filterConfig);
+  }
+
+  @Override
+  protected void doFilter(FilterChain filterChain, HttpServletRequest request,
+      HttpServletResponse response) throws IOException, ServletException {
+    final HttpServletRequest lowerCaseRequest = toLowerCase(request);
+    String doAsUser = lowerCaseRequest.getParameter(DO_AS);
+
+    if (doAsUser != null && !doAsUser.equals(request.getRemoteUser())) {
+      LOG.debug("doAsUser = {}, RemoteUser = {} , RemoteAddress = {} ",
+          doAsUser, request.getRemoteUser(), request.getRemoteAddr());
+      UserGroupInformation requestUgi = (request.getUserPrincipal() != null) ?
+          UserGroupInformation.createRemoteUser(request.getRemoteUser())
+          : null;
+      if (requestUgi != null) {
+        requestUgi = UserGroupInformation.createProxyUser(doAsUser,
+            requestUgi);
+        try {
+          ProxyUsers.authorize(requestUgi, request.getRemoteAddr());
+
+          final UserGroupInformation ugiF = requestUgi;
+          request = new HttpServletRequestWrapper(request) {
+            @Override
+            public String getRemoteUser() {
+              return ugiF.getShortUserName();
+            }
+
+            @Override
+            public Principal getUserPrincipal() {
+              return new Principal() {
+                @Override
+                public String getName() {
+                  return ugiF.getUserName();
+                }
+              };
+            }
+          };
+          LOG.debug("Proxy user Authentication successful");
+        } catch (AuthorizationException ex) {
+          HttpExceptionUtils.createServletExceptionResponse(response,
+              HttpServletResponse.SC_FORBIDDEN, ex);
+          LOG.warn("Proxy user Authentication exception", ex);
+          return;
+        }
+      }
+    }
+    super.doFilter(filterChain, request, response);
+  }
+
+  protected Configuration getProxyuserConfiguration(FilterConfig filterConfig)
+      throws ServletException {
+    Configuration conf = new Configuration(false);
+    Enumeration<?> names = filterConfig.getInitParameterNames();
+    while (names.hasMoreElements()) {
+      String name = (String) names.nextElement();
+      if (name.startsWith(PROXYUSER_PREFIX + ".")) {
+        String value = filterConfig.getInitParameter(name);
+        conf.set(name, value);
+      }
+    }
+    return conf;
+  }
+
+  static boolean containsUpperCase(final Iterable<String> strings) {
+    for (String s : strings) {
+      for (int i = 0; i < s.length(); i++) {
+        if (Character.isUpperCase(s.charAt(i))) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  public static HttpServletRequest toLowerCase(
+      final HttpServletRequest request) {
+    @SuppressWarnings("unchecked")
+    final Map<String, String[]> original = (Map<String, String[]>)
+        request.getParameterMap();
+    if (!containsUpperCase(original.keySet())) {
+      return request;
+    }
+
+    final Map<String, List<String>> m = new HashMap<String, List<String>>();
+    for (Map.Entry<String, String[]> entry : original.entrySet()) {
+      final String key = StringUtils.toLowerCase(entry.getKey());
+      List<String> strings = m.get(key);
+      if (strings == null) {
+        strings = new ArrayList<String>();
+        m.put(key, strings);
+      }
+      for (String v : entry.getValue()) {
+        strings.add(v);
+      }
+    }
+
+    return new HttpServletRequestWrapper(request) {
+      private Map<String, String[]> parameters = null;
+
+      @Override
+      public Map<String, String[]> getParameterMap() {
+        if (parameters == null) {
+          parameters = new HashMap<String, String[]>();
+          for (Map.Entry<String, List<String>> entry : m.entrySet()) {
+            final List<String> a = entry.getValue();
+            parameters.put(entry.getKey(), a.toArray(new String[a.size()]));
+          }
+        }
+        return parameters;
+      }
+
+      @Override
+      public String getParameter(String name) {
+        final List<String> a = m.get(name);
+        return a == null ? null : a.get(0);
+      }
+
+      @Override
+      public String[] getParameterValues(String name) {
+        return getParameterMap().get(name);
+      }
+
+      @Override
+      public Enumeration<String> getParameterNames() {
+        final Iterator<String> i = m.keySet().iterator();
+        return new Enumeration<String>() {
+          @Override
+          public boolean hasMoreElements() {
+            return i.hasNext();
+          }
+
+          @Override
+          public String nextElement() {
+            return i.next();
+          }
+        };
+      }
+    };
+  }
+
+}
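As doFilter() and toLowerCase() above show, the filter reads the impersonation target from the doas query parameter regardless of case, asks ProxyUsers to authorize the already-authenticated caller, and then rewraps the request so downstream code sees the doAs user. A small hypothetical test sketch of the case-insensitive lookup follows; it is not part of the patch and assumes Mockito is available on the test classpath.

// Hypothetical sketch, not part of the patch: shows that "?DoAs=bob" is read the same way
// as "?doas=bob" because toLowerCase() rewraps the parameter map. Assumes Mockito.
import java.util.Collections;
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hbase.http.ProxyUserAuthenticationFilter;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;

public class ProxyUserDoAsCaseSketch {
  @Test
  public void doAsParameterIsCaseInsensitive() {
    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
    Mockito.when(request.getParameterMap())
        .thenReturn(Collections.singletonMap("DoAs", new String[] { "bob" }));

    HttpServletRequest lowered = ProxyUserAuthenticationFilter.toLowerCase(request);

    // The wrapper exposes the parameter under its lower-cased name.
    Assert.assertEquals("bob", lowered.getParameter("doas"));
  }
}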
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
index a854690eb3f..1be7297536e 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
@@ -102,11 +102,22 @@ public class HttpServerFunctionalTest extends Assert {
   }
 
   public static HttpServer createTestServerWithSecurity(Configuration conf) throws IOException {
+    prepareTestWebapp();
+    return localServerBuilder(TEST).setFindPort(true).setConf(conf).setSecurityEnabled(true)
+      // InfoServer normally sets these for us
+      .setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
+      .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
+      .build();
+  }
+
+  public static HttpServer createTestServerWithSecurityAndAcl(Configuration conf, AccessControlList acl) throws IOException {
     prepareTestWebapp();
     return localServerBuilder(TEST).setFindPort(true).setConf(conf).setSecurityEnabled(true)
       // InfoServer normally sets these for us
       .setUsernameConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY)
       .setKeytabConfKey(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY)
+      .setSecurityEnabled(true)
+      .setACL(acl)
       .build();
   }
 
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestProxyUserSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestProxyUserSpnegoHttpServer.java
new file mode 100644
index 00000000000..2036b314487
--- /dev/null
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestProxyUserSpnegoHttpServer.java
@@ -0,0 +1,295 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import java.io.File;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.Principal;
+import java.security.PrivilegedExceptionAction;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosTicket;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
+import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthSchemeProvider;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.KerberosCredentials;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.config.AuthSchemes;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Lookup;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test class for SPNEGO Proxyuser authentication on the HttpServer. Uses Kerby's MiniKDC and Apache
+ * HttpComponents to verify that the doas= mechanism works, and that the proxyuser settings are
+ * observed.
+ */
+@Category({MiscTests.class, SmallTests.class})
+public class TestProxyUserSpnegoHttpServer extends HttpServerFunctionalTest {
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(TestProxyUserSpnegoHttpServer.class);
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestProxyUserSpnegoHttpServer.class);
+  private static final String KDC_SERVER_HOST = "localhost";
+  private static final String WHEEL_PRINCIPAL = "wheel";
+  private static final String UNPRIVILEGED_PRINCIPAL = "unprivileged";
+  private static final String PRIVILEGED_PRINCIPAL = "privileged";
+  private static final String PRIVILEGED2_PRINCIPAL = "privileged2";
+
+  private static HttpServer server;
+  private static URL baseUrl;
+  private static SimpleKdcServer kdc;
+  private static File infoServerKeytab;
+  private static File wheelKeytab;
+  private static File unprivilegedKeytab;
+  private static File privilegedKeytab;
+  private static File privileged2Keytab;
+
+
+  @BeforeClass
+  public static void setupServer() throws Exception {
+    Configuration conf = new Configuration();
+    HBaseCommonTestingUtility htu = new HBaseCommonTestingUtility(conf);
+
+    final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
+
+    kdc = buildMiniKdc();
+    kdc.start();
+    File keytabDir = new File(htu.getDataTestDir("keytabs").toString());
+    if (keytabDir.exists()) {
+      deleteRecursively(keytabDir);
+    }
+    keytabDir.mkdirs();
+
+    infoServerKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + ".keytab");
+    wheelKeytab = new File(keytabDir, WHEEL_PRINCIPAL + ".keytab");
+    unprivilegedKeytab = new File(keytabDir, UNPRIVILEGED_PRINCIPAL + ".keytab");
+    privilegedKeytab = new File(keytabDir, PRIVILEGED_PRINCIPAL + ".keytab");
+    privileged2Keytab = new File(keytabDir, PRIVILEGED2_PRINCIPAL + ".keytab");
+
+    setupUser(kdc, wheelKeytab, WHEEL_PRINCIPAL);
+    setupUser(kdc, unprivilegedKeytab, UNPRIVILEGED_PRINCIPAL);
+    setupUser(kdc, privilegedKeytab, PRIVILEGED_PRINCIPAL);
+    setupUser(kdc, privileged2Keytab, PRIVILEGED2_PRINCIPAL);
+
+    setupUser(kdc, infoServerKeytab, serverPrincipal);
+
+    buildSpnegoConfiguration(conf, serverPrincipal, infoServerKeytab);
+    AccessControlList acl = buildAdminAcl(conf);
+
+    server = createTestServerWithSecurityAndAcl(conf, acl);
+    server.addPrivilegedServlet("echo", "/echo", EchoServlet.class);
+    server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
+    server.start();
+    baseUrl = getServerURL(server);
+
+    LOG.info("HTTP server started: " + baseUrl);
+  }
+
+  @AfterClass
+  public static void stopServer() throws Exception {
+    try {
+      if (null != server) {
+        server.stop();
+      }
+    } catch (Exception e) {
+      LOG.info("Failed to stop info server", e);
+    }
+    try {
+      if (null != kdc) {
+        kdc.stop();
+      }
+    } catch (Exception e) {
+      LOG.info("Failed to stop mini KDC", e);
+    }
+  }
+
+  private static void setupUser(SimpleKdcServer kdc, File keytab, String principal)
+      throws KrbException {
+    kdc.createPrincipal(principal);
+    kdc.exportPrincipal(principal, keytab);
+  }
+
+  private static SimpleKdcServer buildMiniKdc() throws Exception {
+    SimpleKdcServer kdc = new SimpleKdcServer();
+
+    final File target = new File(System.getProperty("user.dir"), "target");
+    File kdcDir = new File(target, TestProxyUserSpnegoHttpServer.class.getSimpleName());
+    if (kdcDir.exists()) {
+      deleteRecursively(kdcDir);
+    }
+    kdcDir.mkdirs();
+    kdc.setWorkDir(kdcDir);
+
+    kdc.setKdcHost(KDC_SERVER_HOST);
+    int kdcPort = getFreePort();
+    kdc.setAllowTcp(true);
+    kdc.setAllowUdp(false);
+    kdc.setKdcTcpPort(kdcPort);
+
+    LOG.info("Starting KDC server at " + KDC_SERVER_HOST + ":" + kdcPort);
+
+    kdc.init();
+
+    return kdc;
+  }
+
+  protected static Configuration buildSpnegoConfiguration(Configuration conf, String serverPrincipal,
+      File serverKeytab) {
+    KerberosName.setRules("DEFAULT");
+
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
+
+    // Enable Kerberos (pre-req)
+    conf.set("hbase.security.authentication", "kerberos");
+    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, serverPrincipal);
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, serverKeytab.getAbsolutePath());
+
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, PRIVILEGED_PRINCIPAL);
+    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PROXYUSER_ENABLE_KEY, "true");
+    conf.set("hadoop.security.authorization", "true");
+
+    conf.set("hadoop.proxyuser.wheel.hosts", "*");
+    conf.set("hadoop.proxyuser.wheel.users", PRIVILEGED_PRINCIPAL + "," + UNPRIVILEGED_PRINCIPAL);
+    return conf;
+  }
+
+  /**
+   * Builds an ACL that will restrict the users who can issue commands to endpoints on the UI
+   * which are meant only for administrators.
+   */
+  public static AccessControlList buildAdminAcl(Configuration conf) {
+    final String userGroups = conf.get(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, null);
+    final String adminGroups = conf.get(
+        HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY, null);
+    if (userGroups == null && adminGroups == null) {
+      // Backwards compatibility - if the user doesn't have anything set, allow all users in.
+      return new AccessControlList("*", null);
+    }
+    return new AccessControlList(userGroups, adminGroups);
+  }
+
+  @Test
+  public void testProxyAllowed() throws Exception {
+    testProxy(WHEEL_PRINCIPAL, PRIVILEGED_PRINCIPAL, HttpURLConnection.HTTP_OK, null);
+  }
+
+  @Test
+  public void testProxyDisallowedForUnprivileged() throws Exception {
+    testProxy(WHEEL_PRINCIPAL, UNPRIVILEGED_PRINCIPAL, HttpURLConnection.HTTP_FORBIDDEN, "403 User unprivileged is unauthorized to access this page.");
+  }
+
+  @Test
+  public void testProxyDisallowedForNotSudoAble() throws Exception {
+    testProxy(WHEEL_PRINCIPAL, PRIVILEGED2_PRINCIPAL, HttpURLConnection.HTTP_FORBIDDEN, "403 Forbidden");
+  }
+
+  public void testProxy(String clientPrincipal, String doAs, int responseCode, String statusLine) throws Exception {
+    // Create the subject for the client
+    final Subject clientSubject = JaasKrbUtil.loginUsingKeytab(WHEEL_PRINCIPAL, wheelKeytab);
+    final Set<Principal> clientPrincipals = clientSubject.getPrincipals();
+    // Make sure the subject has a principal
+    assertFalse(clientPrincipals.isEmpty());
+
+    // Get a TGT for the subject (might have many, different encryption types). The first should
+    // be the default encryption type.
+    Set<KerberosTicket> privateCredentials =
+        clientSubject.getPrivateCredentials(KerberosTicket.class);
+    assertFalse(privateCredentials.isEmpty());
+    KerberosTicket tgt = privateCredentials.iterator().next();
+    assertNotNull(tgt);
+
+    // The name of the principal
+    final String principalName = clientPrincipals.iterator().next().getName();
+
+    // Run this code, logged in as the subject (the client)
+    HttpResponse resp = Subject.doAs(clientSubject, new PrivilegedExceptionAction<HttpResponse>() {
+      @Override
+      public HttpResponse run() throws Exception {
+        // Logs in with Kerberos via GSS
+        GSSManager gssManager = GSSManager.getInstance();
+        // jGSS Kerberos login constant
+        Oid oid = new Oid("1.2.840.113554.1.2.2");
+        GSSName gssClient = gssManager.createName(principalName, GSSName.NT_USER_NAME);
+        GSSCredential credential = gssManager.createCredential(gssClient,
+            GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);
+
+        HttpClientContext context = HttpClientContext.create();
+        Lookup<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider>create()
+            .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
+            .build();
+
+        HttpClient client = HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry)
+            .build();
+        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+        credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential));
+
+        URL url = new URL(getServerURL(server), "/echo?doAs=" + doAs + "&a=b");
+        context.setTargetHost(new HttpHost(url.getHost(), url.getPort()));
+        context.setCredentialsProvider(credentialsProvider);
+        context.setAuthSchemeRegistry(authRegistry);
+
+        HttpGet get = new HttpGet(url.toURI());
+        return client.execute(get, context);
+      }
+    });
+
+    assertNotNull(resp);
+    assertEquals(responseCode, resp.getStatusLine().getStatusCode());
+    if (responseCode == HttpURLConnection.HTTP_OK) {
+      assertTrue(EntityUtils.toString(resp.getEntity()).trim().contains("a:b"));
+    } else {
+      assertTrue(resp.getStatusLine().toString().contains(statusLine));
+    }
+  }
+
+}
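The three test cases above exercise two different checks. ProxyUsers.authorize() decides whether the authenticated caller ("wheel") may impersonate the doAs user at all; when it refuses, as for privileged2, the filter answers with a bare 403 Forbidden. When impersonation is allowed, the admin ACL built by buildAdminAcl() still guards the privileged /echo servlet, which is why the unprivileged case sees the more specific "unauthorized to access this page" message. The sketch below isolates the first check using the same proxyuser rules as buildSpnegoConfiguration(); it is illustrative only, and the 127.0.0.1 remote address is an assumption.

// Minimal sketch, not part of the patch: the authorization decision the filter delegates to
// ProxyUsers. The 127.0.0.1 remote address is an assumption for the example.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;

public final class ProxyAuthorizeSketch {
  public static boolean mayImpersonate(String realUser, String doAsUser) {
    Configuration conf = new Configuration();
    conf.set("hadoop.proxyuser.wheel.hosts", "*");
    conf.set("hadoop.proxyuser.wheel.users", "privileged,unprivileged");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
        doAsUser, UserGroupInformation.createRemoteUser(realUser));
    try {
      ProxyUsers.authorize(proxyUgi, "127.0.0.1");
      return true;   // e.g. mayImpersonate("wheel", "privileged")
    } catch (AuthorizationException e) {
      return false;  // e.g. mayImpersonate("wheel", "privileged2") -> the "403 Forbidden" case
    }
  }
}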