HBASE-27693 Support for Hadoop's LDAP Authentication mechanism (Web UI only)

ydodeja365 2023-05-03 01:10:16 +05:30
parent b776c121d0
commit f80e53d020
6 changed files with 396 additions and 0 deletions
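The change wires Hadoop's hadoop-auth AuthenticationFilter into the HBase web UI so the UI can be protected with LDAP. As orientation before the diff, the sketch below (not part of the commit; the LDAP URL and base DN are placeholder values) mirrors the Configuration wiring used by buildLdapConfiguration() in the new TestLdapHttpServer:

// Sketch only (not part of this commit): Configuration wiring for LDAP-backed
// web UI authentication, mirroring TestLdapHttpServer#buildLdapConfiguration.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.HttpServer;

public final class LdapUiAuthSketch {
  public static Configuration ldapUiConf() {
    Configuration conf = new Configuration();
    // Mark the web UI as LDAP-authenticated and install the filter initializer
    // added by this commit.
    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "ldap");
    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
      "org.apache.hadoop.hbase.http.lib.AuthenticationFilterInitializer");
    // Everything prefixed with "hadoop.http.authentication." is handed to the
    // hadoop-auth AuthenticationFilter; type "ldap" selects the LDAP handler.
    conf.set("hadoop.http.authentication.type", "ldap");
    conf.set("hadoop.http.authentication.ldap.providerurl", "ldap://ldap.example.com:389");
    conf.set("hadoop.http.authentication.ldap.basedn", "dc=example,dc=com");
    conf.set("hadoop.http.authentication.ldap.enablestarttls", "false");
    return conf;
  }
}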

@@ -174,6 +174,48 @@
<artifactId>log4j-1.2-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-core</artifactId>
<version>${apacheds.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-protocol-ldap</artifactId>
<version>${apacheds.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-ldif-partition</artifactId>
<version>${apacheds.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-codec-core</artifactId>
<version>${ldap-api.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-ldap-model</artifactId>
<version>${ldap-api.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-server-integ</artifactId>
<version>${apacheds.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
@@ -384,6 +426,12 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-all</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>

@@ -0,0 +1,90 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http.lib;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.FilterContainer;
import org.apache.hadoop.hbase.http.FilterInitializer;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.apache.yetus.audience.InterfaceAudience;

/**
* This class is copied from Hadoop. It initializes the hadoop-auth AuthenticationFilter, which
* provides support for Kerberos HTTP SPNEGO authentication.
* <p>
* It enables anonymous access, simple/pseudo authentication and Kerberos HTTP SPNEGO
* authentication for HBase web UI endpoints.
* <p>
* Refer to the <code>core-default.xml</code> file, after the comment 'HTTP Authentication', for
* details on the configuration options. All related configuration properties are prefixed with
* 'hadoop.http.authentication.'.
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class AuthenticationFilterInitializer extends FilterInitializer {
static final String PREFIX = "hadoop.http.authentication.";
/**
* Initializes the hadoop-auth AuthenticationFilter.
* <p>
* Propagates all Hadoop configuration properties prefixed with "hadoop.http.authentication." to
* the hadoop-auth AuthenticationFilter configuration.
* @param container The filter container
* @param conf Configuration for run-time parameters
*/
@Override
public void initFilter(FilterContainer container, Configuration conf) {
Map<String, String> filterConfig = getFilterConfigMap(conf, PREFIX);
container.addFilter("authentication", AuthenticationFilter.class.getName(), filterConfig);
}
public static Map<String, String> getFilterConfigMap(Configuration conf, String prefix) {
Map<String, String> filterConfig = new HashMap<String, String>();
// setting the cookie path to root '/' so it is used for all resources.
filterConfig.put(AuthenticationFilter.COOKIE_PATH, "/");
Map<String, String> propsWithPrefix = conf.getPropsWithPrefix(prefix);
for (Map.Entry<String, String> entry : propsWithPrefix.entrySet()) {
filterConfig.put(entry.getKey(), entry.getValue());
}
// Resolve _HOST into bind address
String bindAddress = conf.get(HttpServer.BIND_ADDRESS);
String principal = filterConfig.get(KerberosAuthenticationHandler.PRINCIPAL);
if (principal != null) {
try {
principal = SecurityUtil.getServerPrincipal(principal, bindAddress);
} catch (IOException ex) {
throw new RuntimeException("Could not resolve Kerberos principal name: " + ex.toString(),
ex);
}
filterConfig.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
}
return filterConfig;
}
}
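
For illustration only (not part of the commit), the hypothetical snippet below shows what getFilterConfigMap() produces for a small Configuration; the class name and property values are made up:

// Hypothetical caller of getFilterConfigMap(); class name and values are examples.
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.lib.AuthenticationFilterInitializer;

public final class FilterConfigMapSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("hadoop.http.authentication.type", "ldap");

    Map<String, String> filterConfig = AuthenticationFilterInitializer
      .getFilterConfigMap(conf, "hadoop.http.authentication.");
    // Keys come back with the prefix stripped, plus the forced cookie.path entry
    // and any hadoop.http.authentication.* defaults from core-default.xml, e.g.
    //   cookie.path=/, type=ldap, token.validity=36000, simple.anonymous.allowed=true
    System.out.println(filterConfig);
  }
}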

@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;

/**
* This class defines the constants used by the LDAP integration tests.
*/
public final class LdapConstants {
/**
* Hidden constructor: this class only holds constants and is not meant to be instantiated.
*/
private LdapConstants() {
}
public static final String LDAP_BASE_DN = "dc=example,dc=com";
public static final String LDAP_SERVER_ADDR = "localhost";
}

@@ -0,0 +1,140 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;

import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.commons.codec.binary.Base64;
import org.apache.directory.server.annotations.CreateLdapServer;
import org.apache.directory.server.annotations.CreateTransport;
import org.apache.directory.server.core.annotations.ApplyLdifs;
import org.apache.directory.server.core.annotations.ContextEntry;
import org.apache.directory.server.core.annotations.CreateDS;
import org.apache.directory.server.core.annotations.CreatePartition;
import org.apache.directory.server.core.integ.CreateLdapServerRule;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.http.resource.JerseyResource;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Test class for LDAP authentication on the HttpServer.
*/
@Category({ MiscTests.class, SmallTests.class })
@CreateLdapServer(
transports = { @CreateTransport(protocol = "LDAP", address = LdapConstants.LDAP_SERVER_ADDR), })
@CreateDS(allowAnonAccess = true,
partitions = { @CreatePartition(name = "Test_Partition", suffix = LdapConstants.LDAP_BASE_DN,
contextEntry = @ContextEntry(entryLdif = "dn: " + LdapConstants.LDAP_BASE_DN + " \n"
+ "dc: example\n" + "objectClass: top\n" + "objectClass: domain\n\n")) })
@ApplyLdifs({ "dn: uid=bjones," + LdapConstants.LDAP_BASE_DN, "cn: Bob Jones", "sn: Jones",
"objectClass: inetOrgPerson", "uid: bjones", "userPassword: p@ssw0rd" })
public class TestLdapHttpServer extends HttpServerFunctionalTest {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestLdapHttpServer.class);
@ClassRule
public static CreateLdapServerRule serverRule = new CreateLdapServerRule();
private static final Logger LOG = LoggerFactory.getLogger(TestLdapHttpServer.class);
private static HttpServer server;
private static URL baseUrl;
@BeforeClass
public static void setupServer() throws Exception {
Configuration conf = new Configuration();
buildLdapConfiguration(conf);
server = createTestServer(conf);
server.addUnprivilegedServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*");
server.start();
baseUrl = getServerURL(server);
LOG.info("HTTP server started: " + baseUrl);
}
@AfterClass
public static void stopServer() throws Exception {
try {
if (null != server) {
server.stop();
}
} catch (Exception e) {
LOG.info("Failed to stop info server", e);
}
}
private static Configuration buildLdapConfiguration(Configuration conf) {
conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
// Enable LDAP authentication for the web UI (prerequisite for the properties below)
conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "ldap");
conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
"org.apache.hadoop.hbase.http.lib.AuthenticationFilterInitializer");
conf.set("hadoop.http.authentication.type", "ldap");
conf.set("hadoop.http.authentication.ldap.providerurl", String.format("ldap://%s:%s",
LdapConstants.LDAP_SERVER_ADDR, serverRule.getLdapServer().getPort()));
conf.set("hadoop.http.authentication.ldap.enablestarttls", "false");
conf.set("hadoop.http.authentication.ldap.basedn", LdapConstants.LDAP_BASE_DN);
return conf;
}
@Test
public void testUnauthorizedClientsDisallowed() throws IOException {
URL url = new URL(getServerURL(server), "/echo?a=b");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
}
@Test
public void testAllowedClient() throws IOException {
URL url = new URL(getServerURL(server), "/echo?a=b");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
final Base64 base64 = new Base64(0);
String userCredentials = "bjones:p@ssw0rd";
String basicAuth = "Basic " + base64.encodeToString(userCredentials.getBytes());
conn.setRequestProperty("Authorization", basicAuth);
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
}
@Test
public void testWrongAuthClientsDisallowed() throws IOException {
URL url = new URL(getServerURL(server), "/echo?a=b");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
final Base64 base64 = new Base64(0);
String userCredentials = "bjones:password";
String basicAuth = "Basic " + base64.encodeToString(userCredentials.getBytes());
conn.setRequestProperty("Authorization", basicAuth);
assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
}
}

@@ -0,0 +1,80 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http.lib;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.ArgumentMatchers.any;

import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.http.FilterContainer;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

@Category({ MiscTests.class, SmallTests.class })
public class TestAuthenticationFilterInitializer {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestAuthenticationFilterInitializer.class);
@Test
public void testConfiguration() throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.http.authentication.foo", "bar");
conf.set(HttpServer.BIND_ADDRESS, "barhost");
FilterContainer container = Mockito.mock(FilterContainer.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
Object[] args = invocationOnMock.getArguments();
assertEquals("authentication", args[0]);
assertEquals(AuthenticationFilter.class.getName(), args[1]);
Map<String, String> conf = (Map<String, String>) args[2];
assertEquals("/", conf.get("cookie.path"));
assertEquals("simple", conf.get("type"));
assertEquals("36000", conf.get("token.validity"));
assertNull(conf.get("cookie.domain"));
assertEquals("true", conf.get("simple.anonymous.allowed"));
assertEquals("HTTP/barhost@LOCALHOST", conf.get("kerberos.principal"));
assertEquals(System.getProperty("user.home") + "/hadoop.keytab",
conf.get("kerberos.keytab"));
assertEquals("bar", conf.get("foo"));
return null;
}
}).when(container).addFilter(any(), any(), any());
new AuthenticationFilterInitializer().initFilter(container, conf);
}
}

@@ -725,6 +725,9 @@
<!-- Still need this to ignore some errors when building javadoc-->
<doclint>none</doclint>
<javax.activation.version>1.2.0</javax.activation.version>
<!-- Required for testing LDAP integration -->
<apacheds.version>2.0.0.AM26</apacheds.version>
<ldap-api.version>2.0.0</ldap-api.version>
</properties>
<!-- Sorted by groups of dependencies then groupId and artifactId -->
<dependencyManagement>