HDFS-14845. Ignore AuthenticationFilterInitializer for HttpFSServerWebServer and honor hadoop.http.authentication configs.
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
This commit is contained in:
parent
a3463812ef
commit
3f89084ac7
|
@@ -74,6 +74,10 @@ The following table lists the configuration property names that are deprecated i
 | hadoop.pipes.java.reducer | mapreduce.pipes.isjavareducer |
 | hadoop.pipes.partitioner | mapreduce.pipes.partitioner |
 | heartbeat.recheck.interval | dfs.namenode.heartbeat.recheck-interval |
+| httpfs.authentication.kerberos.keytab | hadoop.http.authentication.kerberos.keytab |
+| httpfs.authentication.kerberos.principal | hadoop.http.authentication.kerberos.principal |
+| httpfs.authentication.signature.secret.file | hadoop.http.authentication.signature.secret.file |
+| httpfs.authentication.type | hadoop.http.authentication.type |
 | io.bytes.per.checksum | dfs.bytes-per-checksum |
 | io.sort.factor | mapreduce.task.io.sort.factor |
 | io.sort.mb | mapreduce.task.io.sort.mb |
|
|
|
@@ -48,6 +48,8 @@ public class HttpFSAuthenticationFilter
   static final String CONF_PREFIX = "httpfs.authentication.";

+  static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
+
   private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
       + ".file";
|
||||
|
||||
|
@@ -55,8 +57,9 @@ public class HttpFSAuthenticationFilter
    * Returns the hadoop-auth configuration from HttpFSServer's configuration.
    * <p>
    * It returns all HttpFSServer's configuration properties prefixed with
-   * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
-   * prefix is removed from the returned property names.
+   * <code>hadoop.http.authentication</code>. The
+   * <code>hadoop.http.authentication</code> prefix is removed from the
+   * returned property names.
    *
    * @param configPrefix parameter not used.
    * @param filterConfig parameter not used.
||||
|
@@ -70,6 +73,15 @@ public class HttpFSAuthenticationFilter
     Configuration conf = HttpFSServerWebApp.get().getConfig();

     props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
+    for (Map.Entry<String, String> entry : conf) {
+      String name = entry.getKey();
+      if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
+        name = name.substring(HADOOP_HTTP_CONF_PREFIX.length());
+        props.setProperty(name, entry.getValue());
+      }
+    }
+
+    // Replace Hadoop Http Authentication Configs with HttpFS specific Configs
     for (Map.Entry<String, String> entry : conf) {
       String name = entry.getKey();
       if (name.startsWith(CONF_PREFIX)) {
|
||||
|
|
|
@@ -24,11 +24,15 @@ import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
+import java.util.LinkedHashSet;
+import java.util.Set;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.ConfigurationWithLogging;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
+import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.slf4j.Logger;
|
@@ -98,6 +102,24 @@ public class HttpFSServerWebServer {
     int port = conf.getInt(HTTP_PORT_KEY, HTTP_PORT_DEFAULT);
     URI endpoint = new URI(scheme, null, host, port, null, null, null);

+    // Allow the default authFilter HttpFSAuthenticationFilter
+    String configuredInitializers = conf.get(HttpServer2.
+        FILTER_INITIALIZER_PROPERTY);
+    if (configuredInitializers != null) {
+      Set<String> target = new LinkedHashSet<String>();
+      String[] parts = configuredInitializers.split(",");
+      for (String filterInitializer : parts) {
+        if (!filterInitializer.equals(AuthenticationFilterInitializer.class.
+            getName()) && !filterInitializer.equals(
+            ProxyUserAuthenticationFilterInitializer.class.getName())) {
+          target.add(filterInitializer);
+        }
+      }
+      String actualInitializers =
+          org.apache.commons.lang3.StringUtils.join(target, ",");
+      conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY, actualInitializers);
+    }
+
     httpServer = new HttpServer2.Builder()
         .setName(NAME)
         .setConf(conf)
||||
|
|
|
@@ -148,7 +148,7 @@
   </property>

   <property>
-    <name>httpfs.authentication.signature.secret.file</name>
+    <name>hadoop.http.authentication.signature.secret.file</name>
     <value>${httpfs.config.dir}/httpfs-signature.secret</value>
     <description>
       File containing the secret to sign HttpFS hadoop-auth cookies.
|
@@ -160,11 +160,14 @@

       If the secret file specified here does not exist, random secret is
       generated at startup time.
+
+      httpfs.authentication.signature.secret.file is deprecated. Instead use
+      hadoop.http.authentication.signature.secret.file.
     </description>
   </property>

   <property>
-    <name>httpfs.authentication.type</name>
+    <name>hadoop.http.authentication.type</name>
     <value>simple</value>
     <description>
       Defines the authentication mechanism used by httpfs for its HTTP clients.
|
@@ -175,26 +178,35 @@
       'user.name' query string parameter.

       If using 'kerberos' HTTP clients must use HTTP SPNEGO or delegation tokens.
+
+      httpfs.authentication.type is deprecated. Instead use
+      hadoop.http.authentication.type.
     </description>
   </property>

   <property>
-    <name>httpfs.authentication.kerberos.principal</name>
+    <name>hadoop.http.authentication.kerberos.principal</name>
     <value>HTTP/${httpfs.hostname}@${kerberos.realm}</value>
     <description>
       The HTTP Kerberos principal used by HttpFS in the HTTP endpoint.

       The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos
       HTTP SPNEGO specification.
+
+      httpfs.authentication.kerberos.principal is deprecated. Instead use
+      hadoop.http.authentication.kerberos.principal.
     </description>
   </property>

   <property>
-    <name>httpfs.authentication.kerberos.keytab</name>
+    <name>hadoop.http.authentication.kerberos.keytab</name>
     <value>${user.home}/httpfs.keytab</value>
     <description>
       The Kerberos keytab file with the credentials for the
       HTTP Kerberos principal used by httpfs in the HTTP endpoint.
+
+      httpfs.authentication.kerberos.keytab is deprecated. Instead use
+      hadoop.http.authentication.kerberos.keytab.
     </description>
   </property>
|
|
Loading…
Reference in New Issue