HDFS-2707. HttpFS should read the hadoop-auth secret from a file instead inline from the configuration. (tucu)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1224794 13f79535-47bb-0310-9956-ffa450edef68
commit ae0d48854d
parent 1dcc4b57ee

@@ -86,7 +86,7 @@
     </init-param>
     <init-param>
       <param-name>kerberos.keytab</param-name>
-      <param-value>/tmp/alfredo.keytab</param-value>
+      <param-value>/tmp/my.keytab</param-value>
     </init-param>
     <init-param>
       <param-name>token.validity</param-name>

@@ -459,7 +459,7 @@ public class TestAuthenticationFilter extends TestCase {

     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     token.setExpires(System.currentTimeMillis() + 1000);
-    Signer signer = new Signer("alfredo".getBytes());
+    Signer signer = new Signer("secret".getBytes());
     String tokenSigned = signer.sign(token.toString());

     Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);

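For context on what these tests exercise: Signer appends a signature derived from the secret to the payload string, and verification recovers the original string or fails if the cookie was tampered with. A minimal round-trip sketch, assuming the Signer/SignerException API of hadoop-auth in this era (verify the package path against your version):

import org.apache.hadoop.security.authentication.util.Signer;
import org.apache.hadoop.security.authentication.util.SignerException;

public class SignerRoundTrip {
  public static void main(String[] args) throws SignerException {
    Signer signer = new Signer("secret".getBytes());
    String signed = signer.sign("u&p&t");             // payload plus appended signature
    String payload = signer.verifyAndExtract(signed); // throws SignerException on tampering
    System.out.println(payload);                      // prints: u&p&t
  }
}
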
@@ -504,7 +504,7 @@ public class TestAuthenticationFilter extends TestCase {

     AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
     token.setExpires(System.currentTimeMillis() - 1000);
-    Signer signer = new Signer("alfredo".getBytes());
+    Signer signer = new Signer("secret".getBytes());
     String tokenSigned = signer.sign(token.toString());

     Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);

@@ -564,7 +564,7 @@

     AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
     token.setExpires(System.currentTimeMillis() + 1000);
-    Signer signer = new Signer("alfredo".getBytes());
+    Signer signer = new Signer("secret".getBytes());
     String tokenSigned = signer.sign(token.toString());

     Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);

@@ -52,7 +52,7 @@
   <p>
     If a custom authentication mechanism is required for the HTTP web-consoles, it is possible
     to implement a plugin to support the alternate authentication mechanism (refer to
-    Hadoop Alfredo for details on writing an <code>AuthenticatorHandler</code>).
+    Hadoop hadoop-auth for details on writing an <code>AuthenticatorHandler</code>).
   </p>
   <p>
     The next section describes how to configure Hadoop HTTP web-consoles to require user

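The interface referred to here is hadoop-auth's AuthenticationHandler ("AuthenticatorHandler" appears to be a typo carried over from the original doc). A minimal sketch of a custom handler, assuming the era's four-method interface (getType/init/destroy/authenticate); verify the exact signatures against your hadoop-auth version:

import java.io.IOException;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
import org.apache.hadoop.security.authentication.server.AuthenticationToken;

// Hypothetical handler that trusts a reverse-proxy-set header; illustrative only,
// not secure unless the proxy strips the header from client requests.
public class HeaderAuthenticationHandler implements AuthenticationHandler {
  public static final String TYPE = "header"; // scheme name for the "type" config property

  @Override
  public String getType() {
    return TYPE;
  }

  @Override
  public void init(Properties config) throws ServletException {
    // no configuration needed for this sketch
  }

  @Override
  public void destroy() {
  }

  @Override
  public AuthenticationToken authenticate(HttpServletRequest request,
      HttpServletResponse response) throws IOException, AuthenticationException {
    String user = request.getHeader("X-Authenticated-User");
    if (user == null) {
      // Returning null tells the filter the authentication sequence is not
      // complete; the handler is responsible for the challenge status.
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      return null;
    }
    return new AuthenticationToken(user, user, TYPE);
  }
}
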
@@ -29,7 +29,7 @@ import java.util.HashMap;
 import java.util.Map;

 /**
- * Initializes Alfredo AuthenticationFilter which provides support for
+ * Initializes hadoop-auth AuthenticationFilter which provides support for
  * Kerberos HTTP SPNEGO authentication.
  * <p/>
  * It enables anonymous access, simple/pseudo and Kerberos HTTP SPNEGO

@@ -48,9 +48,9 @@ public class AuthenticationFilterInitializer extends FilterInitializer {
   static final String SIGNATURE_SECRET_FILE = AuthenticationFilter.SIGNATURE_SECRET + ".file";

   /**
-   * Initializes Alfredo AuthenticationFilter.
+   * Initializes hadoop-auth AuthenticationFilter.
    * <p/>
-   * Propagates to Alfredo AuthenticationFilter configuration all Hadoop
+   * Propagates to hadoop-auth AuthenticationFilter configuration all Hadoop
    * configuration properties prefixed with "hadoop.http.authentication."
    *
    * @param container The filter container

@@ -0,0 +1 @@
+hadoop httpfs secret

@@ -21,18 +21,23 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

 import javax.servlet.FilterConfig;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
 import java.util.Map;
 import java.util.Properties;

 /**
- * Subclass of Alfredo's <code>AuthenticationFilter</code> that obtains its configuration
+ * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
  * from HttpFSServer's server configuration.
  */
 public class AuthFilter extends AuthenticationFilter {
   private static final String CONF_PREFIX = "httpfs.authentication.";

+  private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
+
   /**
-   * Returns the Alfredo configuration from HttpFSServer's configuration.
+   * Returns the hadoop-auth configuration from HttpFSServer's configuration.
    * <p/>
    * It returns all HttpFSServer's configuration properties prefixed with
    * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>

@@ -41,7 +46,7 @@ public class AuthFilter extends AuthenticationFilter {
    * @param configPrefix parameter not used.
    * @param filterConfig parameter not used.
    *
-   * @return Alfredo configuration read from HttpFSServer's configuration.
+   * @return hadoop-auth configuration read from HttpFSServer's configuration.
    */
   @Override
   protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {

@@ -57,6 +62,25 @@ public class AuthFilter extends AuthenticationFilter {
         props.setProperty(name, value);
       }
     }

+    String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
+    if (signatureSecretFile == null) {
+      throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+    }
+
+    try {
+      StringBuilder secret = new StringBuilder();
+      Reader reader = new FileReader(signatureSecretFile);
+      int c = reader.read();
+      while (c > -1) {
+        secret.append((char) c);
+        c = reader.read();
+      }
+      reader.close();
+      props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
+    } catch (IOException ex) {
+      throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
+    }
+
     return props;
   }

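Note: the loop above appends every character up to EOF, so a trailing newline left in the secret file by a text editor becomes part of the signing secret; load-balanced servers must therefore share byte-identical files, not merely files with the same visible text. A sketch of the same read isolated as a helper (hypothetical, not part of the patch), with the close moved into a finally block so the reader is not leaked if read() throws:

import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;

class SecretFiles {
  // Reads the whole file as chars, exactly like AuthFilter does above;
  // the caller decides whether to trim whitespace.
  static String readSecret(String path) throws IOException {
    StringBuilder secret = new StringBuilder();
    Reader reader = new FileReader(path);
    try {
      int c = reader.read();
      while (c > -1) {
        secret.append((char) c);
        c = reader.read();
      }
    } finally {
      reader.close(); // close even on failure, unlike the patch's straight-line close
    }
    return secret.toString();
  }
}
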
@@ -69,6 +69,19 @@
     </description>
   </property>

+  <property>
+    <name>httpfs.authentication.signature.secret.file</name>
+    <value>${httpfs.config.dir}/httpfs-signature.secret</value>
+    <description>
+      File containing the secret to sign HttpFS hadoop-auth cookies.
+
+      This file should be readable only by the system user running HttpFS service.
+
+      If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
+      they should share the secret file.
+    </description>
+  </property>
+
   <property>
     <name>httpfs.authentication.type</name>
     <value>simple</value>

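The description asks that the file be readable only by the HttpFS system user, and the checked-in default ("hadoop httpfs secret" above) is clearly a placeholder. A minimal sketch of generating a random, owner-only secret file; the class name and hex encoding are illustrative assumptions, not part of the patch:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.security.SecureRandom;

public class HttpFSSecretFileGenerator { // hypothetical helper
  public static void main(String[] args) throws IOException {
    File secretFile = new File(args.length > 0 ? args[0] : "httpfs-signature.secret");

    // 32 random bytes, hex-encoded, as the signing secret
    byte[] raw = new byte[32];
    new SecureRandom().nextBytes(raw);
    StringBuilder secret = new StringBuilder();
    for (byte b : raw) {
      secret.append(String.format("%02x", b));
    }

    // no trailing newline: AuthFilter reads every character of the file
    Writer w = new FileWriter(secretFile);
    w.write(secret.toString());
    w.close();

    // owner-only permissions, per the description above (Java 6+ File API)
    secretFile.setReadable(false, false);
    secretFile.setReadable(true, true);
    secretFile.setWritable(false, false);
    secretFile.setWritable(true, true);
  }
}
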
@@ -45,9 +45,11 @@ import org.mortbay.jetty.webapp.WebAppContext;

 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.io.Writer;
 import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;

@@ -63,6 +65,11 @@ public class TestHttpFSFileSystem extends HFSTestCase {
     Assert.assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
     String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
     Configuration conf = new Configuration(false);
     conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);

@@ -70,6 +77,7 @@ public class TestHttpFSFileSystem extends HFSTestCase {
       .getHadoopProxyUserGroups());
     conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper
       .getHadoopProxyUserHosts());
+    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
     File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     OutputStream os = new FileOutputStream(hoopSite);
     conf.writeXml(os);

@@ -39,8 +39,10 @@ import org.mortbay.jetty.webapp.WebAppContext;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.text.MessageFormat;

@@ -65,10 +67,16 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

+    File secretFile = new File(new File(homeDir, "conf"), "secret");
+    Writer w = new FileWriter(secretFile);
+    w.write("secret");
+    w.close();
+
     String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
     Configuration conf = new Configuration(false);
     conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
     conf.set("httpfs.groups." + CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, DummyGroupMapping.class.getName());
+    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
     File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     OutputStream os = new FileOutputStream(hoopSite);
     conf.writeXml(os);

@@ -188,6 +188,9 @@ Trunk (unreleased changes)
     HttpFS server should check that upload requests have correct
     content-type. (tucu)

+    HDFS-2707. HttpFS should read the hadoop-auth secret from a file
+    instead inline from the configuration. (tucu)
+
 Release 0.23.1 - UNRELEASED

   INCOMPATIBLE CHANGES