diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
index 415600e97e9..2989500ec50 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
@@ -459,7 +459,7 @@ public class TestAuthenticationFilter extends TestCase {
AuthenticationToken token = new AuthenticationToken("u", "p", "t");
token.setExpires(System.currentTimeMillis() + 1000);
- Signer signer = new Signer("alfredo".getBytes());
+ Signer signer = new Signer("secret".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
@@ -504,7 +504,7 @@ public class TestAuthenticationFilter extends TestCase {
AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
token.setExpires(System.currentTimeMillis() - 1000);
- Signer signer = new Signer("alfredo".getBytes());
+ Signer signer = new Signer("secret".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
@@ -564,7 +564,7 @@ public class TestAuthenticationFilter extends TestCase {
AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
token.setExpires(System.currentTimeMillis() + 1000);
- Signer signer = new Signer("alfredo".getBytes());
+ Signer signer = new Signer("secret".getBytes());
String tokenSigned = signer.sign(token.toString());
Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
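The three test hunks above all exercise the same pattern: sign an AuthenticationToken with the shared secret and carry it in the hadoop-auth cookie. A minimal standalone sketch of that sign/verify round trip, assuming the 0.23-era Signer API where the constructor takes the raw secret bytes:

import org.apache.hadoop.security.authentication.util.Signer;
import org.apache.hadoop.security.authentication.util.SignerException;

public class SignerRoundTrip {
  public static void main(String[] args) throws SignerException {
    // Build a Signer from the shared secret, as the tests above do.
    Signer signer = new Signer("secret".getBytes());
    // sign() appends a signature to the serialized token payload.
    String signed = signer.sign("u=user&t=simple");
    // verifyAndExtract() returns the original payload, or throws
    // SignerException if the cookie was tampered with.
    String payload = signer.verifyAndExtract(signed);
    System.out.println(payload);
  }
}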
diff --git a/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml b/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
index 15abfbb044a..51a44f80c33 100644
--- a/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
+++ b/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
@@ -52,7 +52,7 @@
If a custom authentication mechanism is required for the HTTP web-consoles, it is possible
to implement a plugin to support the alternate authentication mechanism (refer to
- Hadoop Alfredo for details on writing an AuthenticatorHandler).
+ Hadoop hadoop-auth for details on writing an AuthenticatorHandler).
The next section describes how to configure Hadoop HTTP web-consoles to require user
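The pluggable mechanism this doc points at is the hadoop-auth AuthenticationHandler interface (the doc's "AuthenticatorHandler" spelling notwithstanding). A rough skeleton of a custom handler, assuming the interface of this era; the "header" scheme and X-Authenticated-User header are hypothetical:

import java.io.IOException;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
import org.apache.hadoop.security.authentication.server.AuthenticationToken;

public class HeaderAuthenticationHandler implements AuthenticationHandler {
  public static final String TYPE = "header"; // hypothetical scheme name

  @Override
  public String getType() {
    return TYPE;
  }

  @Override
  public void init(Properties config) throws ServletException {
    // read handler-specific properties here
  }

  @Override
  public void destroy() {
  }

  @Override
  public AuthenticationToken authenticate(HttpServletRequest request,
      HttpServletResponse response) throws IOException, AuthenticationException {
    String user = request.getHeader("X-Authenticated-User"); // hypothetical header
    if (user == null) {
      // No credentials: challenge the client; returning null stops the chain.
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      return null;
    }
    return new AuthenticationToken(user, user, TYPE);
  }
}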
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
index cd6ab7b3260..7e9dcebdedb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
@@ -26,7 +26,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * Initializes Alfredo AuthenticationFilter which provides support for
+ * Initializes hadoop-auth AuthenticationFilter which provides support for
* Kerberos HTTP SPNEGO authentication.
*
* It enables anonymous access, simple/pseudo and Kerberos HTTP SPNEGO
@@ -43,9 +43,9 @@ public class AuthenticationFilterInitializer extends FilterInitializer {
private static final String PREFIX = "hadoop.http.authentication.";
/**
- * Initializes Alfredo AuthenticationFilter.
+ * Initializes hadoop-auth AuthenticationFilter.
*
- * Propagates to Alfredo AuthenticationFilter configuration all Hadoop
+ * Propagates to hadoop-auth AuthenticationFilter configuration all Hadoop
* configuration properties prefixed with "hadoop.http.authentication."
*
* @param container The filter container
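The propagation this javadoc describes amounts to copying every Hadoop property under the prefix into the filter config with the prefix stripped. An illustrative sketch of that pattern (not the actual initializer code):

import java.util.HashMap;
import java.util.Map;

public class PrefixPropagation {
  static final String PREFIX = "hadoop.http.authentication.";

  // "hadoop.http.authentication.type" is handed to the filter as "type".
  static Map<String, String> filterParams(Map<String, String> hadoopConf) {
    Map<String, String> filterConfig = new HashMap<String, String>();
    for (Map.Entry<String, String> entry : hadoopConf.entrySet()) {
      if (entry.getKey().startsWith(PREFIX)) {
        filterConfig.put(entry.getKey().substring(PREFIX.length()),
            entry.getValue());
      }
    }
    return filterConfig;
  }
}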
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret
new file mode 100644
index 00000000000..56466e94dea
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret
@@ -0,0 +1 @@
+hadoop httpfs secret
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
index cc33e0af2cf..ab778f6c692 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
@@ -21,18 +21,23 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import javax.servlet.FilterConfig;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
import java.util.Map;
import java.util.Properties;
/**
- * Subclass of Alfredo's AuthenticationFilter that obtains its configuration
+ * Subclass of hadoop-auth AuthenticationFilter that obtains its configuration
* from HttpFSServer's server configuration.
*/
public class AuthFilter extends AuthenticationFilter {
private static final String CONF_PREFIX = "httpfs.authentication.";
+ private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
+
/**
- * Returns the Alfredo configuration from HttpFSServer's configuration.
+ * Returns the hadoop-auth configuration from HttpFSServer's configuration.
*
* It returns all HttpFSServer's configuration properties prefixed with
* httpfs.authentication. The httpfs.authentication
@@ -41,7 +46,7 @@ public class AuthFilter extends AuthenticationFilter {
* @param configPrefix parameter not used.
* @param filterConfig parameter not used.
*
- * @return Alfredo configuration read from HttpFSServer's configuration.
+ * @return hadoop-auth configuration read from HttpFSServer's configuration.
*/
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
@@ -57,6 +62,25 @@ public class AuthFilter extends AuthenticationFilter {
props.setProperty(name, value);
}
}
+
+ String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
+ if (signatureSecretFile == null) {
+ throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+ }
+
+ try {
+ StringBuilder secret = new StringBuilder();
+ Reader reader = new FileReader(signatureSecretFile);
+ int c = reader.read();
+ while (c > -1) {
+ secret.append((char)c);
+ c = reader.read();
+ }
+ reader.close();
+ props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
+ } catch (IOException ex) {
+ throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
+ }
return props;
}
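Note that the loading logic added above leaks the reader if read() throws mid-stream. A standalone sketch of the same logic with the reader closed in a finally block (the path below is illustrative):

import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;

public class SecretFileLoader {
  static String readSecret(String file) {
    StringBuilder secret = new StringBuilder();
    Reader reader = null;
    try {
      reader = new FileReader(file);
      // Read the whole file, character by character, into the secret.
      int c;
      while ((c = reader.read()) > -1) {
        secret.append((char) c);
      }
    } catch (IOException ex) {
      throw new RuntimeException("Could not read secret file: " + file, ex);
    } finally {
      if (reader != null) {
        try {
          reader.close();
        } catch (IOException ignore) {
          // best-effort close
        }
      }
    }
    return secret.toString();
  }

  public static void main(String[] args) {
    System.out.println(readSecret("/tmp/httpfs-signature.secret")); // illustrative path
  }
}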
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
index 6fac2651f54..c58c925663e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
@@ -69,6 +69,19 @@
+  <property>
+    <name>httpfs.authentication.signature.secret.file</name>
+    <value>${httpfs.config.dir}/httpfs-signature.secret</value>
+    <description>
+      File containing the secret to sign HttpFS hadoop-auth cookies.
+
+      This file should be readable only by the system user running HttpFS service.
+
+      If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
+      they should share the secret file.
+    </description>
+  </property>
+
  <property>
    <name>httpfs.authentication.type</name>
    <value>simple</value>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
index 48bc7240d57..579498713f5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
@@ -45,9 +45,11 @@ import org.mortbay.jetty.webapp.WebAppContext;
import java.io.File;
import java.io.FileOutputStream;
+import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.io.Writer;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
@@ -63,6 +65,11 @@ public class TestHttpFSFileSystem extends HFSTestCase {
Assert.assertTrue(new File(homeDir, "temp").mkdir());
HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
+ File secretFile = new File(new File(homeDir, "conf"), "secret");
+ Writer w = new FileWriter(secretFile);
+ w.write("secret");
+ w.close();
+
String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
Configuration conf = new Configuration(false);
conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
@@ -70,6 +77,7 @@ public class TestHttpFSFileSystem extends HFSTestCase {
.getHadoopProxyUserGroups());
conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper
.getHadoopProxyUserHosts());
+ conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
OutputStream os = new FileOutputStream(hoopSite);
conf.writeXml(os);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index faac97de1a0..d397fa35a51 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -39,8 +39,10 @@ import org.mortbay.jetty.webapp.WebAppContext;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
+import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.OutputStream;
+import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.MessageFormat;
@@ -65,10 +67,16 @@ public class TestHttpFSServer extends HFSTestCase {
Assert.assertTrue(new File(homeDir, "temp").mkdir());
HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
+ File secretFile = new File(new File(homeDir, "conf"), "secret");
+ Writer w = new FileWriter(secretFile);
+ w.write("secret");
+ w.close();
+
String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
Configuration conf = new Configuration(false);
conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
conf.set("httpfs.groups." + CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, DummyGroupMapping.class.getName());
+ conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
OutputStream os = new FileOutputStream(hoopSite);
conf.writeXml(os);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0080a8d9686..e56ebf1762d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -122,6 +122,9 @@ Release 0.23.1 - UNRELEASED
HDFS-2705. HttpFS server should check that upload requests have correct
content-type. (tucu)
+ HDFS-2707. HttpFS should read the hadoop-auth secret from a file instead
+ of inline from the configuration. (tucu)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES