diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
index 50a24a9e658..69b2634d822 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -304,7 +304,6 @@
src/test/resources/classutils.txt
- src/main/conf/httpfs-signature.secret
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret
deleted file mode 100644
index 56466e94dea..00000000000
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-signature.secret
+++ /dev/null
@@ -1 +0,0 @@
-hadoop httpfs secret
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
index 362b1b45a6e..7bdaa841e26 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
@@ -21,6 +21,8 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler;
@@ -37,8 +39,8 @@ import java.util.Map;
import java.util.Properties;
/**
- * Subclass of hadoop-auth AuthenticationFilter that obtains its configuration
- * from HttpFSServer's server configuration.
+ * Subclass of hadoop-auth AuthenticationFilter that obtains its
+ * configuration from HttpFSServer's server configuration.
*/
@InterfaceAudience.Private
public class HttpFSAuthenticationFilter
@@ -46,7 +48,8 @@ public class HttpFSAuthenticationFilter
static final String CONF_PREFIX = "httpfs.authentication.";
- private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
+ private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
+ + ".file";
/**
* Returns the hadoop-auth configuration from HttpFSServer's configuration.
@@ -78,22 +81,25 @@ public class HttpFSAuthenticationFilter
String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
if (signatureSecretFile == null) {
- throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+ throw new RuntimeException("Undefined property: "
+ + SIGNATURE_SECRET_FILE);
}
- try {
- StringBuilder secret = new StringBuilder();
- Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(
- signatureSecretFile)), StandardCharsets.UTF_8);
- int c = reader.read();
- while (c > -1) {
- secret.append((char)c);
- c = reader.read();
+ if (!isRandomSecret(filterConfig)) {
+ try (Reader reader = new InputStreamReader(Files.newInputStream(
+ Paths.get(signatureSecretFile)), StandardCharsets.UTF_8)) {
+ StringBuilder secret = new StringBuilder();
+ int c = reader.read();
+ while (c > -1) {
+ secret.append((char) c);
+ c = reader.read();
+ }
+ props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
+ secret.toString());
+ } catch (IOException ex) {
+ throw new RuntimeException("Could not read HttpFS signature "
+ + "secret file: " + signatureSecretFile);
}
- reader.close();
- props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
- } catch (IOException ex) {
- throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
}
setAuthHandlerClass(props);
String dtkind = WebHdfsConstants.WEBHDFS_TOKEN_KIND.toString();
@@ -115,4 +121,12 @@ public class HttpFSAuthenticationFilter
return conf;
}
+ private boolean isRandomSecret(FilterConfig filterConfig) {
+ SignerSecretProvider secretProvider = (SignerSecretProvider) filterConfig
+ .getServletContext().getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
+ if (secretProvider == null) {
+ return false;
+ }
+ return secretProvider.getClass() == RandomSignerSecretProvider.class;
+ }
}
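To make the new control flow easier to follow outside the diff, here is a minimal standalone sketch of the same check. It assumes, as the patch does, that hadoop-auth's AuthenticationFilter publishes its SignerSecretProvider in the servlet context under the inherited SIGNER_SECRET_PROVIDER_ATTRIBUTE constant; the class and method names below (SecretProviderCheck, usesRandomSecret) are illustrative only and not part of the change.

import javax.servlet.FilterConfig;

import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;

final class SecretProviderCheck {

  private SecretProviderCheck() {
  }

  /**
   * Mirrors HttpFSAuthenticationFilter#isRandomSecret: true only when the
   * servlet context already holds a RandomSignerSecretProvider, in which
   * case the signature secret file is not read at all.
   */
  static boolean usesRandomSecret(FilterConfig filterConfig) {
    SignerSecretProvider provider = (SignerSecretProvider) filterConfig
        .getServletContext()
        .getAttribute(AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE);
    if (provider == null) {
      // No provider registered yet: the filter keeps its old behaviour and
      // reads the file named by httpfs.authentication.signature.secret.file.
      return false;
    }
    return provider.getClass() == RandomSignerSecretProvider.class;
  }
}

Comparing the concrete class rather than using instanceof matches the patch: only an exact RandomSignerSecretProvider makes the filter skip the secret file.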
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
index 3e9064f4472..e884a125ef5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
@@ -157,6 +157,9 @@
If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
they should share the secret file.
+
+      If the secret file specified here does not exist, a random secret is
+      generated at startup time.
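For illustration, a deployment that wants the shared, file-based secret (so that load-balanced HttpFS instances accept each other's signed cookies) could point the property at a common path in httpfs-site.xml. The property name follows from CONF_PREFIX + SIGNATURE_SECRET + ".file" in HttpFSAuthenticationFilter; the path shown is only an example.

<property>
  <name>httpfs.authentication.signature.secret.file</name>
  <!-- Example path only: every load-balanced instance should point at the
       same shared file so that signed cookies validate on all of them. -->
  <value>/etc/hadoop/conf/httpfs-signature.secret</value>
</property>

If the file is absent at startup, the server falls back to the randomly generated secret described above, which is fine for a single instance but means each server signs with its own secret.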
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java
new file mode 100644
index 00000000000..b8e902a6f54
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServerWithRandomSecret.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Shell;
+import org.junit.BeforeClass;
+
+import java.io.File;
+
+/**
+ * Unlike {@link TestHttpFSServerWebServer}, the httpfs-signature.secret file
+ * does not exist in this test, so a random secret is used instead.
+ */
+public class TestHttpFSServerWebServerWithRandomSecret extends
+ TestHttpFSServerWebServer {
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ File homeDir = GenericTestUtils.getTestDir();
+ File confDir = new File(homeDir, "etc/hadoop");
+ File logsDir = new File(homeDir, "logs");
+ File tempDir = new File(homeDir, "temp");
+ confDir.mkdirs();
+ logsDir.mkdirs();
+ tempDir.mkdirs();
+
+ if (Shell.WINDOWS) {
+ File binDir = new File(homeDir, "bin");
+ binDir.mkdirs();
+ File winutils = Shell.getWinUtilsFile();
+ if (winutils.exists()) {
+ FileUtils.copyFileToDirectory(winutils, binDir);
+ }
+ }
+
+ System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
+ System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
+ System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
+ System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
+ System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
+ }
+}