HDFS-13654. Use a random secret when a secret file doesn't exist in HttpFS. This should be default.

This commit is contained in:
Takanobu Asanuma 2019-05-31 10:29:24 +09:00
parent 219e286722
commit 35f1014b3e
5 changed files with 91 additions and 18 deletions

View File

@ -304,7 +304,6 @@
<configuration> <configuration>
<excludes> <excludes>
<exclude>src/test/resources/classutils.txt</exclude> <exclude>src/test/resources/classutils.txt</exclude>
<exclude>src/main/conf/httpfs-signature.secret</exclude>
</excludes> </excludes>
</configuration> </configuration>
</plugin> </plugin>

View File

@ -21,6 +21,8 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.WebHdfsConstants; import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler; import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler;
@ -37,8 +39,8 @@ import java.util.Map;
import java.util.Properties; import java.util.Properties;
/** /**
* Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its
* from HttpFSServer's server configuration. * configuration from HttpFSServer's server configuration.
*/ */
@InterfaceAudience.Private @InterfaceAudience.Private
public class HttpFSAuthenticationFilter public class HttpFSAuthenticationFilter
@ -46,7 +48,8 @@ public class HttpFSAuthenticationFilter
static final String CONF_PREFIX = "httpfs.authentication."; static final String CONF_PREFIX = "httpfs.authentication.";
private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file"; private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
+ ".file";
/** /**
* Returns the hadoop-auth configuration from HttpFSServer's configuration. * Returns the hadoop-auth configuration from HttpFSServer's configuration.
@ -78,22 +81,25 @@ public class HttpFSAuthenticationFilter
String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null); String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
if (signatureSecretFile == null) { if (signatureSecretFile == null) {
throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE); throw new RuntimeException("Undefined property: "
+ SIGNATURE_SECRET_FILE);
} }
try { if (!isRandomSecret(filterConfig)) {
StringBuilder secret = new StringBuilder(); try (Reader reader = new InputStreamReader(Files.newInputStream(
Reader reader = new InputStreamReader(Files.newInputStream(Paths.get( Paths.get(signatureSecretFile)), StandardCharsets.UTF_8)) {
signatureSecretFile)), StandardCharsets.UTF_8); StringBuilder secret = new StringBuilder();
int c = reader.read(); int c = reader.read();
while (c > -1) { while (c > -1) {
secret.append((char)c); secret.append((char) c);
c = reader.read(); c = reader.read();
}
props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
secret.toString());
} catch (IOException ex) {
throw new RuntimeException("Could not read HttpFS signature "
+ "secret file: " + signatureSecretFile);
} }
reader.close();
props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
} catch (IOException ex) {
throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
} }
setAuthHandlerClass(props); setAuthHandlerClass(props);
String dtkind = WebHdfsConstants.WEBHDFS_TOKEN_KIND.toString(); String dtkind = WebHdfsConstants.WEBHDFS_TOKEN_KIND.toString();
@ -115,4 +121,12 @@ public class HttpFSAuthenticationFilter
return conf; return conf;
} }
/**
 * Determines whether the filter's secret is being supplied by a
 * {@link RandomSignerSecretProvider}, i.e. a random secret was generated
 * (HDFS-13654) instead of being read from the configured secret file.
 *
 * @param filterConfig filter configuration used to reach the servlet context
 * @return {@code true} only if the provider registered under
 *         SIGNER_SECRET_PROVIDER_ATTRIBUTE is exactly
 *         {@code RandomSignerSecretProvider}; {@code false} when no provider
 *         is registered or a different provider type is in use
 */
private boolean isRandomSecret(FilterConfig filterConfig) {
SignerSecretProvider secretProvider = (SignerSecretProvider) filterConfig
.getServletContext().getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
if (secretProvider == null) {
// No provider registered yet; caller falls back to reading the secret file.
return false;
}
// Exact class match (not instanceof): subclasses are treated as non-random.
return secretProvider.getClass() == RandomSignerSecretProvider.class;
}
} }

View File

@ -157,6 +157,9 @@
If multiple HttpFS servers are used in a load-balancer/round-robin fashion, If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
they should share the secret file. they should share the secret file.
If the secret file specified here does not exist, a random secret is
generated at startup time.
</description> </description>
</property> </property>

View File

@ -0,0 +1,58 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.http.server;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;
import java.io.File;
/**
 * Unlike {@link TestHttpFSServerWebServer}, httpfs-signature.secret doesn't
 * exist. In this case, a random secret is used.
 */
public class TestHttpFSServerWebServerWithRandomSecret extends
    TestHttpFSServerWebServer {

  /**
   * Prepares an HttpFS home layout (conf/logs/temp directories) under the
   * generic test directory and points the hadoop/httpfs system properties at
   * it, without creating any signature secret file.
   */
  @BeforeClass
  public static void beforeClass() throws Exception {
    final File homeDir = GenericTestUtils.getTestDir();
    final File confDir = new File(homeDir, "etc/hadoop");
    final File logsDir = new File(homeDir, "logs");
    final File tempDir = new File(homeDir, "temp");
    for (final File dir : new File[] {confDir, logsDir, tempDir}) {
      dir.mkdirs();
    }
    if (Shell.WINDOWS) {
      // On Windows the server expects winutils.exe under
      // ${hadoop.home.dir}/bin, so stage a copy when one is available.
      final File binDir = new File(homeDir, "bin");
      binDir.mkdirs();
      final File winutils = Shell.getWinUtilsFile();
      if (winutils.exists()) {
        FileUtils.copyFileToDirectory(winutils, binDir);
      }
    }
    System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
    System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
    System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
    System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
    System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
  }
}