HDFS-10579. HDFS web interfaces lack configs for X-FRAME-OPTIONS protection. Contributed by Anu Engineer.

Conflicts:
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
Author: Jitendra Pandey
Date: 2016-07-11 14:55:33 -07:00
Parent: 02b037f625
Commit: be1a11c9c8
6 changed files with 247 additions and 0 deletions

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java

@@ -702,6 +702,12 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
// Security-related configs
public static final String DFS_ENCRYPT_DATA_TRANSFER_KEY = "dfs.encrypt.data.transfer";
public static final boolean DFS_ENCRYPT_DATA_TRANSFER_DEFAULT = false;
public static final String DFS_XFRAME_OPTION_ENABLED = "dfs.xframe.enabled";
public static final boolean DFS_XFRAME_OPTION_ENABLED_DEFAULT = true;
public static final String DFS_XFRAME_OPTION_VALUE = "dfs.xframe.value";
public static final String DFS_XFRAME_OPTION_VALUE_DEFAULT = "SAMEORIGIN";
@Deprecated
public static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
HdfsClientConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY;

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java

@@ -107,6 +107,16 @@ public class DatanodeHttpServer implements Closeable {
.addEndpoint(URI.create("http://localhost:0"))
.setFindPort(true);
final boolean xFrameEnabled = conf.getBoolean(
DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
final String xFrameOptionValue = conf.getTrimmed(
DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
this.infoServer = builder.build();
this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
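A quick manual check of this change is to request any page from the DataNode web UI and inspect the response headers, much as the new tests later in this commit do. A minimal sketch, assuming a placeholder DataNode HTTP address of localhost:9864:

import java.net.HttpURLConnection;
import java.net.URL;

public class XFrameHeaderCheck {
  public static void main(String[] args) throws Exception {
    // Placeholder address; substitute the actual DataNode (or NameNode) HTTP address.
    URL url = new URL("http://localhost:9864/");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.connect();
    // Present when dfs.xframe.enabled=true (value taken from dfs.xframe.value); null when disabled.
    System.out.println("X-FRAME-OPTIONS: " + conn.getHeaderField("X-FRAME-OPTIONS"));
    conn.disconnect();
  }
}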

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

@@ -29,6 +29,7 @@ import java.util.Map.Entry;
import javax.servlet.ServletContext;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -141,6 +142,16 @@ public class NameNodeHttpServer {
DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
final boolean xFrameEnabled = conf.getBoolean(
DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
final String xFrameOptionValue = conf.getTrimmed(
DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
httpServer = builder.build();
if (policy.isHttpsEnabled()) {
@@ -326,4 +337,13 @@ public class NameNodeHttpServer {
ServletContext context) {
return (StartupProgress)context.getAttribute(STARTUP_PROGRESS_ATTRIBUTE_KEY);
}
/**
* Returns the httpServer.
* @return HttpServer2
*/
@VisibleForTesting
public HttpServer2 getHttpServer() {
return httpServer;
}
}
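Both servers delegate validation of the configured value to HttpServer2. The sketch below is only a hypothetical illustration of the documented contract (DENY, SAMEORIGIN and ALLOW-FROM are accepted; anything else fails startup with an IllegalArgumentException); it is not the actual HttpServer2 code.

// Hypothetical illustration of the documented validation contract;
// the real check is performed inside HttpServer2, not by this class.
final class XFrameOptionContract {
  private static final java.util.Set<String> ALLOWED = new java.util.HashSet<>(
      java.util.Arrays.asList("DENY", "SAMEORIGIN", "ALLOW-FROM"));

  static String validate(String value) {
    if (value == null || !ALLOWED.contains(value)) {
      throw new IllegalArgumentException("Unexpected X-FRAME-OPTIONS value: " + value);
    }
    return value;
  }
}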

hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

@@ -2931,6 +2931,30 @@
</description>
</property>
<property>
<name>dfs.xframe.enabled</name>
<value>true</value>
<description>
If true, enables protection against clickjacking by returning the
X-FRAME-OPTIONS header with the value configured in dfs.xframe.value
(SAMEORIGIN by default). Clickjacking protection prevents an attacker
from using transparent or opaque layers to trick a user into clicking
a button or link on another page.
</description>
</property>
<property>
<name>dfs.xframe.value</name>
<value>SAMEORIGIN</value>
<description>
This configuration value allows the user to specify the value of the
X-FRAME-OPTIONS header. The possible values for this field are
DENY, SAMEORIGIN and ALLOW-FROM. Any other value will cause an
exception to be thrown when the namenode and datanodes start up.
</description>
</property>
<property>
<name>dfs.http.client.retry.policy.enabled</name>
<value>false</value>
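Operators would normally set the two dfs.xframe properties above in hdfs-site.xml; the new tests set them programmatically. A minimal sketch of the programmatic form, assuming the Hadoop client libraries are on the classpath (DENY here is just an example value):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

public class XFrameConfigExample {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();
    // Keep the header enabled (the default) and tighten the policy from SAMEORIGIN to DENY.
    conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, true);
    conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, "DENY");
    System.out.println(conf.get(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE));
  }
}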

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java

@@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.hdfs.server.datanode.web;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.http.HttpServer2;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Tests that X-FRAME-OPTIONS handling works correctly in DatanodeHttpServer.
*/
public class TestDatanodeHttpXFrame {
@Rule
public ExpectedException exception = ExpectedException.none();
@Test
public void testDataNodeXFrameOptionsEnabled() throws Exception {
boolean xFrameEnabled = true;
MiniDFSCluster cluster = createCluster(xFrameEnabled, null);
HttpURLConnection conn = getConn(cluster);
String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
Assert.assertNotNull("X-FRAME-OPTIONS is absent in the header", xfoHeader);
Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
.SAMEORIGIN.toString()));
}
@Test
public void testDataNodeXFrameOptionsDisabled() throws Exception {
boolean xFrameEnabled = false;
MiniDFSCluster cluster = createCluster(xFrameEnabled, null);
HttpURLConnection conn = getConn(cluster);
String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
Assert.assertNull("unexpected X-FRAME-OPTIONS header", xfoHeader);
}
@Test
public void testDataNodeXFrameWithInvalidOptions() throws Exception {
exception.expect(IllegalArgumentException.class);
createCluster(false, "Hadoop");
}
private MiniDFSCluster createCluster(boolean enabled, String
value) throws IOException {
Configuration conf = new HdfsConfiguration();
conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, enabled);
if (value != null) {
conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, value);
}
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
return cluster;
}
private HttpURLConnection getConn(MiniDFSCluster cluster)
throws IOException {
DataNode datanode = cluster.getDataNodes().get(0);
URL newURL = new URL("http://localhost:" + datanode.getInfoPort());
HttpURLConnection conn = (HttpURLConnection) newURL.openConnection();
conn.connect();
return conn;
}
}

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java

@@ -0,0 +1,97 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.net.NetUtils;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URL;
/**
* A class to test the X-FRAME-OPTIONS handling of the NameNode HTTP server.
* We do not reuse TestNameNodeHTTPServer because it is a parameterized class,
* and these tests would run multiple times doing the same thing if the code
* lived in that class.
*/
public class TestNameNodeHttpServerXFrame {
@Rule
public ExpectedException exception = ExpectedException.none();
public static URL getServerURL(HttpServer2 server)
throws MalformedURLException {
Assert.assertNotNull("No server", server);
return new URL("http://"
+ NetUtils.getHostPortString(server.getConnectorAddress(0)));
}
@Test
public void testNameNodeXFrameOptionsEnabled() throws Exception {
HttpURLConnection conn = createServerwithXFrame(true, null);
String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
Assert.assertNotNull("X-FRAME-OPTIONS is absent in the header", xfoHeader);
Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
.SAMEORIGIN.toString()));
}
@Test
public void testNameNodeXFrameOptionsDisabled() throws Exception {
HttpURLConnection conn = createServerwithXFrame(false, null);
String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
Assert.assertNull("unexpected X-FRAME-OPTIONS header", xfoHeader);
}
@Test
public void testNameNodeXFrameOptionsIllegalOption() throws Exception {
exception.expect(IllegalArgumentException.class);
createServerwithXFrame(true, "hadoop");
}
private HttpURLConnection createServerwithXFrame(boolean enabled, String
value) throws IOException {
Configuration conf = new HdfsConfiguration();
conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, enabled);
if (value != null) {
conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, value);
}
InetSocketAddress addr = InetSocketAddress.createUnresolved("localhost", 0);
NameNodeHttpServer server = new NameNodeHttpServer(conf, null, addr);
server.start();
URL url = getServerURL(server.getHttpServer());
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.connect();
return conn;
}
}