HDFS-9711. Integrate CSRF prevention filter in WebHDFS. Contributed by Chris Nauroth.

cnauroth 2016-02-18 10:07:28 -08:00
parent e78d8e66f7
commit 5d1889a66d
13 changed files with 851 additions and 20 deletions

View File: RestCsrfPreventionFilter.java

@@ -18,7 +18,9 @@
package org.apache.hadoop.security.http;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.Set;
@@ -32,6 +34,13 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This filter provides protection against cross site request forgery (CSRF)
* attacks for REST APIs. Enabling this filter on an endpoint results in the
@@ -39,7 +48,13 @@
* with every request. In the absence of this header the filter will reject the
* attempt as a bad request.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class RestCsrfPreventionFilter implements Filter {
private static final Logger LOG =
LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
"browser-useragents-regex";
@@ -72,6 +87,9 @@ public void init(FilterConfig filterConfig) throws ServletException {
agents = BROWSER_USER_AGENTS_DEFAULT;
}
parseBrowserUserAgents(agents);
LOG.info("Adding cross-site request forgery (CSRF) protection, "
+ "headerName = {}, methodsToIgnore = {}, browserUserAgents = {}",
headerName, methodsToIgnore, browserUserAgents);
}
void parseBrowserUserAgents(String userAgents) {
@@ -118,22 +136,152 @@ protected boolean isBrowser(String userAgent) {
return false;
}
/**
* Defines the minimal API requirements for the filter to execute its
* filtering logic. This interface exists to facilitate integration in
* components that do not run within a servlet container and therefore cannot
* rely on a servlet container to dispatch to the {@link #doFilter} method.
* Applications that do run inside a servlet container will not need to write
* code that uses this interface. Instead, they can use typical servlet
* container configuration mechanisms to insert the filter.
*/
public interface HttpInteraction {
/**
* Returns the value of a header.
*
* @param header name of header
* @return value of header
*/
String getHeader(String header);
/**
* Returns the method.
*
* @return method
*/
String getMethod();
/**
* Called by the filter after it decides that the request may proceed.
*
* @throws IOException if there is an I/O error
* @throws ServletException if the implementation relies on the servlet API
* and a servlet API call has failed
*/
void proceed() throws IOException, ServletException;
/**
* Called by the filter after it decides that the request is a potential
* CSRF attack and therefore must be rejected.
*
* @param code status code to send
* @param message response message
* @throws IOException if there is an I/O error
*/
void sendError(int code, String message) throws IOException;
}
/**
* Handles an {@link HttpInteraction} by applying the filtering logic.
*
* @param httpInteraction caller's HTTP interaction
* @throws IOException if there is an I/O error
* @throws ServletException if the implementation relies on the servlet API
* and a servlet API call has failed
*/
public void handleHttpInteraction(HttpInteraction httpInteraction)
throws IOException, ServletException {
if (!isBrowser(httpInteraction.getHeader(HEADER_USER_AGENT)) ||
methodsToIgnore.contains(httpInteraction.getMethod()) ||
httpInteraction.getHeader(headerName) != null) {
httpInteraction.proceed();
} else {
httpInteraction.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Missing Required Header for CSRF Vulnerability Protection");
}
}
@Override
public void doFilter(ServletRequest request, ServletResponse response,
final FilterChain chain) throws IOException, ServletException {
final HttpServletRequest httpRequest = (HttpServletRequest)request;
final HttpServletResponse httpResponse = (HttpServletResponse)response;
handleHttpInteraction(new ServletFilterHttpInteraction(httpRequest,
httpResponse, chain));
}
@Override
public void destroy() {
}
/**
* Constructs a mapping of configuration properties to be used for filter
* initialization. The mapping includes all properties that start with the
* specified configuration prefix. Property names in the mapping are trimmed
* to remove the configuration prefix.
*
* @param conf configuration to read
* @param confPrefix configuration prefix
* @return mapping of configuration properties to be used for filter
* initialization
*/
public static Map<String, String> getFilterParams(Configuration conf,
String confPrefix) {
Map<String, String> filterConfigMap = new HashMap<>();
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
if (name.startsWith(confPrefix)) {
String value = conf.get(name);
name = name.substring(confPrefix.length());
filterConfigMap.put(name, value);
}
}
return filterConfigMap;
}
/**
* {@link HttpInteraction} implementation for use in the servlet filter.
*/
private static final class ServletFilterHttpInteraction
implements HttpInteraction {
private final FilterChain chain;
private final HttpServletRequest httpRequest;
private final HttpServletResponse httpResponse;
/**
* Creates a new ServletFilterHttpInteraction.
*
* @param httpRequest request to process
* @param httpResponse response to process
* @param chain filter chain to forward to if HTTP interaction is allowed
*/
public ServletFilterHttpInteraction(HttpServletRequest httpRequest,
HttpServletResponse httpResponse, FilterChain chain) {
this.httpRequest = httpRequest;
this.httpResponse = httpResponse;
this.chain = chain;
}
@Override
public String getHeader(String header) {
return httpRequest.getHeader(header);
}
@Override
public String getMethod() {
return httpRequest.getMethod();
}
@Override
public void proceed() throws IOException, ServletException {
chain.doFilter(httpRequest, httpResponse);
}
@Override
public void sendError(int code, String message) throws IOException {
httpResponse.sendError(code, message);
}
}
}
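
The HttpInteraction indirection above is what lets servers that are not servlet containers reuse this filter; the Netty handler added later in this commit is the real integration. A minimal hedged sketch of the pattern, assuming a fully initialized filter instance and hypothetical transportRequest, transportResponse, and dispatch stand-ins that are not part of this patch:

// Hedged sketch, not part of the patch: driving the filter outside a servlet
// container. "transportRequest", "transportResponse" and "dispatch" are
// hypothetical stand-ins for the host component's own transport.
filter.handleHttpInteraction(new RestCsrfPreventionFilter.HttpInteraction() {
  @Override
  public String getHeader(String header) {
    return transportRequest.getHeader(header); // hypothetical accessor
  }

  @Override
  public String getMethod() {
    return transportRequest.getMethod(); // hypothetical accessor
  }

  @Override
  public void proceed() {
    dispatch(transportRequest); // hypothetical downstream dispatch
  }

  @Override
  public void sendError(int code, String message) {
    transportResponse.sendError(code, message); // hypothetical rejection path
  }
});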

View File: HdfsClientConfigKeys.java

@@ -41,6 +41,18 @@ public interface HdfsClientConfigKeys {
String DFS_WEBHDFS_OAUTH_ENABLED_KEY = "dfs.webhdfs.oauth2.enabled";
boolean DFS_WEBHDFS_OAUTH_ENABLED_DEFAULT = false;
String DFS_WEBHDFS_REST_CSRF_ENABLED_KEY = "dfs.webhdfs.rest-csrf.enabled";
boolean DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT = false;
String DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY =
"dfs.webhdfs.rest-csrf.custom-header";
String DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT = "X-XSRF-HEADER";
String DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY =
"dfs.webhdfs.rest-csrf.methods-to-ignore";
String DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT =
"GET,OPTIONS,HEAD,TRACE";
String DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY =
"dfs.webhdfs.rest-csrf.browser-useragents-regex";
String OAUTH_CLIENT_ID_KEY = "dfs.webhdfs.oauth2.client.id";
String OAUTH_REFRESH_URL_KEY = "dfs.webhdfs.oauth2.refresh.url";
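
As a usage note, here is a hedged example of enabling the client side of this feature programmatically with the keys above; the NameNode address is a placeholder, and any unset keys fall back to the defaults shown above. This mirrors the setup used by the new test at the end of this commit.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

// Hedged example: make the WebHDFS client send the CSRF prevention header.
// "nn-host:50070" is a placeholder address.
Configuration conf = new Configuration();
conf.setBoolean(HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, true);
FileSystem webhdfs = FileSystem.get(
    URI.create("webhdfs://nn-host:50070"), conf);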

View File: WebHdfsFileSystem.java

@@ -18,6 +18,13 @@
package org.apache.hadoop.hdfs.web;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.EOFException;
@@ -33,8 +40,10 @@
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import javax.ws.rs.core.HttpHeaders;
@@ -129,6 +138,8 @@ public class WebHdfsFileSystem extends FileSystem
private InetSocketAddress nnAddrs[];
private int currentNNAddrIndex;
private boolean disallowFallbackToInsecureCluster;
private String restCsrfCustomHeader;
private Set<String> restCsrfMethodsToIgnore;
private static final ObjectReader READER =
new ObjectMapper().reader(Map.class);
@@ -227,9 +238,52 @@ public synchronized void initialize(URI uri, Configuration conf
this.disallowFallbackToInsecureCluster = !conf.getBoolean(
CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY,
CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT);
this.initializeRestCsrf(conf);
this.delegationToken = null;
}
/**
* Initializes client-side handling of cross-site request forgery (CSRF)
* protection by figuring out the custom HTTP headers that need to be sent in
* requests and which HTTP methods are ignored because they do not require
* CSRF protection.
*
* @param conf configuration to read
*/
private void initializeRestCsrf(Configuration conf) {
if (conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,
DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT)) {
this.restCsrfCustomHeader = conf.getTrimmed(
DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY,
DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT);
this.restCsrfMethodsToIgnore = new HashSet<>();
this.restCsrfMethodsToIgnore.addAll(getTrimmedStringList(conf,
DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY,
DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT));
} else {
this.restCsrfCustomHeader = null;
this.restCsrfMethodsToIgnore = null;
}
}
/**
* Returns a list of strings from a comma-delimited configuration value.
*
* @param conf configuration to check
* @param name configuration property name
* @param defaultValue default value if no value found for name
* @return list of strings from comma-delimited configuration value, or an
* empty list if not found
*/
private static List<String> getTrimmedStringList(Configuration conf,
String name, String defaultValue) {
String valueString = conf.get(name, defaultValue);
if (valueString == null) {
return new ArrayList<>();
}
return new ArrayList<>(StringUtils.getTrimmedStringCollection(valueString));
}
@Override
public URI getCanonicalUri() {
return super.getCanonicalUri();
@@ -601,6 +655,11 @@ private HttpURLConnection connect(final HttpOpParam.Op op, final URL url)
final boolean doOutput = op.getDoOutput();
conn.setRequestMethod(op.getType().toString());
conn.setInstanceFollowRedirects(false);
if (restCsrfCustomHeader != null &&
!restCsrfMethodsToIgnore.contains(op.getType().name())) {
// The value of the header is unimportant. Only its presence matters.
conn.setRequestProperty(restCsrfCustomHeader, "\"\"");
}
switch (op.getType()) {
// if not sending a message body for a POST or PUT operation, need
// to ensure the server/proxy knows this
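
For clients that do not go through WebHdfsFileSystem, the effect of this connect() change can be reproduced by hand. A hedged sketch, assuming the default header name and a placeholder URL:

import java.net.HttpURLConnection;
import java.net.URL;

// Hedged sketch, not part of the patch: a hand-rolled request that passes
// the CSRF check when protection is enabled. Host, port, and path are
// placeholders.
HttpURLConnection conn = (HttpURLConnection) new URL(
    "http://nn-host:50070/webhdfs/v1/tmp/f?op=CREATE").openConnection();
conn.setRequestMethod("PUT");
conn.setInstanceFollowRedirects(false);
// The value of the header is unimportant. Only its presence matters.
conn.setRequestProperty("X-XSRF-HEADER", "\"\"");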

View File: CHANGES.txt

@@ -1041,6 +1041,8 @@ Release 2.8.0 - UNRELEASED
HDFS-9244. Support nested encryption zones. (zhz)
HDFS-9711. Integrate CSRF prevention filter in WebHDFS. (cnauroth)
IMPROVEMENTS
HDFS-9257. improve error message for "Absolute path required" in INode.java

View File: DatanodeHttpServer.java

@@ -17,6 +17,15 @@
*/
package org.apache.hadoop.hdfs.server.datanode.web;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
import java.util.Enumeration;
import java.util.Map;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFactory;
import io.netty.channel.ChannelFuture;
@@ -46,6 +55,7 @@
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
import org.apache.hadoop.security.ssl.SSLFactory;
import java.io.Closeable;
@@ -73,6 +83,7 @@ public class DatanodeHttpServer implements Closeable {
private final ServerBootstrap httpsServer;
private final Configuration conf;
private final Configuration confForCreate;
private final RestCsrfPreventionFilter restCsrfPreventionFilter;
private InetSocketAddress httpAddress;
private InetSocketAddress httpsAddress;
static final Log LOG = LogFactory.getLog(DatanodeHttpServer.class);
@@ -81,6 +92,7 @@ public DatanodeHttpServer(final Configuration conf,
final DataNode datanode,
final ServerSocketChannel externalHttpChannel)
throws IOException {
this.restCsrfPreventionFilter = createRestCsrfPreventionFilter(conf);
this.conf = conf;
Configuration confForInfoServer = new Configuration(conf);
@@ -117,7 +129,7 @@ public DatanodeHttpServer(final Configuration conf,
@Override
protected void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast(new PortUnificationServerHandler(jettyAddr,
conf, confForCreate, restCsrfPreventionFilter));
}
});
@@ -165,11 +177,16 @@ protected void doBind(SocketAddress localAddress) throws Exception {}
protected void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast(
new SslHandler(sslFactory.createSSLEngine()),
new HttpRequestDecoder(),
new HttpResponseEncoder());
if (restCsrfPreventionFilter != null) {
p.addLast(new RestCsrfPreventionFilterHandler(
restCsrfPreventionFilter));
}
p.addLast(
new ChunkedWriteHandler(),
new URLDispatcher(jettyAddr, conf, confForCreate));
}
});
} else {
@@ -252,4 +269,87 @@ private static String getHostnameForSpnegoPrincipal(Configuration conf) {
InetSocketAddress inetSocker = NetUtils.createSocketAddr(addr);
return inetSocker.getHostString();
}
/**
* Creates the {@link RestCsrfPreventionFilter} for the DataNode. Since the
* DataNode HTTP server is not implemented in terms of the servlet API, it
* takes some extra effort to obtain an instance of the filter. This method
* takes care of configuration and implementing just enough of the servlet API
* and related interfaces so that the DataNode can get a fully initialized
* instance of the filter.
*
* @param conf configuration to read
* @return initialized filter, or null if CSRF protection not enabled
*/
private static RestCsrfPreventionFilter createRestCsrfPreventionFilter(
Configuration conf) {
if (!conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,
DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT)) {
return null;
}
String restCsrfClassName = RestCsrfPreventionFilter.class.getName();
Map<String, String> restCsrfParams = RestCsrfPreventionFilter
.getFilterParams(conf, "dfs.webhdfs.rest-csrf.");
RestCsrfPreventionFilter filter = new RestCsrfPreventionFilter();
try {
filter.init(new MapBasedFilterConfig(restCsrfClassName, restCsrfParams));
} catch (ServletException e) {
throw new IllegalStateException(
"Failed to initialize RestCsrfPreventionFilter.", e);
}
return filter;
}
/**
* A minimal {@link FilterConfig} implementation backed by a {@link Map}.
*/
private static final class MapBasedFilterConfig implements FilterConfig {
private final String filterName;
private final Map<String, String> parameters;
/**
* Creates a new MapBasedFilterConfig.
*
* @param filterName filter name
* @param parameters mapping of filter initialization parameters
*/
public MapBasedFilterConfig(String filterName,
Map<String, String> parameters) {
this.filterName = filterName;
this.parameters = parameters;
}
@Override
public String getFilterName() {
return this.filterName;
}
@Override
public String getInitParameter(String name) {
return this.parameters.get(name);
}
@Override
public Enumeration<String> getInitParameterNames() {
throw this.notImplemented();
}
@Override
public ServletContext getServletContext() {
throw this.notImplemented();
}
/**
* Creates an exception indicating that an interface method is not
* implemented. These should never be seen in practice, because it is only
* used for methods that are not called by {@link RestCsrfPreventionFilter}.
*
* @return exception indicating method not implemented
*/
private UnsupportedOperationException notImplemented() {
return new UnsupportedOperationException(this.getClass().getSimpleName()
+ " does not implement this method.");
}
}
}

View File: PortUnificationServerHandler.java

@@ -23,6 +23,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.datanode.web.dtp.DtpHttp2Handler;
import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
@@ -51,19 +52,32 @@ public class PortUnificationServerHandler extends ByteToMessageDecoder {
private final Configuration confForCreate;
private final RestCsrfPreventionFilter restCsrfPreventionFilter;
public PortUnificationServerHandler(InetSocketAddress proxyHost,
Configuration conf, Configuration confForCreate,
RestCsrfPreventionFilter restCsrfPreventionFilter) {
this.proxyHost = proxyHost;
this.conf = conf;
this.confForCreate = confForCreate;
this.restCsrfPreventionFilter = restCsrfPreventionFilter;
}
private void configureHttp1(ChannelHandlerContext ctx) {
ctx.pipeline().addLast(new HttpServerCodec());
if (this.restCsrfPreventionFilter != null) {
ctx.pipeline().addLast(new RestCsrfPreventionFilterHandler(
this.restCsrfPreventionFilter));
}
ctx.pipeline().addLast(new ChunkedWriteHandler(),
new URLDispatcher(proxyHost, conf, confForCreate));
}
private void configureHttp2(ChannelHandlerContext ctx) {
if (this.restCsrfPreventionFilter != null) {
ctx.pipeline().addLast(new RestCsrfPreventionFilterHandler(
this.restCsrfPreventionFilter));
}
ctx.pipeline().addLast(new DtpHttp2Handler());
}

View File: RestCsrfPreventionFilterHandler.java

@@ -0,0 +1,137 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.web;
import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultHttpResponse;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.util.ReferenceCountUtil;
import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
import org.apache.hadoop.security.http.RestCsrfPreventionFilter.HttpInteraction;
/**
* Netty handler that integrates with the {@link RestCsrfPreventionFilter}. If
* the filter determines that the request is allowed, then this handler forwards
* the request to the next handler in the Netty pipeline. Otherwise, this
* handler drops the request and immediately sends an HTTP 400 response.
*/
@InterfaceAudience.Private
final class RestCsrfPreventionFilterHandler
extends SimpleChannelInboundHandler<HttpRequest> {
private static final Log LOG = DatanodeHttpServer.LOG;
private final RestCsrfPreventionFilter restCsrfPreventionFilter;
/**
* Creates a new RestCsrfPreventionFilterHandler. There will be a new
* instance created for each new Netty channel/pipeline serving a new request.
* To prevent the cost of repeated initialization of the filter, this
* constructor requires the caller to pass in a pre-built, fully initialized
* filter instance. The filter is stateless after initialization, so it can
* be shared across multiple Netty channels/pipelines.
*
* @param restCsrfPreventionFilter initialized filter
*/
public RestCsrfPreventionFilterHandler(
RestCsrfPreventionFilter restCsrfPreventionFilter) {
this.restCsrfPreventionFilter = restCsrfPreventionFilter;
}
@Override
protected void channelRead0(final ChannelHandlerContext ctx,
final HttpRequest req) throws Exception {
restCsrfPreventionFilter.handleHttpInteraction(new NettyHttpInteraction(
ctx, req));
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
LOG.error("Exception in " + this.getClass().getSimpleName(), cause);
sendResponseAndClose(ctx,
new DefaultHttpResponse(HTTP_1_1, INTERNAL_SERVER_ERROR));
}
/**
* Finish handling this pipeline by writing a response with the
* "Connection: close" header, flushing, and scheduling a close of the
* connection.
*
* @param ctx context to receive the response
* @param resp response to send
*/
private static void sendResponseAndClose(ChannelHandlerContext ctx,
DefaultHttpResponse resp) {
resp.headers().set(CONNECTION, CLOSE);
ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
}
/**
* {@link HttpInteraction} implementation for use in a Netty pipeline.
*/
private static final class NettyHttpInteraction implements HttpInteraction {
private final ChannelHandlerContext ctx;
private final HttpRequest req;
/**
* Creates a new NettyHttpInteraction.
*
* @param ctx context to receive the response
* @param req request to process
*/
public NettyHttpInteraction(ChannelHandlerContext ctx, HttpRequest req) {
this.ctx = ctx;
this.req = req;
}
@Override
public String getHeader(String header) {
return req.headers().get(header);
}
@Override
public String getMethod() {
return req.method().name();
}
@Override
public void proceed() {
ReferenceCountUtil.retain(req);
ctx.fireChannelRead(req);
}
@Override
public void sendError(int code, String message) {
HttpResponseStatus status = new HttpResponseStatus(code, message);
sendResponseAndClose(ctx, new DefaultHttpResponse(HTTP_1_1, status));
}
}
}

View File: NameNodeHttpServer.java

@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
import java.io.IOException;
import java.net.InetSocketAddress;
@@ -45,6 +47,7 @@
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
/**
* Encapsulates the HTTP server started by the NameNode.
@@ -90,6 +93,16 @@ private void initWebHdfs(Configuration conf) throws IOException {
HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
+ ")");
// add REST CSRF prevention filter
if (conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,
DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT)) {
Map<String, String> restCsrfParams = RestCsrfPreventionFilter
.getFilterParams(conf, "dfs.webhdfs.rest-csrf.");
String restCsrfClassName = RestCsrfPreventionFilter.class.getName();
HttpServer2.defineFilter(httpServer.getWebAppContext(), restCsrfClassName,
restCsrfClassName, restCsrfParams, new String[] {pathSpec});
}
// add webhdfs packages
httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
.getPackage().getName() + ";" + Param.class.getPackage().getName(),
@@ -297,4 +310,4 @@ static StartupProgress getStartupProgressFromContext(
public static HAServiceProtocol.HAServiceState getNameNodeStateFromContext(ServletContext context) {
return getNameNodeFromContext(context).getServiceState();
}
}
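
To make the prefix trimming performed by getFilterParams concrete, here is a hedged illustration of the mapping this call produces when the rest-csrf properties from hdfs-default.xml below are set:

// Illustrative only: getFilterParams trims the configuration prefix, so the
// filter receives servlet-style init parameter names.
Map<String, String> params = RestCsrfPreventionFilter.getFilterParams(
    conf, "dfs.webhdfs.rest-csrf.");
// e.g. "dfs.webhdfs.rest-csrf.custom-header"     -> key "custom-header"
//      "dfs.webhdfs.rest-csrf.methods-to-ignore" -> key "methods-to-ignore"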

View File: hdfs-default.xml

@@ -2762,4 +2762,56 @@
</description>
</property>
<property>
<name>dfs.webhdfs.rest-csrf.enabled</name>
<value>false</value>
<description>
If true, then enables WebHDFS protection against cross-site request forgery
(CSRF). The WebHDFS client also uses this property to determine whether or
not it needs to send the custom CSRF prevention header in its HTTP requests.
</description>
</property>
<property>
<name>dfs.webhdfs.rest-csrf.custom-header</name>
<value>X-XSRF-HEADER</value>
<description>
The name of a custom header that HTTP requests must send when protection
against cross-site request forgery (CSRF) is enabled for WebHDFS by setting
dfs.webhdfs.rest-csrf.enabled to true. The WebHDFS client also uses this
property to determine whether or not it needs to send the custom CSRF
prevention header in its HTTP requests.
</description>
</property>
<property>
<name>dfs.webhdfs.rest-csrf.methods-to-ignore</name>
<value>GET,OPTIONS,HEAD,TRACE</value>
<description>
A comma-separated list of HTTP methods that do not require HTTP requests to
include a custom header when protection against cross-site request forgery
(CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to
true. The WebHDFS client also uses this property to determine whether or
not it needs to send the custom CSRF prevention header in its HTTP requests.
</description>
</property>
<property>
<name>dfs.webhdfs.rest-csrf.browser-useragents-regex</name>
<value>^Mozilla.*,^Opera.*</value>
<description>
A comma-separated list of regular expressions used to match against an HTTP
request's User-Agent header when protection against cross-site request
forgery (CSRF) is enabled for WebHDFS by setting
dfs.webhdfs.rest-csrf.enabled to true. If the incoming User-Agent matches
any of these regular expressions, then the request is considered to be sent
by a browser, and therefore CSRF prevention is enforced. If the request's
User-Agent does not match any of these regular expressions, then the request
is considered to be sent by something other than a browser, such as scripted
automation. In this case, CSRF is not a potential attack vector, so
the prevention is not enforced. This helps achieve backwards-compatibility
with existing automation that has not been updated to send the CSRF
prevention header.
</description>
</property>
</configuration>

View File: explorer.html

@@ -269,6 +269,7 @@ <h4 class="modal-title" id="delete-modal-title">Delete</h4>
</script><script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js">
</script><script type="text/javascript" src="/static/dfs-dust.js">
</script><script type="text/javascript" src="/static/json-bignum.js">
</script><script type="text/javascript" src="/static/rest-csrf.js">
</script><script type="text/javascript" src="explorer.js">
</script><script type="text/javascript" src="/static/moment.min.js">
</script>

View File: rest-csrf.js

@@ -0,0 +1,91 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"use strict";
// Initializes client-side handling of cross-site request forgery (CSRF)
// protection by figuring out the custom HTTP headers that need to be sent in
// requests and which HTTP methods are ignored because they do not require CSRF
// protection.
(function() {
var restCsrfCustomHeader = null;
var restCsrfMethodsToIgnore = null;
$.ajax({'url': '/conf', 'dataType': 'xml', 'async': false}).done(
function(data) {
function getBooleanValue(element) {
return ($(element).find('value').text().trim().toLowerCase() === 'true');
}
function getTrimmedStringValue(element) {
return $(element).find('value').text().trim();
}
function getTrimmedStringArrayValue(element) {
var str = $(element).find('value').text().trim();
var array = [];
if (str) {
var splitStr = str.split(',');
for (var i = 0; i < splitStr.length; i++) {
array.push(splitStr[i].trim());
}
}
return array;
}
// Get all relevant configuration properties.
var $xml = $(data);
var csrfEnabled = false;
var header = null;
var methods = [];
$xml.find('property').each(function(idx, element) {
var name = $(element).find('name').text();
if (name === 'dfs.webhdfs.rest-csrf.enabled') {
csrfEnabled = getBooleanValue(element);
} else if (name === 'dfs.webhdfs.rest-csrf.custom-header') {
header = getTrimmedStringValue(element);
} else if (name === 'dfs.webhdfs.rest-csrf.methods-to-ignore') {
methods = getTrimmedStringArrayValue(element);
}
});
// If enabled, set up all subsequent AJAX calls with a pre-send callback
// that adds the custom headers if necessary.
if (csrfEnabled) {
restCsrfCustomHeader = header;
restCsrfMethodsToIgnore = {};
methods.map(function(method) { restCsrfMethodsToIgnore[method] = true; });
$.ajaxSetup({
beforeSend: addRestCsrfCustomHeader
});
}
});
// Adds custom headers to request if necessary. This is done only for WebHDFS
// URLs, and only if it's not an ignored method.
function addRestCsrfCustomHeader(xhr, settings) {
if (settings.url == null || !settings.url.startsWith('/webhdfs/')) {
return;
}
var method = settings.type;
if (restCsrfCustomHeader != null && !restCsrfMethodsToIgnore[method]) {
// The value of the header is unimportant. Only its presence matters.
xhr.setRequestHeader(restCsrfCustomHeader, '""');
}
}
})();

View File: WebHDFS.md

@@ -23,6 +23,7 @@ WebHDFS REST API
* [HDFS Configuration Options](#HDFS_Configuration_Options)
* [Authentication](#Authentication)
* [Proxy Users](#Proxy_Users)
* [Cross-Site Request Forgery Prevention](#Cross-Site_Request_Forgery_Prevention)
* [File and Directory Operations](#File_and_Directory_Operations)
* [Create and Write to a File](#Create_and_Write_to_a_File)
* [Append to a File](#Append_to_a_File)
@@ -263,6 +264,41 @@ When the proxy user feature is enabled, a proxy user *P* may submit a request on
    curl -i "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?delegation=<TOKEN>&op=..."
Cross-Site Request Forgery Prevention
-------------------------------------
WebHDFS supports an optional, configurable mechanism for cross-site request
forgery (CSRF) prevention. When enabled, WebHDFS HTTP requests to the NameNode
or DataNode must include a custom HTTP header. Configuration properties allow
adjusting which specific HTTP methods are protected and the name of the HTTP
header. The value sent in the header is not relevant. Only the presence of a
header by that name is required.
Enabling CSRF prevention also sets up the `WebHdfsFileSystem` class to send the
required header. This ensures that CLI commands like
[`hdfs dfs`](./HDFSCommands.html#dfs) and
[`hadoop distcp`](../../hadoop-distcp/DistCp.html) continue to work correctly
when used with `webhdfs:` URIs.
Enabling CSRF prevention also sets up the NameNode web UI to send the required
header. After enabling CSRF prevention and restarting the NameNode, existing
users of the NameNode web UI need to refresh the browser to reload the page and
find the new configuration.
The following properties control CSRF prevention.
| Property | Description | Default Value |
|:---- |:---- |:---- |
| `dfs.webhdfs.rest-csrf.enabled` | If true, then enables WebHDFS protection against cross-site request forgery (CSRF). The WebHDFS client also uses this property to determine whether or not it needs to send the custom CSRF prevention header in its HTTP requests. | `false` |
| `dfs.webhdfs.rest-csrf.custom-header` | The name of a custom header that HTTP requests must send when protection against cross-site request forgery (CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to true. The WebHDFS client also uses this property to determine whether or not it needs to send the custom CSRF prevention header in its HTTP requests. | `X-XSRF-HEADER` |
| `dfs.webhdfs.rest-csrf.methods-to-ignore` | A comma-separated list of HTTP methods that do not require HTTP requests to include a custom header when protection against cross-site request forgery (CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to true. The WebHDFS client also uses this property to determine whether or not it needs to send the custom CSRF prevention header in its HTTP requests. | `GET,OPTIONS,HEAD,TRACE` |
| `dfs.webhdfs.rest-csrf.browser-useragents-regex` | A comma-separated list of regular expressions used to match against an HTTP request's User-Agent header when protection against cross-site request forgery (CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to true. If the incoming User-Agent matches any of these regular expressions, then the request is considered to be sent by a browser, and therefore CSRF prevention is enforced. If the request's User-Agent does not match any of these regular expressions, then the request is considered to be sent by something other than a browser, such as scripted automation. In this case, CSRF is not a potential attack vector, so the prevention is not enforced. This helps achieve backwards-compatibility with existing automation that has not been updated to send the CSRF prevention header. | `^Mozilla.*,^Opera.*` |
The following is an example `curl` call that uses the `-H` option to include the
custom header in the request.
    curl -i -L -X PUT -H 'X-XSRF-HEADER: ""' 'http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATE'
File and Directory Operations
-----------------------------

View File: TestWebHdfsWithRestCsrfPreventionFilter.java

@@ -0,0 +1,166 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.web;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* Tests use of the cross-site request forgery (CSRF) prevention filter with
* WebHDFS. This is a parameterized test that covers various combinations of
* CSRF protection enabled or disabled at the NameNode, the DataNode and the
* WebHDFS client. If the server is configured with CSRF prevention, but the
* client is not, then protected operations are expected to fail.
*/
@RunWith(Parameterized.class)
public class TestWebHdfsWithRestCsrfPreventionFilter {
private static final Path FILE = new Path("/file");
private final boolean nnRestCsrf;
private final boolean dnRestCsrf;
private final boolean clientRestCsrf;
private MiniDFSCluster cluster;
private FileSystem fs, webhdfs;
@Rule
public ExpectedException exception = ExpectedException.none();
public TestWebHdfsWithRestCsrfPreventionFilter(boolean nnRestCsrf,
boolean dnRestCsrf, boolean clientRestCsrf) {
this.nnRestCsrf = nnRestCsrf;
this.dnRestCsrf = dnRestCsrf;
this.clientRestCsrf = clientRestCsrf;
}
@Parameters
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[][] {
{ false, false, false },
{ true, true, true },
{ true, true, false },
{ true, false, true },
{ true, false, false },
{ false, true, true },
{ false, true, false },
{ false, false, true }});
}
@Before
public void before() throws Exception {
Configuration nnConf = new Configuration();
nnConf.setBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, nnRestCsrf);
// Set configuration to treat anything as a browser, so that CSRF prevention
// checks actually get enforced.
nnConf.set(DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
cluster = new MiniDFSCluster.Builder(nnConf).numDataNodes(0).build();
Configuration dnConf = new Configuration(nnConf);
dnConf.setBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, dnRestCsrf);
cluster.startDataNodes(dnConf, 1, true, null, null, null, null, false);
cluster.waitActive();
fs = cluster.getFileSystem();
Configuration clientConf = new Configuration();
clientConf.setBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, clientRestCsrf);
InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
webhdfs = FileSystem.get(URI.create("webhdfs://" +
NetUtils.getHostPortString(addr)), clientConf);
}
@After
public void after() {
IOUtils.closeStream(webhdfs);
IOUtils.closeStream(fs);
if (cluster != null) {
cluster.shutdown();
}
}
@Test
public void testCreate() throws Exception {
// create is an HTTP PUT that redirects from NameNode to DataNode, so we
// expect CSRF prevention on either server to block an unconfigured client.
if ((nnRestCsrf || dnRestCsrf) && !clientRestCsrf) {
expectException();
}
assertTrue(webhdfs.createNewFile(FILE));
}
@Test
public void testDelete() throws Exception {
DFSTestUtil.createFile(fs, FILE, 1024, (short)1, 0L);
// delete is an HTTP DELETE that executes solely within the NameNode as a
// metadata operation, so we expect CSRF prevention configured on the
// NameNode to block an unconfigured client.
if (nnRestCsrf && !clientRestCsrf) {
expectException();
}
assertTrue(webhdfs.delete(FILE, false));
}
@Test
public void testGetFileStatus() throws Exception {
// getFileStatus is an HTTP GET, not subject to CSRF prevention, so we
// expect it to succeed always, regardless of CSRF configuration.
assertNotNull(webhdfs.getFileStatus(new Path("/")));
}
@Test
public void testTruncate() throws Exception {
DFSTestUtil.createFile(fs, FILE, 1024, (short)1, 0L);
// truncate is an HTTP POST that executes solely within the NameNode as a
// metadata operation, so we expect CSRF prevention configured on the
// NameNode to block an unconfigured client.
if (nnRestCsrf && !clientRestCsrf) {
expectException();
}
assertTrue(webhdfs.truncate(FILE, 0L));
}
private void expectException() {
exception.expect(IOException.class);
exception.expectMessage("Missing Required Header");
}
}