From 5d1889a66d91608d34ca9411fb6e9161e637e9d3 Mon Sep 17 00:00:00 2001
From: cnauroth
Date: Thu, 18 Feb 2016 10:07:28 -0800
Subject: [PATCH] HDFS-9711. Integrate CSRF prevention filter in WebHDFS.
 Contributed by Chris Nauroth.

---
 .../http/RestCsrfPreventionFilter.java        | 168 ++++++++++++++++--
 .../hdfs/client/HdfsClientConfigKeys.java     |  12 ++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java    |  59 ++++++
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |   2 +
 .../datanode/web/DatanodeHttpServer.java      | 114 +++++++++++-
 .../web/PortUnificationServerHandler.java     |  18 +-
 .../web/RestCsrfPreventionFilterHandler.java  | 137 ++++++++++++++
 .../server/namenode/NameNodeHttpServer.java   |  15 +-
 .../src/main/resources/hdfs-default.xml       |  52 ++++++
 .../src/main/webapps/hdfs/explorer.html       |   1 +
 .../src/main/webapps/static/rest-csrf.js      |  91 ++++++++++
 .../hadoop-hdfs/src/site/markdown/WebHDFS.md  |  36 ++++
 ...stWebHdfsWithRestCsrfPreventionFilter.java | 166 +++++++++++++++++
 13 files changed, 851 insertions(+), 20 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/rest-csrf.js
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithRestCsrfPreventionFilter.java

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
index 4f7f5bbdf29..c0f7e39abd5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
@@ -18,7 +18,9 @@
 package org.apache.hadoop.security.http;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.Set;
@@ -32,6 +34,13 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * This filter provides protection against cross site request forgery (CSRF)
  * attacks for REST APIs. Enabling this filter on an endpoint results in the
 * requirement of all clients to send a particular (configurable) HTTP header
 * with every request. In the absence of this header the filter will reject the
 * attempt as a bad request.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class RestCsrfPreventionFilter implements Filter {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
+
   public static final String HEADER_USER_AGENT = "User-Agent";
   public static final String BROWSER_USER_AGENT_PARAM =
       "browser-useragents-regex";
@@ -72,6 +87,9 @@ public void init(FilterConfig filterConfig) throws ServletException {
       agents = BROWSER_USER_AGENTS_DEFAULT;
     }
     parseBrowserUserAgents(agents);
+    LOG.info("Adding cross-site request forgery (CSRF) protection, "
+        + "headerName = {}, methodsToIgnore = {}, browserUserAgents = {}",
+        headerName, methodsToIgnore, browserUserAgents);
   }
 
   void parseBrowserUserAgents(String userAgents) {
@@ -118,22 +136,152 @@ protected boolean isBrowser(String userAgent) {
     return false;
   }
 
-  @Override
-  public void doFilter(ServletRequest request, ServletResponse response,
-      FilterChain chain) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest)request;
-    if (!isBrowser(httpRequest.getHeader(HEADER_USER_AGENT)) ||
-        methodsToIgnore.contains(httpRequest.getMethod()) ||
-        httpRequest.getHeader(headerName) != null) {
-      chain.doFilter(request, response);
+  /**
+   * Defines the minimal API requirements for the filter to execute its
+   * filtering logic. This interface exists to facilitate integration in
+   * components that do not run within a servlet container and therefore cannot
+   * rely on a servlet container to dispatch to the {@link #doFilter} method.
+   * Applications that do run inside a servlet container will not need to write
+   * code that uses this interface. Instead, they can use typical servlet
+   * container configuration mechanisms to insert the filter.
+   */
+  public interface HttpInteraction {
+
+    /**
+     * Returns the value of a header.
+     *
+     * @param header name of header
+     * @return value of header
+     */
+    String getHeader(String header);
+
+    /**
+     * Returns the method.
+     *
+     * @return method
+     */
+    String getMethod();
+
+    /**
+     * Called by the filter after it decides that the request may proceed.
+     *
+     * @throws IOException if there is an I/O error
+     * @throws ServletException if the implementation relies on the servlet API
+     *     and a servlet API call has failed
+     */
+    void proceed() throws IOException, ServletException;
+
+    /**
+     * Called by the filter after it decides that the request is a potential
+     * CSRF attack and therefore must be rejected.
+     *
+     * @param code status code to send
+     * @param message response message
+     * @throws IOException if there is an I/O error
+     */
+    void sendError(int code, String message) throws IOException;
+  }
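The `HttpInteraction` contract above is the whole integration surface for embedders that do not run in a servlet container. As a minimal sketch (not part of this patch), a hypothetical host server could drive the filter like this; the `SimpleRequest` and `SimpleResponse` types and their accessors are assumptions for illustration only:

    // Sketch: adapting a non-servlet request type to HttpInteraction.
    // SimpleRequest/SimpleResponse are hypothetical stand-ins for whatever
    // the embedding server uses internally.
    import java.io.IOException;
    import javax.servlet.ServletException;
    import org.apache.hadoop.security.http.RestCsrfPreventionFilter.HttpInteraction;

    final class SimpleHttpInteraction implements HttpInteraction {
      private final SimpleRequest req;
      private final SimpleResponse resp;
      private final Runnable dispatch;  // continues normal request handling

      SimpleHttpInteraction(SimpleRequest req, SimpleResponse resp,
          Runnable dispatch) {
        this.req = req;
        this.resp = resp;
        this.dispatch = dispatch;
      }

      @Override
      public String getHeader(String header) {
        return req.getHeader(header);    // assumed accessor
      }

      @Override
      public String getMethod() {
        return req.getMethod();          // assumed accessor
      }

      @Override
      public void proceed() throws IOException, ServletException {
        dispatch.run();                  // the filter allowed the request
      }

      @Override
      public void sendError(int code, String message) throws IOException {
        resp.sendError(code, message);   // assumed accessor
      }
    }

A fully initialized filter is then driven with `filter.handleHttpInteraction(new SimpleHttpInteraction(req, resp, dispatch))`, which is exactly the pattern the Netty handler later in this patch follows.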
+
+  /**
+   * Handles an {@link HttpInteraction} by applying the filtering logic.
+   *
+   * @param httpInteraction caller's HTTP interaction
+   * @throws IOException if there is an I/O error
+   * @throws ServletException if the implementation relies on the servlet API
+   *     and a servlet API call has failed
+   */
+  public void handleHttpInteraction(HttpInteraction httpInteraction)
+      throws IOException, ServletException {
+    if (!isBrowser(httpInteraction.getHeader(HEADER_USER_AGENT)) ||
+        methodsToIgnore.contains(httpInteraction.getMethod()) ||
+        httpInteraction.getHeader(headerName) != null) {
+      httpInteraction.proceed();
     } else {
-      ((HttpServletResponse)response).sendError(
-          HttpServletResponse.SC_BAD_REQUEST,
+      httpInteraction.sendError(HttpServletResponse.SC_BAD_REQUEST,
           "Missing Required Header for CSRF Vulnerability Protection");
     }
   }
 
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response,
+      final FilterChain chain) throws IOException, ServletException {
+    final HttpServletRequest httpRequest = (HttpServletRequest)request;
+    final HttpServletResponse httpResponse = (HttpServletResponse)response;
+    handleHttpInteraction(new ServletFilterHttpInteraction(httpRequest,
+        httpResponse, chain));
+  }
+
   @Override
   public void destroy() {
   }
+
+  /**
+   * Constructs a mapping of configuration properties to be used for filter
+   * initialization. The mapping includes all properties that start with the
+   * specified configuration prefix. Property names in the mapping are trimmed
+   * to remove the configuration prefix.
+   *
+   * @param conf configuration to read
+   * @param confPrefix configuration prefix
+   * @return mapping of configuration properties to be used for filter
+   *     initialization
+   */
+  public static Map<String, String> getFilterParams(Configuration conf,
+      String confPrefix) {
+    Map<String, String> filterConfigMap = new HashMap<>();
+    for (Map.Entry<String, String> entry : conf) {
+      String name = entry.getKey();
+      if (name.startsWith(confPrefix)) {
+        String value = conf.get(name);
+        name = name.substring(confPrefix.length());
+        filterConfigMap.put(name, value);
+      }
+    }
+    return filterConfigMap;
+  }
+
+  /**
+   * {@link HttpInteraction} implementation for use in the servlet filter.
+   */
+  private static final class ServletFilterHttpInteraction
+      implements HttpInteraction {
+
+    private final FilterChain chain;
+    private final HttpServletRequest httpRequest;
+    private final HttpServletResponse httpResponse;
+
+    /**
+     * Creates a new ServletFilterHttpInteraction.
+     *
+     * @param httpRequest request to process
+     * @param httpResponse response to process
+     * @param chain filter chain to forward to if HTTP interaction is allowed
+     */
+    public ServletFilterHttpInteraction(HttpServletRequest httpRequest,
+        HttpServletResponse httpResponse, FilterChain chain) {
+      this.httpRequest = httpRequest;
+      this.httpResponse = httpResponse;
+      this.chain = chain;
+    }
+
+    @Override
+    public String getHeader(String header) {
+      return httpRequest.getHeader(header);
+    }
+
+    @Override
+    public String getMethod() {
+      return httpRequest.getMethod();
+    }
+
+    @Override
+    public void proceed() throws IOException, ServletException {
+      chain.doFilter(httpRequest, httpResponse);
+    }
+
+    @Override
+    public void sendError(int code, String message) throws IOException {
+      httpResponse.sendError(code, message);
+    }
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
index 775907e692f..ce0f3e4cfa3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
@@ -41,6 +41,18 @@ public interface HdfsClientConfigKeys {
   String DFS_WEBHDFS_OAUTH_ENABLED_KEY = "dfs.webhdfs.oauth2.enabled";
   boolean DFS_WEBHDFS_OAUTH_ENABLED_DEFAULT = false;
 
+  String DFS_WEBHDFS_REST_CSRF_ENABLED_KEY = "dfs.webhdfs.rest-csrf.enabled";
+  boolean DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT = false;
+  String DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY =
+      "dfs.webhdfs.rest-csrf.custom-header";
+  String DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT = "X-XSRF-HEADER";
+  String DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY =
+      "dfs.webhdfs.rest-csrf.methods-to-ignore";
+  String DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT =
+      "GET,OPTIONS,HEAD,TRACE";
+  String DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY =
+      "dfs.webhdfs.rest-csrf.browser-useragents-regex";
+
   String OAUTH_CLIENT_ID_KEY = "dfs.webhdfs.oauth2.client.id";
 
   String OAUTH_REFRESH_URL_KEY = "dfs.webhdfs.oauth2.refresh.url";
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 82cf65516a0..3527c2709ea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -18,6 +18,13 @@
 package org.apache.hadoop.hdfs.web;
 
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.EOFException;
@@ -33,8 +40,10 @@
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.EnumSet;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.StringTokenizer;
 
 import javax.ws.rs.core.HttpHeaders;
@@ -129,6 +138,8 @@ public class WebHdfsFileSystem extends FileSystem
   private InetSocketAddress nnAddrs[];
   private int currentNNAddrIndex;
   private boolean disallowFallbackToInsecureCluster;
+  private String restCsrfCustomHeader;
+  private Set<String> restCsrfMethodsToIgnore;
 
   private static final ObjectReader READER =
       new ObjectMapper().reader(Map.class);
@@ -227,9 +238,52 @@ public synchronized void initialize(URI uri, Configuration conf
     this.disallowFallbackToInsecureCluster = !conf.getBoolean(
         CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY,
         CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT);
+    this.initializeRestCsrf(conf);
     this.delegationToken = null;
   }
 
+  /**
+   * Initializes client-side handling of cross-site request forgery (CSRF)
+   * protection by figuring out the custom HTTP headers that need to be sent in
+   * requests and which HTTP methods are ignored because they do not require
+   * CSRF protection.
+   *
+   * @param conf configuration to read
+   */
+  private void initializeRestCsrf(Configuration conf) {
+    if (conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,
+        DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT)) {
+      this.restCsrfCustomHeader = conf.getTrimmed(
+          DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_KEY,
+          DFS_WEBHDFS_REST_CSRF_CUSTOM_HEADER_DEFAULT);
+      this.restCsrfMethodsToIgnore = new HashSet<>();
+      this.restCsrfMethodsToIgnore.addAll(getTrimmedStringList(conf,
+          DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_KEY,
+          DFS_WEBHDFS_REST_CSRF_METHODS_TO_IGNORE_DEFAULT));
+    } else {
+      this.restCsrfCustomHeader = null;
+      this.restCsrfMethodsToIgnore = null;
+    }
+  }
+
+  /**
+   * Returns a list of strings from a comma-delimited configuration value.
+   *
+   * @param conf configuration to check
+   * @param name configuration property name
+   * @param defaultValue default value if no value found for name
+   * @return list of strings from comma-delimited configuration value, or an
+   *     empty list if not found
+   */
+  private static List<String> getTrimmedStringList(Configuration conf,
+      String name, String defaultValue) {
+    String valueString = conf.get(name, defaultValue);
+    if (valueString == null) {
+      return new ArrayList<>();
+    }
+    return new ArrayList<>(StringUtils.getTrimmedStringCollection(valueString));
+  }
+
   @Override
   public URI getCanonicalUri() {
     return super.getCanonicalUri();
@@ -601,6 +655,11 @@ private HttpURLConnection connect(final HttpOpParam.Op op, final URL url)
     final boolean doOutput = op.getDoOutput();
     conn.setRequestMethod(op.getType().toString());
     conn.setInstanceFollowRedirects(false);
+    if (restCsrfCustomHeader != null &&
+        !restCsrfMethodsToIgnore.contains(op.getType().name())) {
+      // The value of the header is unimportant.  Only its presence matters.
+      conn.setRequestProperty(restCsrfCustomHeader, "\"\"");
+    }
     switch (op.getType()) {
     // if not sending a message body for a POST or PUT operation, need
     // to ensure the server/proxy knows this
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 61a783d66be..2948a949b35 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1041,6 +1041,8 @@ Release 2.8.0 - UNRELEASED
 
     HDFS-9244. Support nested encryption zones. (zhz)
 
+    HDFS-9711. Integrate CSRF prevention filter in WebHDFS. (cnauroth)
+
   IMPROVEMENTS
 
     HDFS-9257. improve error message for "Absolute path required" in INode.java
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
index fc24fae2e6d..f9bdbf6bf09 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
@@ -17,6 +17,15 @@
  */
 package org.apache.hadoop.hdfs.server.datanode.web;
 
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
+
+import java.util.Enumeration;
+import java.util.Map;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+
 import io.netty.bootstrap.ServerBootstrap;
 import io.netty.channel.ChannelFactory;
 import io.netty.channel.ChannelFuture;
@@ -46,6 +55,7 @@
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 import org.apache.hadoop.security.ssl.SSLFactory;
 
 import java.io.Closeable;
@@ -73,6 +83,7 @@ public class DatanodeHttpServer implements Closeable {
   private final ServerBootstrap httpsServer;
   private final Configuration conf;
   private final Configuration confForCreate;
+  private final RestCsrfPreventionFilter restCsrfPreventionFilter;
   private InetSocketAddress httpAddress;
   private InetSocketAddress httpsAddress;
   static final Log LOG = LogFactory.getLog(DatanodeHttpServer.class);
@@ -81,6 +92,7 @@ public DatanodeHttpServer(final Configuration conf,
       final DataNode datanode,
       final ServerSocketChannel externalHttpChannel)
     throws IOException {
+    this.restCsrfPreventionFilter = createRestCsrfPreventionFilter(conf);
     this.conf = conf;
 
     Configuration confForInfoServer = new Configuration(conf);
@@ -117,7 +129,7 @@ public DatanodeHttpServer(final Configuration conf,
       @Override
       protected void initChannel(SocketChannel ch) throws Exception {
         ch.pipeline().addLast(new PortUnificationServerHandler(jettyAddr,
-            conf, confForCreate));
+            conf, confForCreate, restCsrfPreventionFilter));
       }
     });
 
@@ -165,11 +177,16 @@ protected void doBind(SocketAddress localAddress) throws Exception {}
         protected void initChannel(SocketChannel ch) throws Exception {
           ChannelPipeline p = ch.pipeline();
           p.addLast(
-              new SslHandler(sslFactory.createSSLEngine()),
-              new HttpRequestDecoder(),
-              new HttpResponseEncoder(),
-              new ChunkedWriteHandler(),
-              new URLDispatcher(jettyAddr, conf, confForCreate));
+              new SslHandler(sslFactory.createSSLEngine()),
+              new HttpRequestDecoder(),
+              new HttpResponseEncoder());
+          if (restCsrfPreventionFilter != null) {
+            p.addLast(new RestCsrfPreventionFilterHandler(
+                restCsrfPreventionFilter));
+          }
+          p.addLast(
+              new ChunkedWriteHandler(),
+              new URLDispatcher(jettyAddr, conf, confForCreate));
         }
       });
     } else {
@@ -252,4 +269,87 @@ private static String getHostnameForSpnegoPrincipal(Configuration conf) {
     InetSocketAddress inetSocker = NetUtils.createSocketAddr(addr);
     return inetSocker.getHostString();
   }
-}
\ No newline at end of file
+
+  /**
+   * Creates the {@link RestCsrfPreventionFilter} for the DataNode.  Since the
+   * DataNode HTTP server is not implemented in terms of the servlet API, it
+   * takes some extra effort to obtain an instance of the filter.  This method
+   * takes care of configuration and implementing just enough of the servlet
+   * API and related interfaces so that the DataNode can get a fully
+   * initialized instance of the filter.
+   *
+   * @param conf configuration to read
+   * @return initialized filter, or null if CSRF protection not enabled
+   */
+  private static RestCsrfPreventionFilter createRestCsrfPreventionFilter(
+      Configuration conf) {
+    if (!conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,
+        DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT)) {
+      return null;
+    }
+    String restCsrfClassName = RestCsrfPreventionFilter.class.getName();
+    Map<String, String> restCsrfParams = RestCsrfPreventionFilter
+        .getFilterParams(conf, "dfs.webhdfs.rest-csrf.");
+    RestCsrfPreventionFilter filter = new RestCsrfPreventionFilter();
+    try {
+      filter.init(new MapBasedFilterConfig(restCsrfClassName, restCsrfParams));
+    } catch (ServletException e) {
+      throw new IllegalStateException(
+          "Failed to initialize RestCsrfPreventionFilter.", e);
+    }
+    return filter;
+  }
+
+  /**
+   * A minimal {@link FilterConfig} implementation backed by a {@link Map}.
+   */
+  private static final class MapBasedFilterConfig implements FilterConfig {
+
+    private final String filterName;
+    private final Map<String, String> parameters;
+
+    /**
+     * Creates a new MapBasedFilterConfig.
+     *
+     * @param filterName filter name
+     * @param parameters mapping of filter initialization parameters
+     */
+    public MapBasedFilterConfig(String filterName,
+        Map<String, String> parameters) {
+      this.filterName = filterName;
+      this.parameters = parameters;
+    }
+
+    @Override
+    public String getFilterName() {
+      return this.filterName;
+    }
+
+    @Override
+    public String getInitParameter(String name) {
+      return this.parameters.get(name);
+    }
+
+    @Override
+    public Enumeration<String> getInitParameterNames() {
+      throw this.notImplemented();
+    }
+
+    @Override
+    public ServletContext getServletContext() {
+      throw this.notImplemented();
+    }
+
+    /**
+     * Creates an exception indicating that an interface method is not
+     * implemented.  Such an exception should never be seen in practice,
+     * because this class is used only for methods that are not called by
+     * {@link RestCsrfPreventionFilter}.
+     *
+     * @return exception indicating method not implemented
+     */
+    private UnsupportedOperationException notImplemented() {
+      return new UnsupportedOperationException(this.getClass().getSimpleName()
+          + " does not implement this method.");
+    }
+  }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/PortUnificationServerHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/PortUnificationServerHandler.java
index 7ebc070482d..ff10c6da28e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/PortUnificationServerHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/PortUnificationServerHandler.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.datanode.web.dtp.DtpHttp2Handler;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufUtil;
@@ -51,19 +52,32 @@ public class PortUnificationServerHandler extends ByteToMessageDecoder {
 
   private final Configuration confForCreate;
 
+  private final RestCsrfPreventionFilter restCsrfPreventionFilter;
+
   public PortUnificationServerHandler(InetSocketAddress proxyHost,
-      Configuration conf, Configuration confForCreate) {
+      Configuration conf, Configuration confForCreate,
+      RestCsrfPreventionFilter restCsrfPreventionFilter) {
     this.proxyHost = proxyHost;
     this.conf = conf;
     this.confForCreate = confForCreate;
+    this.restCsrfPreventionFilter = restCsrfPreventionFilter;
   }
 
   private void configureHttp1(ChannelHandlerContext ctx) {
-    ctx.pipeline().addLast(new HttpServerCodec(), new ChunkedWriteHandler(),
+    ctx.pipeline().addLast(new HttpServerCodec());
+    if (this.restCsrfPreventionFilter != null) {
+      ctx.pipeline().addLast(new RestCsrfPreventionFilterHandler(
+          this.restCsrfPreventionFilter));
+    }
+    ctx.pipeline().addLast(new ChunkedWriteHandler(),
         new URLDispatcher(proxyHost, conf, confForCreate));
   }
 
   private void configureHttp2(ChannelHandlerContext ctx) {
+    if (this.restCsrfPreventionFilter != null) {
+      ctx.pipeline().addLast(new RestCsrfPreventionFilterHandler(
+          this.restCsrfPreventionFilter));
+    }
     ctx.pipeline().addLast(new DtpHttp2Handler());
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
new file mode 100644
index 00000000000..f2f0533894d
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode.web;
+
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
+import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
+import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
+
+import io.netty.channel.ChannelFutureListener;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.handler.codec.http.DefaultHttpResponse;
+import io.netty.handler.codec.http.HttpRequest;
+import io.netty.handler.codec.http.HttpResponseStatus;
+import io.netty.util.ReferenceCountUtil;
+
+import org.apache.commons.logging.Log;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter.HttpInteraction;
+
+/**
+ * Netty handler that integrates with the {@link RestCsrfPreventionFilter}.
+ * If the filter determines that the request is allowed, then this handler
+ * forwards the request to the next handler in the Netty pipeline.  Otherwise,
+ * this handler drops the request and immediately sends an HTTP 400 response.
+ */
+@InterfaceAudience.Private
+final class RestCsrfPreventionFilterHandler
+    extends SimpleChannelInboundHandler<HttpRequest> {
+
+  private static final Log LOG = DatanodeHttpServer.LOG;
+
+  private final RestCsrfPreventionFilter restCsrfPreventionFilter;
+
+  /**
+   * Creates a new RestCsrfPreventionFilterHandler.  There will be a new
+   * instance created for each new Netty channel/pipeline serving a new
+   * request.  To prevent the cost of repeated initialization of the filter,
+   * this constructor requires the caller to pass in a pre-built, fully
+   * initialized filter instance.  The filter is stateless after
+   * initialization, so it can be shared across multiple Netty
+   * channels/pipelines.
+   *
+   * @param restCsrfPreventionFilter initialized filter
+   */
+  public RestCsrfPreventionFilterHandler(
+      RestCsrfPreventionFilter restCsrfPreventionFilter) {
+    this.restCsrfPreventionFilter = restCsrfPreventionFilter;
+  }
+
+  @Override
+  protected void channelRead0(final ChannelHandlerContext ctx,
+      final HttpRequest req) throws Exception {
+    restCsrfPreventionFilter.handleHttpInteraction(new NettyHttpInteraction(
+        ctx, req));
+  }
+
+  @Override
+  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
+    LOG.error("Exception in " + this.getClass().getSimpleName(), cause);
+    sendResponseAndClose(ctx,
+        new DefaultHttpResponse(HTTP_1_1, INTERNAL_SERVER_ERROR));
+  }
+
+  /**
+   * Finish handling this pipeline by writing a response with the
+   * "Connection: close" header, flushing, and scheduling a close of the
+   * connection.
+   *
+   * @param ctx context to receive the response
+   * @param resp response to send
+   */
+  private static void sendResponseAndClose(ChannelHandlerContext ctx,
+      DefaultHttpResponse resp) {
+    resp.headers().set(CONNECTION, CLOSE);
+    ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
+  }
+
+  /**
+   * {@link HttpInteraction} implementation for use in a Netty pipeline.
+   */
+  private static final class NettyHttpInteraction implements HttpInteraction {
+
+    private final ChannelHandlerContext ctx;
+    private final HttpRequest req;
+
+    /**
+     * Creates a new NettyHttpInteraction.
+     *
+     * @param ctx context to receive the response
+     * @param req request to process
+     */
+    public NettyHttpInteraction(ChannelHandlerContext ctx, HttpRequest req) {
+      this.ctx = ctx;
+      this.req = req;
+    }
+
+    @Override
+    public String getHeader(String header) {
+      return req.headers().get(header);
+    }
+
+    @Override
+    public String getMethod() {
+      return req.method().name();
+    }
+
+    @Override
+    public void proceed() {
+      ReferenceCountUtil.retain(req);
+      ctx.fireChannelRead(req);
+    }
+
+    @Override
+    public void sendError(int code, String message) {
+      HttpResponseStatus status = new HttpResponseStatus(code, message);
+      sendResponseAndClose(ctx, new DefaultHttpResponse(HTTP_1_1, status));
+    }
+  }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 55cf00f8820..84229e7c374 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -45,6 +47,7 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
 /**
  * Encapsulates the HTTP server started by the NameNode.
@@ -90,6 +93,16 @@ private void initWebHdfs(Configuration conf) throws IOException {
       HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
           + ")");
 
+      // add REST CSRF prevention filter
+      if (conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,
+          DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT)) {
+        Map<String, String> restCsrfParams = RestCsrfPreventionFilter
+            .getFilterParams(conf, "dfs.webhdfs.rest-csrf.");
+        String restCsrfClassName = RestCsrfPreventionFilter.class.getName();
+        HttpServer2.defineFilter(httpServer.getWebAppContext(),
+            restCsrfClassName, restCsrfClassName, restCsrfParams,
+            new String[] {pathSpec});
+      }
+
       // add webhdfs packages
       httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
           .getPackage().getName() + ";" + Param.class.getPackage().getName(),
@@ -297,4 +310,4 @@ static StartupProgress getStartupProgressFromContext(
   public static HAServiceProtocol.HAServiceState getNameNodeStateFromContext(ServletContext context) {
     return getNameNodeFromContext(context).getServiceState();
   }
-}
\ No newline at end of file
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index 4889bc32318..0f5130a657c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -2762,4 +2762,56 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.webhdfs.rest-csrf.enabled</name>
+  <value>false</value>
+  <description>
+    If true, then enables WebHDFS protection against cross-site request
+    forgery (CSRF).  The WebHDFS client also uses this property to determine
+    whether or not it needs to send the custom CSRF prevention header in its
+    HTTP requests.
+  </description>
+</property>
+
+<property>
+  <name>dfs.webhdfs.rest-csrf.custom-header</name>
+  <value>X-XSRF-HEADER</value>
+  <description>
+    The name of a custom header that HTTP requests must send when protection
+    against cross-site request forgery (CSRF) is enabled for WebHDFS by
+    setting dfs.webhdfs.rest-csrf.enabled to true.  The WebHDFS client also
+    uses this property to determine whether or not it needs to send the custom
+    CSRF prevention header in its HTTP requests.
+  </description>
+</property>
+
+<property>
+  <name>dfs.webhdfs.rest-csrf.methods-to-ignore</name>
+  <value>GET,OPTIONS,HEAD,TRACE</value>
+  <description>
+    A comma-separated list of HTTP methods that do not require HTTP requests
+    to include a custom header when protection against cross-site request
+    forgery (CSRF) is enabled for WebHDFS by setting
+    dfs.webhdfs.rest-csrf.enabled to true.  The WebHDFS client also uses this
+    property to determine whether or not it needs to send the custom CSRF
+    prevention header in its HTTP requests.
+  </description>
+</property>
+
+<property>
+  <name>dfs.webhdfs.rest-csrf.browser-useragents-regex</name>
+  <value>^Mozilla.*,^Opera.*</value>
+  <description>
+    A comma-separated list of regular expressions used to match against an
+    HTTP request's User-Agent header when protection against cross-site
+    request forgery (CSRF) is enabled for WebHDFS by setting
+    dfs.webhdfs.rest-csrf.enabled to true.  If the incoming User-Agent matches
+    any of these regular expressions, then the request is considered to be
+    sent by a browser, and therefore CSRF prevention is enforced.  If the
+    request's User-Agent does not match any of these regular expressions, then
+    the request is considered to be sent by something other than a browser,
+    such as scripted automation.  In this case, CSRF is not a potential attack
+    vector, so the prevention is not enforced.  This helps achieve
+    backwards-compatibility with existing automation that has not been updated
+    to send the CSRF prevention header.
+  </description>
+</property>
+
 </configuration>
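Taken together, the defaults above mean the feature stays off until an operator opts in. A minimal sketch of an hdfs-site.xml override that turns it on, keeping the shipped header name:

    <!-- hdfs-site.xml (sketch): enable WebHDFS CSRF prevention -->
    <property>
      <name>dfs.webhdfs.rest-csrf.enabled</name>
      <value>true</value>
    </property>
    <!-- Optional: the value below is just the shipped default. -->
    <property>
      <name>dfs.webhdfs.rest-csrf.custom-header</name>
      <value>X-XSRF-HEADER</value>
    </property>

Because the WebHDFS client reads the same properties, distributing this configuration to client hosts also makes `WebHdfsFileSystem` send the header automatically.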
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index b68cac2e9f1..e66e1e7ec20 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -269,6 +269,7 @@
+    <script type="text/javascript" src="/static/rest-csrf.js"></script>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/rest-csrf.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/rest-csrf.js
new file mode 100644
index 00000000000..973a8e815fd
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/rest-csrf.js
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+"use strict";
+
+// Initializes client-side handling of cross-site request forgery (CSRF)
+// protection by figuring out the custom HTTP headers that need to be sent in
+// requests and which HTTP methods are ignored because they do not require CSRF
+// protection.
+(function() {
+  var restCsrfCustomHeader = null;
+  var restCsrfMethodsToIgnore = null;
+
+  $.ajax({'url': '/conf', 'dataType': 'xml', 'async': false}).done(
+    function(data) {
+      function getBooleanValue(element) {
+        return ($(element).find('value').text().trim().toLowerCase() === 'true');
+      }
+
+      function getTrimmedStringValue(element) {
+        return $(element).find('value').text().trim();
+      }
+
+      function getTrimmedStringArrayValue(element) {
+        var str = $(element).find('value').text().trim();
+        var array = [];
+        if (str) {
+          var splitStr = str.split(',');
+          for (var i = 0; i < splitStr.length; i++) {
+            array.push(splitStr[i].trim());
+          }
+        }
+        return array;
+      }
+
+      // Get all relevant configuration properties.
+      var $xml = $(data);
+      var csrfEnabled = false;
+      var header = null;
+      var methods = [];
+      $xml.find('property').each(function(idx, element) {
+        var name = $(element).find('name').text();
+        if (name === 'dfs.webhdfs.rest-csrf.enabled') {
+          csrfEnabled = getBooleanValue(element);
+        } else if (name === 'dfs.webhdfs.rest-csrf.custom-header') {
+          header = getTrimmedStringValue(element);
+        } else if (name === 'dfs.webhdfs.rest-csrf.methods-to-ignore') {
+          methods = getTrimmedStringArrayValue(element);
+        }
+      });
+
+      // If enabled, set up all subsequent AJAX calls with a pre-send callback
+      // that adds the custom headers if necessary.
+      if (csrfEnabled) {
+        restCsrfCustomHeader = header;
+        restCsrfMethodsToIgnore = {};
+        methods.map(function(method) { restCsrfMethodsToIgnore[method] = true; });
+        $.ajaxSetup({
+          beforeSend: addRestCsrfCustomHeader
+        });
+      }
+    });
+
+  // Adds custom headers to request if necessary.  This is done only for
+  // WebHDFS URLs, and only if it's not an ignored method.
+  function addRestCsrfCustomHeader(xhr, settings) {
+    if (settings.url == null || !settings.url.startsWith('/webhdfs/')) {
+      return;
+    }
+    var method = settings.type;
+    if (restCsrfCustomHeader != null && !restCsrfMethodsToIgnore[method]) {
+      // The value of the header is unimportant.  Only its presence matters.
+      xhr.setRequestHeader(restCsrfCustomHeader, '""');
+    }
+  }
+})();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
index 2b7a493fe5d..473ad27c81b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
@@ -23,6 +23,7 @@ WebHDFS REST API
     * [HDFS Configuration Options](#HDFS_Configuration_Options)
 * [Authentication](#Authentication)
 * [Proxy Users](#Proxy_Users)
+* [Cross-Site Request Forgery Prevention](#Cross-Site_Request_Forgery_Prevention)
 * [File and Directory Operations](#File_and_Directory_Operations)
     * [Create and Write to a File](#Create_and_Write_to_a_File)
     * [Append to a File](#Append_to_a_File)
@@ -263,6 +264,41 @@ When the proxy user feature is enabled, a proxy user *P* may submit a request on
 
         curl -i "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?delegation=<TOKEN>&op=..."
 
+Cross-Site Request Forgery Prevention
+-------------------------------------
+
+WebHDFS supports an optional, configurable mechanism for cross-site request
+forgery (CSRF) prevention.  When enabled, WebHDFS HTTP requests to the
+NameNode or DataNode must include a custom HTTP header.  Configuration
+properties allow adjusting which specific HTTP methods are protected and the
+name of the HTTP header.  The value sent in the header is not relevant.  Only
+the presence of a header by that name is required.
+
+Enabling CSRF prevention also sets up the `WebHdfsFileSystem` class to send
+the required header.  This ensures that CLI commands like
+[`hdfs dfs`](./HDFSCommands.html#dfs) and
+[`hadoop distcp`](../../hadoop-distcp/DistCp.html) continue to work correctly
+when used with `webhdfs:` URIs.
+
+Enabling CSRF prevention also sets up the NameNode web UI to send the required
+header.  After enabling CSRF prevention and restarting the NameNode, existing
+users of the NameNode web UI need to refresh the browser to reload the page
+and find the new configuration.
+
+The following properties control CSRF prevention.
+
+| Property | Description | Default Value |
+|:---- |:---- |:----
+| `dfs.webhdfs.rest-csrf.enabled` | If true, then enables WebHDFS protection against cross-site request forgery (CSRF).  The WebHDFS client also uses this property to determine whether or not it needs to send the custom CSRF prevention header in its HTTP requests. | `false` |
+| `dfs.webhdfs.rest-csrf.custom-header` | The name of a custom header that HTTP requests must send when protection against cross-site request forgery (CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to true.  The WebHDFS client also uses this property to determine whether or not it needs to send the custom CSRF prevention header in its HTTP requests. | `X-XSRF-HEADER` |
+| `dfs.webhdfs.rest-csrf.methods-to-ignore` | A comma-separated list of HTTP methods that do not require HTTP requests to include a custom header when protection against cross-site request forgery (CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to true.  The WebHDFS client also uses this property to determine whether or not it needs to send the custom CSRF prevention header in its HTTP requests. | `GET,OPTIONS,HEAD,TRACE` |
+| `dfs.webhdfs.rest-csrf.browser-useragents-regex` | A comma-separated list of regular expressions used to match against an HTTP request's User-Agent header when protection against cross-site request forgery (CSRF) is enabled for WebHDFS by setting dfs.webhdfs.rest-csrf.enabled to true.  If the incoming User-Agent matches any of these regular expressions, then the request is considered to be sent by a browser, and therefore CSRF prevention is enforced.  If the request's User-Agent does not match any of these regular expressions, then the request is considered to be sent by something other than a browser, such as scripted automation.  In this case, CSRF is not a potential attack vector, so the prevention is not enforced.  This helps achieve backwards-compatibility with existing automation that has not been updated to send the CSRF prevention header. | `^Mozilla.*,^Opera.*` |
+
+The following is an example `curl` call that uses the `-H` option to include
+the custom header in the request.
+
+    curl -i -L -X PUT -H 'X-XSRF-HEADER: ""' 'http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATE'
+
 File and Directory Operations
 -----------------------------
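For contrast with the documentation's example above, the following sketch shows a browser-like request that omits the header and is therefore rejected. `<HOST>`, `<PORT>`, and `<PATH>` are placeholders, and the `-A` option supplies a User-Agent that matches the default browser regular expressions:

    curl -i -X PUT -A 'Mozilla/5.0' 'http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=MKDIRS'
    # Expected: HTTP/1.1 400 Bad Request, with the body reporting
    # "Missing Required Header for CSRF Vulnerability Protection"

Note that the same request issued without a browser-like User-Agent would be allowed, because CSRF prevention is only enforced for requests that appear to originate from a browser.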
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithRestCsrfPreventionFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithRestCsrfPreventionFilter.java
new file mode 100644
index 00000000000..d5f4a055202
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithRestCsrfPreventionFilter.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_KEY;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.Arrays;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Tests use of the cross-site request forgery (CSRF) prevention filter with
+ * WebHDFS.  This is a parameterized test that covers various combinations of
+ * CSRF protection enabled or disabled at the NameNode, the DataNode and the
+ * WebHDFS client.  If the server is configured with CSRF prevention, but the
+ * client is not, then protected operations are expected to fail.
+ */
+@RunWith(Parameterized.class)
+public class TestWebHdfsWithRestCsrfPreventionFilter {
+
+  private static final Path FILE = new Path("/file");
+
+  private final boolean nnRestCsrf;
+  private final boolean dnRestCsrf;
+  private final boolean clientRestCsrf;
+
+  private MiniDFSCluster cluster;
+  private FileSystem fs, webhdfs;
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  public TestWebHdfsWithRestCsrfPreventionFilter(boolean nnRestCsrf,
+      boolean dnRestCsrf, boolean clientRestCsrf) {
+    this.nnRestCsrf = nnRestCsrf;
+    this.dnRestCsrf = dnRestCsrf;
+    this.clientRestCsrf = clientRestCsrf;
+  }
+
+  @Parameters
+  public static Iterable<Object[]> data() {
+    return Arrays.asList(new Object[][] {
+        { false, false, false },
+        { true, true, true },
+        { true, true, false },
+        { true, false, true },
+        { true, false, false },
+        { false, true, true },
+        { false, true, false },
+        { false, false, true }});
+  }
+
+  @Before
+  public void before() throws Exception {
+    Configuration nnConf = new Configuration();
+    nnConf.setBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, nnRestCsrf);
+    // Set configuration to treat anything as a browser, so that CSRF
+    // prevention checks actually get enforced.
+    nnConf.set(DFS_WEBHDFS_REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
+    cluster = new MiniDFSCluster.Builder(nnConf).numDataNodes(0).build();
+
+    Configuration dnConf = new Configuration(nnConf);
+    dnConf.setBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, dnRestCsrf);
+    cluster.startDataNodes(dnConf, 1, true, null, null, null, null, false);
+
+    cluster.waitActive();
+    fs = cluster.getFileSystem();
+
+    Configuration clientConf = new Configuration();
+    clientConf.setBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY, clientRestCsrf);
+    InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
+    webhdfs = FileSystem.get(URI.create("webhdfs://" +
+        NetUtils.getHostPortString(addr)), clientConf);
+  }
+
+  @After
+  public void after() {
+    IOUtils.closeStream(webhdfs);
+    IOUtils.closeStream(fs);
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testCreate() throws Exception {
+    // create is an HTTP PUT that redirects from NameNode to DataNode, so we
+    // expect CSRF prevention on either server to block an unconfigured
+    // client.
+    if ((nnRestCsrf || dnRestCsrf) && !clientRestCsrf) {
+      expectException();
+    }
+    assertTrue(webhdfs.createNewFile(FILE));
+  }
+
+  @Test
+  public void testDelete() throws Exception {
+    DFSTestUtil.createFile(fs, FILE, 1024, (short)1, 0L);
+    // delete is an HTTP DELETE that executes solely within the NameNode as a
+    // metadata operation, so we expect CSRF prevention configured on the
+    // NameNode to block an unconfigured client.
+    if (nnRestCsrf && !clientRestCsrf) {
+      expectException();
+    }
+    assertTrue(webhdfs.delete(FILE, false));
+  }
+
+  @Test
+  public void testGetFileStatus() throws Exception {
+    // getFileStatus is an HTTP GET, not subject to CSRF prevention, so we
+    // expect it to succeed always, regardless of CSRF configuration.
+    assertNotNull(webhdfs.getFileStatus(new Path("/")));
+  }
+
+  @Test
+  public void testTruncate() throws Exception {
+    DFSTestUtil.createFile(fs, FILE, 1024, (short)1, 0L);
+    // truncate is an HTTP POST that executes solely within the NameNode as a
+    // metadata operation, so we expect CSRF prevention configured on the
+    // NameNode to block an unconfigured client.
+    if (nnRestCsrf && !clientRestCsrf) {
+      expectException();
+    }
+    assertTrue(webhdfs.truncate(FILE, 0L));
+  }
+
+  private void expectException() {
+    exception.expect(IOException.class);
+    exception.expectMessage("Missing Required Header");
+  }
+}
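As a closing note on the configuration plumbing used throughout this patch, `getFilterParams` strips the `dfs.webhdfs.rest-csrf.` prefix before handing properties to the filter, mirroring servlet init parameters. A small illustrative sketch of that behavior; the class and `main` method here are just a demonstration harness, not part of the patch:

    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.http.RestCsrfPreventionFilter;

    public class FilterParamsDemo {
      public static void main(String[] args) {
        // Start from an empty Configuration so only these two keys exist.
        Configuration conf = new Configuration(false);
        conf.set("dfs.webhdfs.rest-csrf.enabled", "true");
        conf.set("dfs.webhdfs.rest-csrf.custom-header", "X-XSRF-HEADER");

        // Keys come back trimmed to the filter's own parameter names,
        // e.g. "enabled" and "custom-header" (map order is unspecified).
        Map<String, String> params = RestCsrfPreventionFilter
            .getFilterParams(conf, "dfs.webhdfs.rest-csrf.");
        System.out.println(params);
      }
    }

This is the same trimming that lets the DataNode's `MapBasedFilterConfig` and the NameNode's `HttpServer2.defineFilter` call feed the filter identical parameters.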