HttpFS server should check that upload requests have correct content-type. (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1221616 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Alejandro Abdelnur 2011-12-21 05:26:20 +00:00
parent 8fe3dd3fea
commit 6b4f40cbf9
5 changed files with 219 additions and 1 deletion

View File

@ -98,6 +98,8 @@ public class HttpFSFileSystem extends FileSystem {
public static final String SET_REPLICATION_JSON = "boolean"; public static final String SET_REPLICATION_JSON = "boolean";
public static final String UPLOAD_CONTENT_TYPE= "application/octet-stream";
public static enum FILE_TYPE { public static enum FILE_TYPE {
FILE, DIRECTORY, SYMLINK; FILE, DIRECTORY, SYMLINK;
@ -459,7 +461,7 @@ public class HttpFSFileSystem extends FileSystem {
String location = conn.getHeaderField("Location"); String location = conn.getHeaderField("Location");
if (location != null) { if (location != null) {
conn = getConnection(new URL(location), method); conn = getConnection(new URL(location), method);
conn.setRequestProperty("Content-Type", "application/octet-stream"); conn.setRequestProperty("Content-Type", UPLOAD_CONTENT_TYPE);
try { try {
OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize); OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics); return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);

View File

@ -0,0 +1,112 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.IOException;
import java.net.InetAddress;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
 * Filter that enforces the content-type to be application/octet-stream for
 * POST and PUT requests.
 * <p/>
 * Requests that carry upload data but declare a different content-type are
 * rejected with a {@code 400 Bad Request}.
 */
public class CheckUploadContentTypeFilter implements Filter {

  // Operations that carry a request body and therefore must declare the
  // octet-stream content-type: APPEND (POST) and CREATE (PUT).
  private static final Set<String> UPLOAD_OPERATIONS;

  static {
    Set<String> ops = new HashSet<String>();
    ops.add(HttpFSFileSystem.PostOpValues.APPEND.toString());
    ops.add(HttpFSFileSystem.PutOpValues.CREATE.toString());
    UPLOAD_OPERATIONS = Collections.unmodifiableSet(ops);
  }

  /**
   * Initializes the filter.
   * <p/>
   * This implementation is a NOP.
   *
   * @param config filter configuration.
   *
   * @throws ServletException thrown if the filter could not be initialized.
   */
  @Override
  public void init(FilterConfig config) throws ServletException {
  }

  /**
   * Enforces the content-type to be application/octet-stream for
   * POST and PUT requests.
   *
   * @param request servlet request.
   * @param response servlet response.
   * @param chain filter chain.
   *
   * @throws IOException thrown if an IO error occurs.
   * @throws ServletException thrown if a servlet error occurs.
   */
  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
                       FilterChain chain)
    throws IOException, ServletException {
    boolean contentTypeOK = true;
    HttpServletRequest httpReq = (HttpServletRequest) request;
    HttpServletResponse httpRes = (HttpServletResponse) response;
    String method = httpReq.getMethod();
    if (method.equals("PUT") || method.equals("POST")) {
      String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
      // Locale.ROOT avoids locale-sensitive case mapping of the user-supplied
      // op parameter (e.g. the Turkish dotless-i problem with toUpperCase()).
      if (op != null &&
          UPLOAD_OPERATIONS.contains(op.toUpperCase(Locale.ROOT))) {
        // The content-type is only enforced when the request actually carries
        // data (the 'data=true' redirect leg of the two-step upload).
        if ("true".equalsIgnoreCase(
            httpReq.getParameter(HttpFSParams.DataParam.NAME))) {
          String contentType = httpReq.getContentType();
          contentTypeOK =
            HttpFSFileSystem.UPLOAD_CONTENT_TYPE.equalsIgnoreCase(contentType);
        }
      }
    }
    if (contentTypeOK) {
      chain.doFilter(httpReq, httpRes);
    }
    else {
      httpRes.sendError(HttpServletResponse.SC_BAD_REQUEST,
                        "Data upload requests must have content-type set to '" +
                        HttpFSFileSystem.UPLOAD_CONTENT_TYPE + "'");
    }
  }

  /**
   * Destroys the filter.
   * <p/>
   * This implementation is a NOP.
   */
  @Override
  public void destroy() {
  }

}

View File

@ -60,6 +60,11 @@
<filter-class>org.apache.hadoop.lib.servlet.HostnameFilter</filter-class> <filter-class>org.apache.hadoop.lib.servlet.HostnameFilter</filter-class>
</filter> </filter>
<filter>
<filter-name>checkUploadContentType</filter-name>
<filter-class>org.apache.hadoop.fs.http.server.CheckUploadContentTypeFilter</filter-class>
</filter>
<filter> <filter>
<filter-name>fsReleaseFilter</filter-name> <filter-name>fsReleaseFilter</filter-name>
<filter-class>org.apache.hadoop.fs.http.server.HttpFSReleaseFilter</filter-class> <filter-class>org.apache.hadoop.fs.http.server.HttpFSReleaseFilter</filter-class>
@ -80,6 +85,11 @@
<url-pattern>*</url-pattern> <url-pattern>*</url-pattern>
</filter-mapping> </filter-mapping>
<filter-mapping>
<filter-name>checkUploadContentType</filter-name>
<url-pattern>*</url-pattern>
</filter-mapping>
<filter-mapping> <filter-mapping>
<filter-name>fsReleaseFilter</filter-name> <filter-name>fsReleaseFilter</filter-name>
<url-pattern>*</url-pattern> <url-pattern>*</url-pattern>

View File

@ -0,0 +1,91 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.http.server;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.junit.Test;
import org.mockito.Mockito;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Tests for {@link CheckUploadContentTypeFilter}: upload operations (PUT
 * CREATE, POST APPEND) with data must declare the octet-stream content-type;
 * everything else must pass through untouched.
 */
public class TestCheckUploadContentTypeFilter {

  @Test
  public void putUpload() throws Exception {
    test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "application/octet-stream", true, false);
  }

  @Test
  public void postUpload() throws Exception {
    // Content-type comparison is case-insensitive.
    test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "APPLICATION/OCTET-STREAM", true, false);
  }

  @Test
  public void putUploadWrong() throws Exception {
    // Wrong content-type is only rejected when data is actually uploaded.
    test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "plain/text", false, false);
    test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "plain/text", true, true);
  }

  @Test
  public void postUploadWrong() throws Exception {
    test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "plain/text", false, false);
    test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "plain/text", true, true);
  }

  @Test
  public void getOther() throws Exception {
    // GET requests never carry uploads and are never rejected.
    test("GET", HttpFSFileSystem.GetOpValues.GETHOMEDIR.toString(), "plain/text", false, false);
  }

  @Test
  public void putOther() throws Exception {
    // PUT with a non-upload operation is not content-type checked.
    test("PUT", HttpFSFileSystem.PutOpValues.MKDIRS.toString(), "plain/text", false, false);
  }

  /**
   * Drives the filter with a mocked request and verifies the outcome.
   *
   * @param method HTTP method of the simulated request.
   * @param operation value of the 'op' request parameter.
   * @param contentType content-type declared by the request.
   * @param upload whether the 'data' parameter is set to true.
   * @param error whether the filter is expected to reject the request.
   *
   * @throws Exception thrown if the filter invocation fails.
   */
  private void test(String method, String operation, String contentType,
                    boolean upload, boolean error) throws Exception {
    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
    Mockito.when(request.getMethod()).thenReturn(method);
    Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).thenReturn(operation);
    Mockito.when(request.getParameter(HttpFSParams.DataParam.NAME)).
      thenReturn(Boolean.toString(upload));
    Mockito.when(request.getContentType()).thenReturn(contentType);

    FilterChain chain = Mockito.mock(FilterChain.class);

    Filter filter = new CheckUploadContentTypeFilter();

    filter.doFilter(request, response, chain);

    if (error) {
      // Rejection surfaces as a 400 with the "Data upload" message.
      Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
                                         Mockito.contains("Data upload"));
    }
    else {
      // Acceptance means the request reached the rest of the chain.
      Mockito.verify(chain).doFilter(request, response);
    }
  }

}

View File

@ -185,6 +185,9 @@ Trunk (unreleased changes)
HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk. HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk.
(tucu) (tucu)
HttpFS server should check that upload requests have correct
content-type. (tucu)
Release 0.23.1 - UNRELEASED Release 0.23.1 - UNRELEASED
INCOMPATIBLE CHANGES INCOMPATIBLE CHANGES