HBASE-9347 followup -- adding new files missed in original commit

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1523020 13f79535-47bb-0310-9956-ffa450edef68
ndimiduk 2013-09-13 17:20:00 +00:00
parent 7891b666a4
commit 486d9e16a6
3 changed files with 132 additions and 0 deletions

hbase-default.xml

@@ -1048,4 +1048,11 @@ possible configurations would overwhelm and obscure the important.
      Servlet filters for REST service.
    </description>
  </property>
  <property>
    <name>hbase.rest.filter.classes</name>
    <value>org.apache.hadoop.hbase.rest.filter.GzipFilter</value>
    <description>
      Servlet filters for REST service.
    </description>
  </property>
</configuration>
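
The hbase.rest.filter.classes property names the javax.servlet.Filter implementations the REST server installs in front of its resources; GzipFilter is the shipped default, and the value is presumably a comma-separated list. Purely as an illustration of overriding it programmatically, here is a minimal sketch; RestFilterConfigExample and com.example.MyAuditFilter are hypothetical names, not part of this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class RestFilterConfigExample {
  public static void main(String[] args) {
    // Picks up hbase-default.xml / hbase-site.xml from the classpath.
    Configuration conf = HBaseConfiguration.create();

    // Hypothetical override: com.example.MyAuditFilter stands in for any custom
    // javax.servlet.Filter implementation to place in front of the REST resources.
    conf.set("hbase.rest.filter.classes",
        "org.apache.hadoop.hbase.rest.filter.GzipFilter,com.example.MyAuditFilter");

    // getStrings splits the value on commas, the usual way a list-valued
    // Hadoop property is read back.
    for (String cls : conf.getStrings("hbase.rest.filter.classes")) {
      System.out.println("REST servlet filter: " + cls);
    }
  }
}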

DummyFilter.java

@@ -0,0 +1,64 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.rest;

import java.io.IOException;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class DummyFilter implements Filter {
  private Log LOG = LogFactory.getLog(getClass());

  @Override
  public void destroy() {
  }

  @Override
  public void doFilter(ServletRequest paramServletRequest, ServletResponse paramServletResponse,
      FilterChain paramFilterChain) throws IOException, ServletException {
    if (paramServletRequest instanceof HttpServletRequest
        && paramServletResponse instanceof HttpServletResponse) {
      HttpServletRequest request = (HttpServletRequest) paramServletRequest;
      HttpServletResponse response = (HttpServletResponse) paramServletResponse;

      String path = request.getRequestURI();
      LOG.info(path);
      if (path.indexOf("/status/cluster") >= 0) {
        LOG.info("Blocking cluster status request");
        response.sendError(HttpServletResponse.SC_NOT_FOUND, "Cluster status cannot be requested.");
      } else {
        paramFilterChain.doFilter(request, response);
      }
    }
  }

  @Override
  public void init(FilterConfig filterChain) throws ServletException {
  }
}
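
DummyFilter answers 404 for any request whose URI contains /status/cluster and passes every other request down the filter chain. Purely as an illustration, here is a sketch of how that blocking branch could be exercised without a mini-cluster, assuming Mockito is available on the test classpath; DummyFilterStandaloneTest is a hypothetical class and not part of this commit:

import static org.mockito.Mockito.*;

import javax.servlet.FilterChain;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.hbase.rest.DummyFilter;
import org.junit.Test;

public class DummyFilterStandaloneTest {

  @Test
  public void blocksClusterStatusUri() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    HttpServletResponse response = mock(HttpServletResponse.class);
    FilterChain chain = mock(FilterChain.class);
    when(request.getRequestURI()).thenReturn("/status/cluster");

    new DummyFilter().doFilter(request, response, chain);

    // The filter short-circuits with 404 and never invokes the rest of the chain.
    verify(response).sendError(eq(HttpServletResponse.SC_NOT_FOUND), anyString());
    verify(chain, never()).doFilter(request, response);
  }
}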

TestResourceFilter.java

@@ -0,0 +1,61 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.rest;

import static org.junit.Assert.assertEquals;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestResourceFilter {

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final HBaseRESTTestingUtility REST_TEST_UTIL =
    new HBaseRESTTestingUtility();
  private static Client client;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().set(Constants.FILTER_CLASSES, DummyFilter.class.getName());
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
    client = new Client(new Cluster().add("localhost",
      REST_TEST_UTIL.getServletPort()));
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    REST_TEST_UTIL.shutdownServletContainer();
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testFilter() throws Exception {
    String path = "/status/cluster";
    Response response = client.get(path);
    assertEquals(404, response.getCode());
  }
}