HBASE-9347 Support for enabling servlet filters for REST service (Vandana Ayyalasomayajula)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1522586 13f79535-47bb-0310-9956-ffa450edef68
Author: ndimiduk
Date: 2013-09-12 14:14:02 +00:00
Parent: b685cf69da
Commit: 37e9a86396
5 changed files with 36 additions and 6 deletions

hbase-default.xml

@@ -1041,4 +1041,11 @@ possible configurations would overwhelm and obscure the important.
     Possible values are 'simple' (no authentication), and 'kerberos'.
     </description>
   </property>
+  <property>
+    <name>hbase.rest.filter.classes</name>
+    <value>org.apache.hadoop.hbase.rest.filter.GzipFilter</value>
+    <description>
+      Servlet filters for REST service.
+    </description>
+  </property>
 </configuration>
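
The default value wires in only GzipFilter. Since Configuration.getStrings() splits the value on commas and the loading loop trims each entry, additional filters can be appended as a comma-separated list. A minimal hbase-site.xml sketch; org.example.rest.AuditFilter is a hypothetical custom filter, not a class shipped with HBase:

  <property>
    <name>hbase.rest.filter.classes</name>
    <!-- GzipFilter plus a hypothetical user-supplied filter -->
    <value>org.apache.hadoop.hbase.rest.filter.GzipFilter,org.example.rest.AuditFilter</value>
  </property>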

org/apache/hadoop/hbase/rest/Constants.java

@@ -57,4 +57,5 @@ public interface Constants {
   static final String REST_DNS_NAMESERVER = "hbase.rest.dns.nameserver";
   static final String REST_DNS_INTERFACE = "hbase.rest.dns.interface";
 
+  public static final String FILTER_CLASSES = "hbase.rest.filter.classes";
 }

org/apache/hadoop/hbase/rest/RESTServer.java

@@ -28,6 +28,7 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -226,7 +227,13 @@ public class RESTServer implements Constants {
       context.addFilter(authFilter, "/*", 1);
     }
-    context.addFilter(GzipFilter.class, "/*", 0);
+    // Load filters from configuration.
+    String[] filterClasses = servlet.getConfiguration().getStrings(FILTER_CLASSES,
+        ArrayUtils.EMPTY_STRING_ARRAY);
+    for (String filter : filterClasses) {
+      filter = filter.trim();
+      context.addFilter(Class.forName(filter), "/*", 0);
+    }
 
     // Put up info server.
     int port = conf.getInt("hbase.rest.info.port", 8085);
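
Any class listed in hbase.rest.filter.classes must implement javax.servlet.Filter and have a public no-argument constructor, since the loop above resolves each name with Class.forName() and hands the class straight to context.addFilter(). A minimal sketch of such a filter; the class name LoggingFilter and its package are illustrative only, not part of this commit:

import java.io.IOException;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;

public class LoggingFilter implements Filter {
  @Override
  public void init(FilterConfig config) throws ServletException {
    // no init-params are passed when the REST server loads filters reflectively
  }

  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
      FilterChain chain) throws IOException, ServletException {
    // pre-processing would go here; then continue down the filter chain
    chain.doFilter(request, response);
    // post-processing on the response would go here
  }

  @Override
  public void destroy() {
    // nothing to release
  }
}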

org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java

@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.rest;
 
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -67,7 +68,14 @@ public class HBaseRESTTestingUtility {
     // set up context
     Context context = new Context(server, "/", Context.SESSIONS);
     context.addServlet(sh, "/*");
-    context.addFilter(GzipFilter.class, "/*", 0);
+    // Load filters specified from configuration.
+    String[] filterClasses = conf.getStrings(Constants.FILTER_CLASSES,
+        ArrayUtils.EMPTY_STRING_ARRAY);
+    for (String filter : filterClasses) {
+      filter = filter.trim();
+      context.addFilter(Class.forName(filter), "/*", 0);
+    }
+    LOG.info("Loaded filter classes: " + java.util.Arrays.toString(filterClasses));
     // start the server
     server.start();
     // get the port
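
With the test utility now honoring the same property, a test can register extra filters before the servlet container comes up. A hedged usage sketch; org.example.MyTestFilter is hypothetical, and the whitespace after the comma is safe because the loop trims each entry:

Configuration conf = HBaseConfiguration.create();
conf.set(Constants.FILTER_CLASSES,
    "org.apache.hadoop.hbase.rest.filter.GzipFilter, org.example.MyTestFilter");
new HBaseRESTTestingUtility().startServletContainer(conf);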

org/apache/hadoop/hbase/rest/TestGzipFilter.java

@@ -19,13 +19,21 @@
  */
 package org.apache.hadoop.hbase.rest;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
 import org.apache.commons.httpclient.Header;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
@@ -33,10 +41,8 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
-import org.apache.hadoop.hbase.rest.filter.GzipFilter;
 import org.apache.hadoop.hbase.util.Bytes;
-import static org.junit.Assert.*;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -115,6 +121,7 @@ public class TestGzipFilter {
     is.read(value, 0, VALUE_1.length);
     assertTrue(Bytes.equals(value, VALUE_1));
     is.close();
+    table.close();
 
     testScannerResultCodes();
   }
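
For context, the behavior under test: GzipFilter, the default entry in hbase.rest.filter.classes, decompresses gzip-encoded request bodies and compresses responses for clients that advertise gzip. A hedged sketch of the PUT side of that round trip, using the REST client classes imported above; the variables client, path, and VALUE_1 mirror the test's fixtures, and the exact header values are assumptions:

// Compress the cell value, then PUT it with Content-Encoding: gzip so the
// server-side GzipFilter transparently decompresses it before storing.
ByteArrayOutputStream bos = new ByteArrayOutputStream();
GZIPOutputStream os = new GZIPOutputStream(bos);
os.write(VALUE_1);
os.close();
Header[] headers = new Header[] {
    new Header("Content-Type", "application/octet-stream"),
    new Header("Content-Encoding", "gzip")
};
Response response = client.put(path, headers, bos.toByteArray());
assertEquals(200, response.getCode());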