HBASE-8819 Port HBASE-5428 to Thrift 2

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1501209 13f79535-47bb-0310-9956-ffa450edef68
Lars George 2013-07-09 12:05:34 +00:00
parent 8191fe5cc4
commit 60fcb122d5
2 changed files with 34 additions and 4 deletions

ThriftServer.java

@@ -40,6 +40,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.thrift.CallQueue;
import org.apache.hadoop.hbase.thrift.CallQueue.Call;
import org.apache.hadoop.hbase.thrift.ThriftMetrics;
@@ -129,7 +130,7 @@ public class ThriftServer {
}
/*
* If bindValue is null, we don't bind.
*/
private static InetSocketAddress bindToPort(String bindValue, int listenPort)
throws UnknownHostException {
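The method body lies outside this hunk; as a hedged sketch of what the comment describes (illustrative only, not the committed implementation), a null bindValue falls back to the wildcard address while a non-null value is resolved to a host:

// Sketch only: null means no explicit host, i.e. bind to the wildcard address;
// otherwise resolve the configured host via java.net.InetAddress, whose
// getByName call may throw UnknownHostException.
InetSocketAddress addr = (bindValue == null)
    ? new InetSocketAddress(listenPort)
    : new InetSocketAddress(InetAddress.getByName(bindValue), listenPort);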
@@ -193,9 +194,28 @@ public class ThriftServer {
return new TThreadPoolServer(serverArgs);
}
/**
* Adds the option to pre-load filters at startup.
*
* @param conf The current configuration instance.
*/
protected static void registerFilters(Configuration conf) {
String[] filters = conf.getStrings("hbase.thrift.filters");
if(filters != null) {
for(String filterClass: filters) {
String[] filterPart = filterClass.split(":");
if(filterPart.length != 2) {
log.warn("Invalid filter specification " + filterClass + " - skipping");
} else {
ParseFilter.registerFilter(filterPart[0], filterPart[1]);
}
}
}
}
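For reference, a hedged usage sketch of the new option (the filter class below is made up; the value format, "FilterName:FilterClass" with comma-separated entries, follows from getStrings and the split above):

// Illustrative only: pre-register a custom filter so Thrift clients can refer
// to it by name in filter strings. com.example.MyCustomFilter is hypothetical.
Configuration conf = HBaseConfiguration.create();
conf.set("hbase.thrift.filters", "MyCustomFilter:com.example.MyCustomFilter");
// registerFilters is protected static, so this call assumes same-package
// access, as in the test below.
ThriftServer.registerFilters(conf);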
/**
* Start up the Thrift2 server.
*
* @param args
*/
public static void main(String[] args) throws Exception {
@@ -237,6 +257,7 @@ public class ThriftServer {
conf.set("hbase.regionserver.thrift.server.type", implType);
conf.setInt("hbase.regionserver.thrift.port", listenPort);
registerFilters(conf);
// Construct correct ProtocolFactory
boolean compact = cmd.hasOption("compact");
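The factory construction itself falls outside this hunk; based on standard Thrift usage, this flag typically selects between the compact and binary wire protocols, roughly:

// Assumed mapping (not shown in this hunk): the "compact" option switches the
// wire format between TCompactProtocol and the default TBinaryProtocol.
TProtocolFactory protocolFactory = compact
    ? new TCompactProtocol.Factory()
    : new TBinaryProtocol.Factory();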

TestThriftHBaseServiceHandler.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.thrift.ThriftMetrics;
import org.apache.hadoop.hbase.thrift2.generated.TColumn;
@@ -55,6 +56,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
import static java.nio.ByteBuffer.wrap;
@@ -625,7 +627,6 @@ public class TestThriftHBaseServiceHandler {
int scanId = handler.openScanner(table, scan);
List<TResult> results = null;
for (int i = 0; i < 10; i++) {
System.out.println("batch: " + i);
// get batch for single row (10x10 is what we expect)
results = handler.getScannerRows(scanId, 1);
assertEquals(1, results.size());
@@ -636,7 +637,6 @@ public class TestThriftHBaseServiceHandler {
for (int y = 0; y < 10; y++) {
int colNum = y + (10 * i);
String colNumPad = pad(colNum, (byte) 3);
System.out.println("col" + colNumPad + ": " + new String(cols.get(y).getQualifier()));
assertArrayEquals(("col" + colNumPad).getBytes(), cols.get(y).getQualifier());
}
}
@@ -654,6 +654,15 @@ public class TestThriftHBaseServiceHandler {
}
}
@Test
public void testFilterRegistration() throws Exception {
Configuration conf = UTIL.getConfiguration();
conf.set("hbase.thrift.filters", "MyFilter:filterclass");
ThriftServer.registerFilters(conf);
Map<String, String> registeredFilters = ParseFilter.getAllFilters();
assertEquals("filterclass", registeredFilters.get("MyFilter"));
}
@Test
public void testMetrics() throws Exception {
Configuration conf = UTIL.getConfiguration();