HADOOP-2347 REST servlet not thread safe but run in a threaded manner

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@601232 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2007-12-05 07:27:59 +00:00
parent 7f52d2e951
commit afeed7bfc6
6 changed files with 37 additions and 44 deletions

View File

@ -56,6 +56,8 @@ Trunk (unreleased changes)
(Bryan Duxbury via Stack)
HADOOP-2332 Meta table data selection in Hbase Shell
(Edward Yoon via Stack)
HADOOP-2347 REST servlet not thread safe but run in a threaded manner
(Bryan Duxbury via Stack)
IMPROVEMENTS
HADOOP-2401 Add convenience put method that takes writable

View File

@ -61,9 +61,6 @@ import org.apache.hadoop.hbase.util.InfoServer;
*/
public class Dispatcher extends javax.servlet.http.HttpServlet
implements javax.servlet.Servlet {
private static final long serialVersionUID = 1045003206345359301L;
private MetaHandler metaHandler;
private TableHandler tableHandler;
private ScannerHandler scannerHandler;

View File

@ -44,12 +44,8 @@ import org.znerd.xmlenc.XMLOutputter;
* REST handler types take advantage of.
*/
public abstract class GenericHandler {
protected static final long serialVersionUID = 6939910503474376143L;
protected HBaseConfiguration conf;
protected HBaseAdmin admin;
protected HTable table = null;
protected static final String ACCEPT = "accept";
protected static final String COLUMN = "column";
@ -255,16 +251,10 @@ public abstract class GenericHandler {
}
}
protected void focusTable(final String tableName) throws IOException { /*
// Do we have an HTable instance to suit? TODO, keep a pool of * Get an HTable instance by it's table name.
// instances of HTable. For now, allocate a new one each time table */
// focus changes. protected HTable getTable(final String tableName) throws IOException {
if (this.table == null || return new HTable(this.conf, new Text(tableName));
!this.table.getTableName().toString().equals(tableName)) {
if (this.table != null) {
this.table.close();
}
this.table = new HTable(this.conf, new Text(tableName));
}
}
}

View File

@ -77,6 +77,7 @@ public class MetaHandler extends GenericHandler {
final HttpServletResponse response)
throws IOException {
HTableDescriptor [] tables = this.admin.listTables();
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
setResponseHeader(response, tables.length > 0? 200: 204,

View File

@ -31,6 +31,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface;
@ -248,8 +249,8 @@ public class ScannerHandler extends GenericHandler {
private void openScanner(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException {
// get the table
HTable table = getTable(getTableName(pathSegments));
// get the list of columns we're supposed to interact with
String[] raw_columns = request.getParameterValues(COLUMN);
@ -284,8 +285,8 @@ public class ScannerHandler extends GenericHandler {
HConstants.UTF8_ENCODING));
HScannerInterface scanner = (request.getParameter(END_ROW) == null)?
table.obtainScanner(columns, startRow):
table.obtainScanner(columns, startRow, endRow);
// Make a scanner id by hashing the object toString value (object name +
// an id). Will make identifier less burdensome and more url friendly.

View File

@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.io.Text;
import org.mortbay.servlet.MultiPartResponse;
import org.w3c.dom.Document;
@ -67,14 +68,14 @@ public class TableHandler extends GenericHandler {
getTableMetadata(request, response, pathSegments[0]);
}
else{
HTable table = getTable(pathSegments[0]);
if (pathSegments[1].toLowerCase().equals(REGIONS)) {
// get a region list
getTableRegions(table, request, response);
}
else if (pathSegments[1].toLowerCase().equals(ROW)) {
// get a row
getRow(table, request, response, pathSegments);
}
else{
doNotFound(response, "Not handled in TableHandler");
@ -108,7 +109,7 @@ public class TableHandler extends GenericHandler {
* @throws IOException
* Retrieve a row in one of several output formats.
*/
private void getRow(HTable table, final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException {
// pull the row key out of the path
@ -132,8 +133,8 @@ public class TableHandler extends GenericHandler {
// Presumption is that this.table has already been focused on target table. // Presumption is that this.table has already been focused on target table.
Map<Text, byte[]> result = timestampStr == null ?
table.getRow(new Text(row))
: table.getRow(new Text(row), Long.parseLong(timestampStr));
if (result == null || result.size() == 0) {
doNotFound(response, "Row not found!");
@ -151,7 +152,7 @@ public class TableHandler extends GenericHandler {
}
}
} else {
Map<Text, byte[]> prefiltered_result = table.getRow(new Text(row));
if (prefiltered_result == null || prefiltered_result.size() == 0) {
doNotFound(response, "Row not found!");
@ -243,13 +244,14 @@ public class TableHandler extends GenericHandler {
private void putRow(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException {
HTable table = getTable(pathSegments[0]);
switch(ContentType.getContentType(request.getHeader(CONTENT_TYPE))) {
case XML:
putRowXml(table, request, response, pathSegments);
break;
case MIME:
doNotAcceptable(response, "Don't support multipart/related yet...");
break;
default:
doNotAcceptable(response, "Unsupported Accept Header Content: " +
@ -263,7 +265,7 @@ public class TableHandler extends GenericHandler {
* @param pathSegments
* Decode supplied XML and do a put to Hbase.
*/
private void putRowXml(HTable table, final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException{
@ -291,7 +293,7 @@ public class TableHandler extends GenericHandler {
try{
// start an update
Text key = new Text(pathSegments[2]);
lock_id = table.startUpdate(key);
// set the columns from the xml
NodeList columns = doc.getElementsByTagName("column");
@ -310,15 +312,15 @@ public class TableHandler extends GenericHandler {
byte[] value = org.apache.hadoop.hbase.util.Base64.decode(value_node.getFirstChild().getNodeValue());
// put the value
table.put(lock_id, name, value);
}
// commit the update
if (timestamp != null) {
table.commit(lock_id, Long.parseLong(timestamp));
}
else{
table.commit(lock_id);
}
// respond with a 200
@ -326,7 +328,7 @@ public class TableHandler extends GenericHandler {
}
catch(Exception e){
if (lock_id != -1) {
table.abort(lock_id);
}
throw new ServletException(e);
}
@ -337,11 +339,11 @@ public class TableHandler extends GenericHandler {
* @param request
* @param response
*/
private void getTableRegions(HTable table, final HttpServletRequest request,
final HttpServletResponse response)
throws IOException {
// Presumption is that this.table has already been focused on target table. // Presumption is that this.table has already been focused on target table.
Text [] startKeys = table.getStartKeys();
// Presumption is that this.table has already been set against target table // Presumption is that this.table has already been set against target table
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
@ -445,7 +447,7 @@ public class TableHandler extends GenericHandler {
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException {
// grab the table we're operating on
HTable table = getTable(getTableName(pathSegments));
Text key = new Text(pathSegments[2]);
@ -465,7 +467,7 @@ public class TableHandler extends GenericHandler {
} else{
// delete each column in turn
for(int i = 0; i < columns.length; i++){
table.deleteAll(key, new Text(columns[i]));
}
}
response.setStatus(202);