HADOOP-2068 RESTful interface

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@599945 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2007-11-30 20:15:48 +00:00
parent 5d5f078bb3
commit 6b159e940e
9 changed files with 1378 additions and 8 deletions

CHANGES.txt

@ -7,6 +7,7 @@ Trunk (unreleased changes)
NEW FEATURES
HADOOP-2061 Add new Base64 dialects
HADOOP-2084 Add a LocalHBaseCluster
HADOOP-2068 RESTful interface
OPTIMIZATIONS

Dispatcher.java

@ -0,0 +1,162 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
/**
* Servlet implementation class for the hbase REST interface.
* Presumes the container ensures only a single thread runs through here
* at any one time (usually the default configuration); in other words,
* the code is not written to be thread-safe.
* <p>This servlet has an explicit dependency on the Jetty server; it uses
* Jetty's implementation of MultiPartResponse.
*
* <p>TODO:
* <ul>
* <li>The multipart/related response is not correct: the servlet
* setContentType is broken in that parameters such as boundary or start
* cannot be added to multipart/related; they get stripped.</li>
* <li>Currently a column must be specified when creating a scanner. The
* HTable instance should keep the current table's metadata to hand so the
* list of all column families is easy to find and a column list can be
* made up when none is specified.</li>
* <li>Minor items: we decode URLs in places where that has probably
* already been done, and scanners sitting in the scanner list need a way
* to time out.</li>
* </ul>
* @see <a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseRest">Hbase REST Specification</a>
*/
public class Dispatcher extends javax.servlet.http.HttpServlet {
private static final long serialVersionUID = 1045003206345359301L;
private MetaHandler metaHandler;
private TableHandler tableHandler;
private ScannerHandler scannerHandler;
private static final String SCANNER = "scanner";
private static final String ROW = "row";
/**
* Default constructor
*/
public Dispatcher() {
super();
}
public void init() throws ServletException {
super.init();
HBaseConfiguration conf = new HBaseConfiguration();
HBaseAdmin admin = null;
try{
admin = new HBaseAdmin(conf);
metaHandler = new MetaHandler(conf, admin);
tableHandler = new TableHandler(conf, admin);
scannerHandler = new ScannerHandler(conf, admin);
} catch(Exception e){
throw new ServletException(e);
}
}
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
String [] pathSegments = getPathSegments(request);
if (pathSegments.length == 0 || pathSegments[0].length() <= 0) {
// if it was a root request, then get some metadata about
// the entire instance.
metaHandler.doGet(request, response, pathSegments);
} else {
// otherwise, it must be a GET request suitable for the
// table handler.
tableHandler.doGet(request, response, pathSegments);
}
}
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
String [] pathSegments = getPathSegments(request);
// there should be at least two path segments (table name and row or scanner)
if (pathSegments.length >= 2 && pathSegments[0].length() > 0) {
if (pathSegments[1].toLowerCase().equals(SCANNER)) {
scannerHandler.doPost(request, response, pathSegments);
return;
} else if (pathSegments[1].toLowerCase().equals(ROW) && pathSegments.length >= 3) {
tableHandler.doPost(request, response, pathSegments);
return;
}
}
// if we get to this point, then no handler matched this request.
GenericHandler.doNotFound(response, "No handler for " + request.getPathInfo());
}
protected void doPut(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
// Equate PUT with a POST.
doPost(request, response);
}
protected void doDelete(HttpServletRequest request,
HttpServletResponse response)
throws IOException, ServletException {
String [] pathSegments = getPathSegments(request);
// must be at least two path segments (table name and row or scanner)
if (pathSegments.length >= 2 && pathSegments[0].length() > 0) {
// DELETE to a scanner requires at least three path segments
if (pathSegments[1].toLowerCase().equals(SCANNER) &&
pathSegments.length == 3 && pathSegments[2].length() > 0) {
scannerHandler.doDelete(request, response, pathSegments);
return;
} else if (pathSegments[1].toLowerCase().equals(ROW) &&
pathSegments.length >= 3) {
tableHandler.doDelete(request, response, pathSegments);
return;
}
}
// if we reach this point, then no handler exists for this request.
GenericHandler.doNotFound(response, "No handler");
}
/*
* @param request
* @return The request pathinfo split on '/', ignoring the leading '/' so
* the first element of the returned array is not the empty string.
*/
private String [] getPathSegments(final HttpServletRequest request) {
return request.getPathInfo().substring(1).split("/");
}
}
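
Taken together, the rules above route GET / to MetaHandler, GET/POST/PUT/DELETE
on /[table]/row/[row] to TableHandler, and POST/DELETE on /[table]/scanner[/id]
to ScannerHandler. Below is a minimal client-side sketch (not part of this
commit) of exercising the root route; it assumes the webapp is mounted at /api
on the master info server and that localhost:60010 is where that server
listens, both of which are assumptions to adjust to suit:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class RestSmoke {
  public static void main(String[] args) throws Exception {
    // GET on the root path lands in MetaHandler.doGet and returns the
    // table list; ask for text/plain to get one table name per line.
    HttpURLConnection conn = (HttpURLConnection)
      new URL("http://localhost:60010/api/").openConnection();
    conn.setRequestProperty("Accept", "text/plain");
    BufferedReader in = new BufferedReader(
      new InputStreamReader(conn.getInputStream()));
    for (String line; (line = in.readLine()) != null; ) {
      System.out.println(line);
    }
    in.close();
  }
}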

GenericHandler.java

@ -0,0 +1,270 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.io.Text;
import org.mortbay.servlet.MultiPartResponse;
import org.znerd.xmlenc.LineBreak;
import org.znerd.xmlenc.XMLOutputter;
/**
* GenericHandler contains some basic common stuff that all the individual
* REST handler types take advantage of.
*/
public abstract class GenericHandler {
protected static final long serialVersionUID = 6939910503474376143L;
protected HBaseConfiguration conf;
protected HBaseAdmin admin;
protected HTable table = null;
protected static final String ACCEPT = "accept";
protected static final String COLUMN = "column";
protected static final String TIMESTAMP = "timestamp";
protected static final String START_ROW = "start_row";
protected static final String END_ROW = "end_row";
protected static final String CONTENT_TYPE = "content-type";
protected static final String ROW = "row";
protected static final String REGIONS = "regions";
protected final Log LOG = LogFactory.getLog(this.getClass());
public GenericHandler(HBaseConfiguration conf, HBaseAdmin admin) {
this.conf = conf;
this.admin = admin;
}
/*
* Supported content types as enums
*/
protected enum ContentType {
XML("text/xml"),
PLAIN("text/plain"),
MIME("multipart/related"),
NOT_ACCEPTABLE("");
private final String type;
private ContentType(final String t) {
this.type = t;
}
@Override
public String toString() {
return this.type;
}
/**
* Utility method for examining Accept header content.
* @param t The content type to examine.
* @return The enum that matches the prefix of <code>t</code> or
* the default enum if <code>t</code> is empty. If unsupported type, we
* return NOT_ACCEPTABLE.
*/
public static ContentType getContentType(final String t) {
// Default to XML; curl sends an Accept of */*.
if (t == null || t.equals("*/*")) {
return ContentType.XML;
}
}
String lowerCased = t.toLowerCase();
ContentType [] values = ContentType.values();
ContentType result = null;
for (int i = 0; i < values.length; i++) {
if (lowerCased.startsWith(values[i].type)) {
result = values[i];
break;
}
}
return result == null? NOT_ACCEPTABLE: result;
}
}
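// Illustrative behavior (examples, not in the original source):
// getContentType(null) and getContentType("*/*") both yield XML; a header
// such as "text/xml; charset=UTF-8" matches XML by prefix; an unsupported
// type such as "application/json" yields NOT_ACCEPTABLE.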
/*
* @param o
* @return XMLOutputter wrapped around <code>o</code>.
* @throws IllegalStateException
* @throws IOException
*/
protected XMLOutputter getXMLOutputter(final PrintWriter o)
throws IllegalStateException, IOException {
XMLOutputter outputter = new XMLOutputter(o, HConstants.UTF8_ENCODING);
outputter.setLineBreak(LineBreak.UNIX);
outputter.setIndentation(" ");
outputter.declaration();
return outputter;
}
/*
* Write an XML element.
* @param outputter
* @param name
* @param value
* @throws IllegalStateException
* @throws IOException
*/
protected void doElement(final XMLOutputter outputter,
final String name, final String value)
throws IllegalStateException, IOException {
outputter.startTag(name);
if (value.length() > 0) {
outputter.pcdata(value);
}
outputter.endTag();
}
/*
* Set content-type, encoding, and status on passed <code>response</code>
* @param response
* @param status
* @param contentType
*/
public static void setResponseHeader(final HttpServletResponse response,
final int status, final String contentType) {
// Container adds the charset to the HTTP content-type header.
response.setContentType(contentType);
response.setCharacterEncoding(HConstants.UTF8_ENCODING);
response.setStatus(status);
}
/*
* If we can't do the specified Accept header type.
* @param response
* @throws IOException
*/
public static void doNotAcceptable(final HttpServletResponse response)
throws IOException {
response.sendError(HttpServletResponse.SC_NOT_ACCEPTABLE);
}
/*
* If we can't do the specified Accept header type.
* @param response
* @param message
* @throws IOException
*/
public static void doNotAcceptable(final HttpServletResponse response,
final String message)
throws IOException {
response.sendError(HttpServletResponse.SC_NOT_ACCEPTABLE, message);
}
/*
* Resource not found.
* @param response
* @throws IOException
*/
public static void doNotFound(final HttpServletResponse response)
throws IOException {
response.sendError(HttpServletResponse.SC_NOT_FOUND);
}
/*
* Resource not found.
* @param response
* @param msg
* @throws IOException
*/
public static void doNotFound(final HttpServletResponse response, final String msg)
throws IOException {
response.sendError(HttpServletResponse.SC_NOT_FOUND, msg);
}
/*
* Method is not allowed or not yet implemented.
* @param response
* @param message to send
* @throws IOException
*/
public static void doMethodNotAllowed(final HttpServletResponse response,
final String message)
throws IOException {
response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
}
protected String getTableName(final String [] pathSegments)
throws UnsupportedEncodingException {
// Get the table name: the first path segment. It can't be the empty
// string or null because we should have tested for that before coming
// in here.
return URLDecoder.decode(pathSegments[0], HConstants.UTF8_ENCODING);
}
/*
* Output row columns
* @param outputter
* @param m
* @throws IllegalStateException
* @throws IllegalArgumentException
* @throws IOException
*/
protected void outputColumnsXml(final XMLOutputter outputter,
final Map<Text, byte[]> m)
throws IllegalStateException, IllegalArgumentException, IOException {
for (Map.Entry<Text, byte[]> e: m.entrySet()) {
outputter.startTag(COLUMN);
doElement(outputter, "name", e.getKey().toString());
// We don't know String from binary data so we always base64 encode.
doElement(outputter, "value",
org.apache.hadoop.hbase.util.Base64.encodeBytes(e.getValue()));
outputter.endTag();
}
}
protected void outputColumnsMime(final MultiPartResponse mpr,
final Map<Text, byte[]> m)
throws IOException {
for (Map.Entry<Text, byte[]> e: m.entrySet()) {
mpr.startPart("application/octet-stream",
new String [] {"Content-Description: " + e.getKey().toString(),
"Content-Transfer-Encoding: binary",
"Content-Length: " + e.getValue().length});
mpr.getOut().write(e.getValue());
}
}
protected void focusTable(final String tableName) throws IOException {
// Do we have an HTable instance to suit? TODO, keep a pool of
// instances of HTable. For now, allocate a new one each time table
// focus changes.
if (this.table == null ||
!this.table.getTableName().toString().equals(tableName)) {
if (this.table != null) {
this.table.close();
}
this.table = new HTable(this.conf, new Text(tableName));
}
}
}
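
For reference, outputColumnsXml above emits one <column> element per cell, the
value always base64-encoded; wrapped in the <row> tag its callers open, a
response body looks roughly like this (the column name and value are
illustrative stand-ins):

<?xml version="1.0" encoding="UTF-8"?>
<row>
  <column>
    <name>info:name</name>
    <value>c29tZSBkYXRh</value>
  </column>
</row>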

MetaHandler.java

@ -0,0 +1,106 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.znerd.xmlenc.XMLOutputter;
/**
* MetaHandler fields all requests for metadata at the instance level. At the
* moment this amounts to GET requests to / only.
*/
public class MetaHandler extends GenericHandler {
public MetaHandler(HBaseConfiguration conf, HBaseAdmin admin)
throws ServletException{
super(conf, admin);
}
public void doGet(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
getTables(request, response);
}
public void doPost(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doMethodNotAllowed(response, "POST not allowed at /");
}
public void doPut(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doMethodNotAllowed(response, "PUT not allowed at /");
}
public void doDelete(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doMethodNotAllowed(response, "DELETE not allowed at /");
}
/*
* Return list of tables.
* @param request
* @param response
*/
private void getTables(final HttpServletRequest request,
final HttpServletResponse response)
throws IOException {
HTableDescriptor [] tables = this.admin.listTables();
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
setResponseHeader(response, tables.length > 0? 200: 204,
ContentType.XML.toString());
XMLOutputter outputter = getXMLOutputter(response.getWriter());
outputter.startTag("tables");
for (int i = 0; i < tables.length; i++) {
doElement(outputter, "table", tables[i].getName().toString());
}
outputter.endTag();
outputter.endDocument();
outputter.getWriter().close();
break;
case PLAIN:
setResponseHeader(response, tables.length > 0? 200: 204,
ContentType.PLAIN.toString());
PrintWriter out = response.getWriter();
for (int i = 0; i < tables.length; i++) {
out.println(tables[i].getName().toString());
}
out.close();
break;
default:
doNotAcceptable(response);
}
}
}
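
Given getTables above, a GET / with Accept: text/xml returns a document shaped
like the following (table names are stand-ins), while Accept: text/plain
returns the same names one per line:

<?xml version="1.0" encoding="UTF-8"?>
<tables>
  <table>table1</table>
  <table>table2</table>
</tables>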

ScannerHandler.java

@ -0,0 +1,325 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.JenkinsHash;
import org.apache.hadoop.io.Text;
import org.mortbay.servlet.MultiPartResponse;
import org.znerd.xmlenc.XMLOutputter;
/**
* ScannerHandler fields all scanner-related requests.
*/
public class ScannerHandler extends GenericHandler {
public ScannerHandler(HBaseConfiguration conf, HBaseAdmin admin)
throws ServletException{
super(conf, admin);
}
private class ScannerRecord {
private final HScannerInterface scanner;
private HStoreKey key = null;
private SortedMap<Text, byte []> value = null;
private boolean isEmpty;
ScannerRecord(final HScannerInterface s) {
this.isEmpty = false;
this.scanner = s;
}
public HScannerInterface getScanner() {
return this.scanner;
}
public HStoreKey getKey() {
return this.key;
}
public SortedMap<Text, byte[]> getValue() {
return this.value;
}
public boolean isEmpty(){
return this.isEmpty;
}
/**
* Call next on the scanner.
* @return True if more values in scanner.
* @throws IOException
*/
public boolean next() throws IOException {
this.key = new HStoreKey();
this.value = new TreeMap<Text, byte []>();
this.isEmpty = !this.scanner.next(this.key, this.value);
return !this.isEmpty;
}
}
/*
* Map of outstanding scanners keyed by scannerid.
*/
private final Map<String, ScannerRecord> scanners =
new HashMap<String, ScannerRecord>();
public void doGet(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doMethodNotAllowed(response, "GET to a scanner not supported.");
}
public void doPost(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
if (pathSegments.length == 2) {
// trying to create a scanner
openScanner(request, response, pathSegments);
}
else if (pathSegments.length == 3) {
// advancing a scanner
getScanner(request, response, pathSegments[2]);
}
else{
doNotFound(response, "No handler for request");
}
}
public void doPut(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doPost(request, response, pathSegments);
}
public void doDelete(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doMethodNotAllowed(response, "Not hooked back up yet!");
}
/*
* Advance scanner and return current position.
* @param request
* @param response
* @param scannerid
* @throws IOException
*/
private void getScanner(final HttpServletRequest request,
final HttpServletResponse response, final String scannerid)
throws IOException {
ScannerRecord sr = this.scanners.get(scannerid);
if (sr == null) {
doNotFound(response, "No such scanner.");
return;
}
if (sr.next()) {
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
outputScannerEntryXML(response, sr);
break;
case MIME:
outputScannerEntryMime(response, sr);
break;
default:
doNotAcceptable(response);
}
}
else{
this.scanners.remove(scannerid);
doNotFound(response, "Scanner is expended");
}
}
private void outputScannerEntryXML(final HttpServletResponse response,
final ScannerRecord sr)
throws IOException {
HStoreKey key = sr.getKey();
// respond with a 200 and Content-type: text/xml
setResponseHeader(response, 200, ContentType.XML.toString());
// setup an xml outputter
XMLOutputter outputter = getXMLOutputter(response.getWriter());
outputter.startTag(ROW);
// write the row key
doElement(outputter, "name", key.getRow().toString());
// Normally no column is supplied when scanning.
if (key.getColumn() != null &&
key.getColumn().getLength() > 0) {
doElement(outputter, "key-column", key.getColumn().toString());
}
doElement(outputter, "timestamp", Long.toString(key.getTimestamp()));
outputColumnsXml(outputter, sr.getValue());
outputter.endTag();
outputter.endDocument();
outputter.getWriter().close();
}
private void outputScannerEntryMime(final HttpServletResponse response,
final ScannerRecord sr)
throws IOException {
response.setStatus(200);
// This code ties me to the jetty server.
MultiPartResponse mpr = new MultiPartResponse(response);
// Content type should look like this for multipart:
// Content-type: multipart/related;start="<rootpart*94ebf1e6-7eb5-43f1-85f4-2615fc40c5d6@example.jaxws.sun.com>";type="application/xop+xml";boundary="uuid:94ebf1e6-7eb5-43f1-85f4-2615fc40c5d6";start-info="text/xml"
String ct = ContentType.MIME.toString() + ";charset=\"UTF-8\";boundary=\"" +
mpr.getBoundary() + "\"";
// Setting content type is broken. I'm unable to set parameters on the
// content-type; They get stripped. Can't set boundary, etc.
// response.addHeader("Content-Type", ct);
response.setContentType(ct);
// Write row, key-column and timestamp each in its own part.
mpr.startPart("application/octet-stream",
new String [] {"Content-Description: row",
"Content-Transfer-Encoding: binary",
"Content-Length: " + sr.getKey().getRow().getBytes().length});
mpr.getOut().write(sr.getKey().getRow().getBytes());
// Usually the key-column is empty when scanning.
if (sr.getKey().getColumn() != null &&
sr.getKey().getColumn().getLength() > 0) {
mpr.startPart("application/octet-stream",
new String [] {"Content-Description: key-column",
"Content-Transfer-Encoding: binary",
"Content-Length: " + sr.getKey().getColumn().getBytes().length});
// Write the column bytes inside the conditional so nothing is written
// when no key-column part was started.
mpr.getOut().write(sr.getKey().getColumn().getBytes());
}
// TODO: Fix. Need to write out the timestamp in the ordained timestamp
// format.
byte [] timestampBytes = Long.toString(sr.getKey().getTimestamp()).getBytes();
mpr.startPart("application/octet-stream",
new String [] {"Content-Description: timestamp",
"Content-Transfer-Encoding: binary",
"Content-Length: " + timestampBytes.length});
mpr.getOut().write(timestampBytes);
// Write out columns
outputColumnsMime(mpr, sr.getValue());
mpr.close();
}
/*
* Create scanner
* @param request
* @param response
* @param pathSegments
* @throws IOException
*/
private void openScanner(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException {
// focus on the table
focusTable(getTableName(pathSegments));
// get the list of columns we're supposed to interact with
String[] raw_columns = request.getParameterValues(COLUMN);
Text [] columns = null;
if (raw_columns != null) {
columns = new Text [raw_columns.length];
for (int i = 0; i < raw_columns.length; i++) {
// I think this decoding is redundant.
columns[i] =
new Text(URLDecoder.decode(raw_columns[i], HConstants.UTF8_ENCODING));
}
} else {
// TODO: Need to put into the scanner all of the table's column
// families. TODO: Verify this returns all rows. For now just fail.
doMethodNotAllowed(response, "Unspecified columns parameter currently not supported!");
return;
}
// TODO: Parse according to the timestamp format we agree on.
String raw_ts = request.getParameter(TIMESTAMP);
// TODO: Are these decodings redundant?
Text startRow = request.getParameter(START_ROW) == null?
HConstants.EMPTY_START_ROW:
new Text(URLDecoder.decode(request.getParameter(START_ROW),
HConstants.UTF8_ENCODING));
// Empty start row is same value as empty end row.
Text endRow = request.getParameter(END_ROW) == null?
HConstants.EMPTY_START_ROW:
new Text(URLDecoder.decode(request.getParameter(END_ROW),
HConstants.UTF8_ENCODING));
HScannerInterface scanner = (request.getParameter(END_ROW) == null)?
this.table.obtainScanner(columns, startRow):
this.table.obtainScanner(columns, startRow, endRow);
// Make a scanner id by hashing the object toString value (object name +
// an id). This makes the identifier less burdensome and more URL-friendly.
String scannerid =
Integer.toHexString(JenkinsHash.hash(scanner.toString().getBytes(), -1));
ScannerRecord sr = new ScannerRecord(scanner);
// store the scanner for subsequent requests
this.scanners.put(scannerid, sr);
// set a 201 (Created) header and a Location pointing to the new
// scanner
response.setStatus(201);
response.addHeader("Location", request.getContextPath() + "/" +
pathSegments[0] + "/" + pathSegments[1] + "/" + scannerid);
response.getOutputStream().close();
}
/*
* Delete scanner
* @param response
* @param scannerid
* @throws IOException
*/
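// Note: not currently reachable; doDelete above still answers
// "Not hooked back up yet!" rather than dispatching here.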
private void deleteScanner(final HttpServletResponse response,
final String scannerid)
throws IOException, ServletException {
ScannerRecord sr = this.scanners.remove(scannerid);
if (sr == null) {
doNotFound(response, "No such scanner");
} else {
sr.getScanner().close();
response.setStatus(200);
response.getOutputStream().close();
}
}
}
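
The lifecycle the handler implements: POST /[table]/scanner?column=... creates
a scanner (201 plus a Location header), POST to the returned location advances
it one row, and a 404 signals the scanner is spent. A client-side sketch under
the same host, port and /api assumptions as before, with "mytable" and "info:"
as stand-in table and column names:

import java.net.HttpURLConnection;
import java.net.URL;

public class ScannerSmoke {
  public static void main(String[] args) throws Exception {
    // openScanner answers 201 Created with a Location header pointing at
    // the new scanner.
    HttpURLConnection create = (HttpURLConnection) new URL(
      "http://localhost:60010/api/mytable/scanner?column=info:").openConnection();
    create.setRequestMethod("POST");
    if (create.getResponseCode() != 201) {
      throw new IllegalStateException("scanner was not created");
    }
    String location = create.getHeaderField("Location");
    // Each POST to the scanner URL advances one row; getScanner answers
    // 404 once the scanner is expended and dropped from the scanner list.
    int status;
    do {
      HttpURLConnection next = (HttpURLConnection)
        new URL("http://localhost:60010" + location).openConnection();
      next.setRequestMethod("POST");
      next.setRequestProperty("Accept", "text/xml");
      status = next.getResponseCode();
    } while (status == 200);
  }
}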

TableHandler.java

@ -0,0 +1,474 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.io.Text;
import org.mortbay.servlet.MultiPartResponse;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.znerd.xmlenc.XMLOutputter;
/**
* TableHandler fields all requests that deal with an individual table.
* That means all requests that start with /api/[table_name]/... go to
* this handler.
*/
public class TableHandler extends GenericHandler {
public TableHandler(HBaseConfiguration conf, HBaseAdmin admin)
throws ServletException{
super(conf, admin);
}
public void doGet(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
// if it's just table name, return the metadata
if (pathSegments.length == 1) {
getTableMetadata(request, response, pathSegments[0]);
}
else{
focusTable(pathSegments[0]);
if (pathSegments[1].toLowerCase().equals(REGIONS)) {
// get a region list
getTableRegions(request, response);
}
else if (pathSegments[1].toLowerCase().equals(ROW)) {
// get a row
getRow(request, response, pathSegments);
}
else{
doNotFound(response, "Not handled in TableHandler");
}
}
}
public void doPost(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
putRow(request, response, pathSegments);
}
public void doPut(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
doPost(request, response, pathSegments);
}
public void doDelete(HttpServletRequest request, HttpServletResponse response,
String[] pathSegments)
throws ServletException, IOException {
deleteRow(request, response, pathSegments);
}
/*
* @param request
* @param response
* @param pathSegments info path split on the '/' character. First segment
* is the tablename, second is 'row', and third is the row id.
* @throws IOException
* Retrieve a row in one of several output formats.
*/
private void getRow(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException {
// pull the row key out of the path
String row = URLDecoder.decode(pathSegments[2], HConstants.UTF8_ENCODING);
String timestampStr = null;
if (pathSegments.length == 4) {
// A timestamp has been supplied.
timestampStr = pathSegments[3];
if (timestampStr.equals("timestamps")) {
// Not supported in hbase just yet. TODO
doMethodNotAllowed(response, "Not yet supported by hbase");
return;
}
}
String[] columns = request.getParameterValues(COLUMN);
if (columns == null || columns.length == 0) {
// They want full row returned.
// Presumption is that this.table has already been focused on target table.
Map<Text, byte[]> result = timestampStr == null ?
this.table.getRow(new Text(row))
: this.table.getRow(new Text(row), Long.parseLong(timestampStr));
if (result == null || result.size() == 0) {
doNotFound(response, "Row not found!");
} else {
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
outputRowXml(response, result);
break;
case MIME:
outputRowMime(response, result);
break;
default:
doNotAcceptable(response, "Unsupported Accept Header Content: " +
request.getHeader(ACCEPT));
}
}
} else {
Map<Text, byte[]> prefiltered_result = this.table.getRow(new Text(row));
if (prefiltered_result == null || prefiltered_result.size() == 0) {
doNotFound(response, "Row not found!");
} else {
// create a Set from the columns requested so we can
// efficiently filter the actual found columns
Set<String> requested_columns_set = new HashSet<String>();
for(int i = 0; i < columns.length; i++){
requested_columns_set.add(columns[i]);
}
// output map that will contain the filtered results
Map<Text, byte[]> m = new HashMap<Text, byte[]>();
// get an array of all the columns retrieved
Object[] columns_retrieved = prefiltered_result.keySet().toArray();
// copy over those cells with requested column names
for(int i = 0; i < columns_retrieved.length; i++){
Text current_column = (Text)columns_retrieved[i];
if(requested_columns_set.contains(current_column.toString())){
m.put(current_column, prefiltered_result.get(current_column));
}
}
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
outputRowXml(response, m);
break;
case MIME:
outputRowMime(response, m);
break;
default:
doNotAcceptable(response, "Unsupported Accept Header Content: " +
request.getHeader(ACCEPT));
}
}
}
}
/*
* Output a row encoded as XML.
* @param response
* @param result
* @throws IOException
*/
private void outputRowXml(final HttpServletResponse response,
final Map<Text, byte[]> result)
throws IOException {
setResponseHeader(response, result.size() > 0? 200: 204,
ContentType.XML.toString());
XMLOutputter outputter = getXMLOutputter(response.getWriter());
outputter.startTag(ROW);
outputColumnsXml(outputter, result);
outputter.endTag();
outputter.endDocument();
outputter.getWriter().close();
}
/*
* @param response
* @param result
* Output the results contained in result as a multipart/related response.
*/
private void outputRowMime(final HttpServletResponse response,
final Map<Text, byte[]> result)
throws IOException {
response.setStatus(result.size() > 0? 200: 204);
// This code ties me to the jetty server.
MultiPartResponse mpr = new MultiPartResponse(response);
// Content type should look like this for multipart:
// Content-type: multipart/related;start="<rootpart*94ebf1e6-7eb5-43f1-85f4-2615fc40c5d6@example.jaxws.sun.com>";type="application/xop+xml";boundary="uuid:94ebf1e6-7eb5-43f1-85f4-2615fc40c5d6";start-info="text/xml"
String ct = ContentType.MIME.toString() + ";charset=\"UTF-8\";boundary=\"" +
mpr.getBoundary() + "\"";
// Setting content type is broken. I'm unable to set parameters on the
// content-type; They get stripped. Can't set boundary, etc.
// response.addHeader("Content-Type", ct);
response.setContentType(ct);
outputColumnsMime(mpr, result);
mpr.close();
}
/*
* @param request
* @param response
* @param pathSegments
* Do a put based on the client request.
*/
private void putRow(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException {
focusTable(pathSegments[0]);
switch(ContentType.getContentType(request.getHeader(CONTENT_TYPE))) {
case XML:
putRowXml(request, response, pathSegments);
break;
case MIME:
doNotAcceptable(response);
break;
default:
doNotAcceptable(response, "Unsupported Content-Type: " +
request.getHeader(CONTENT_TYPE));
}
}
/*
* @param request
* @param response
* @param pathSegments
* Decode supplied XML and do a put to Hbase.
*/
private void putRowXml(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException{
DocumentBuilderFactory docBuilderFactory
= DocumentBuilderFactory.newInstance();
//ignore all comments inside the xml file
docBuilderFactory.setIgnoringComments(true);
DocumentBuilder builder = null;
Document doc = null;
String timestamp = pathSegments.length >= 4 ? pathSegments[3] : null;
try{
builder = docBuilderFactory.newDocumentBuilder();
doc = builder.parse(request.getInputStream());
} catch (javax.xml.parsers.ParserConfigurationException e) {
throw new ServletException(e);
} catch (org.xml.sax.SAXException e){
throw new ServletException(e);
}
long lock_id = -1;
try{
// start an update
Text key = new Text(pathSegments[2]);
lock_id = this.table.startUpdate(key);
// set the columns from the xml
NodeList columns = doc.getElementsByTagName("column");
for(int i = 0; i < columns.getLength(); i++){
// get the current column element we're working on
Element column = (Element)columns.item(i);
// extract the name and value children
Node name_node = column.getElementsByTagName("name").item(0);
Text name = new Text(name_node.getFirstChild().getNodeValue());
Node value_node = column.getElementsByTagName("value").item(0);
// decode the base64'd value
byte[] value = org.apache.hadoop.hbase.util.Base64.decode(value_node.getFirstChild().getNodeValue());
// put the value
this.table.put(lock_id, name, value);
}
// commit the update
if (timestamp != null) {
this.table.commit(lock_id, Long.parseLong(timestamp));
}
else{
this.table.commit(lock_id);
}
// respond with a 200
response.setStatus(200);
}
catch(Exception e){
if (lock_id != -1) {
this.table.abort(lock_id);
}
throw new ServletException(e);
}
}
/*
* Return region offsets.
* @param request
* @param response
*/
private void getTableRegions(final HttpServletRequest request,
final HttpServletResponse response)
throws IOException {
// Presumption is that this.table has already been focused on the target table.
Text [] startKeys = this.table.getStartKeys();
switch (ContentType.getContentType(request.getHeader(ACCEPT))) {
case XML:
setResponseHeader(response, startKeys.length > 0? 200: 204,
ContentType.XML.toString());
XMLOutputter outputter = getXMLOutputter(response.getWriter());
outputter.startTag("regions");
for (int i = 0; i < startKeys.length; i++) {
doElement(outputter, "region", startKeys[i].toString());
}
outputter.endTag();
outputter.endDocument();
outputter.getWriter().close();
break;
case PLAIN:
setResponseHeader(response, startKeys.length > 0? 200: 204,
ContentType.PLAIN.toString());
PrintWriter out = response.getWriter();
for (int i = 0; i < startKeys.length; i++) {
// TODO: Add in the server location. Is it needed?
out.print(startKeys[i].toString());
}
out.close();
break;
case MIME:
default:
doNotAcceptable(response, "Unsupported Accept Header Content: " +
request.getHeader(ACCEPT));
}
}
/*
* Get table metadata.
* @param request
* @param response
* @param tableName
* @throws IOException
*/
private void getTableMetadata(final HttpServletRequest request,
final HttpServletResponse response, final String tableName)
throws IOException {
HTableDescriptor [] tables = this.admin.listTables();
HTableDescriptor descriptor = null;
for (int i = 0; i < tables.length; i++) {
if (tables[i].getName().toString().equals(tableName)) {
descriptor = tables[i];
break;
}
}
if (descriptor == null) {
doNotFound(response, "Table not found!");
} else {
ContentType type = ContentType.getContentType(request.getHeader(ACCEPT));
switch (type) {
case XML:
setResponseHeader(response, 200, ContentType.XML.toString());
XMLOutputter outputter = getXMLOutputter(response.getWriter());
outputter.startTag("table");
doElement(outputter, "name", descriptor.getName().toString());
outputter.startTag("columnfamilies");
for (Map.Entry<Text, HColumnDescriptor> e:
descriptor.getFamilies().entrySet()) {
outputter.startTag("columnfamily");
doElement(outputter, "name", e.getKey().toString());
HColumnDescriptor hcd = e.getValue();
doElement(outputter, "compression", hcd.getCompression().toString());
doElement(outputter, "bloomfilter",
hcd.getBloomFilter() == null? "NONE": hcd.getBloomFilter().toString());
doElement(outputter, "max-versions",
Integer.toString(hcd.getMaxVersions()));
doElement(outputter, "maximum-cell-size",
Integer.toString(hcd.getMaxValueLength()));
outputter.endTag();
}
outputter.endTag();
outputter.endTag();
outputter.endDocument();
outputter.getWriter().close();
break;
case PLAIN:
setResponseHeader(response, 200, ContentType.PLAIN.toString());
PrintWriter out = response.getWriter();
out.print(descriptor.toString());
out.close();
break;
case MIME:
default:
doNotAcceptable(response, "Unsupported Accept Header Content: " +
request.getHeader(ACCEPT));
}
}
}
/*
* @param request
* @param response
* @param pathSegments
* Delete some or all cells for a row.
*/
private void deleteRow(final HttpServletRequest request,
final HttpServletResponse response, final String [] pathSegments)
throws IOException, ServletException {
// grab the table we're operating on
focusTable(getTableName(pathSegments));
Text key = new Text(pathSegments[2]);
String[] columns = request.getParameterValues(COLUMN);
// hack - we'll actually test for the presence of the timestamp parameter
// eventually
boolean timestamp_present = false;
if(timestamp_present){ // do a timestamp-aware delete
doMethodNotAllowed(response, "DELETE with a timestamp not implemented!");
}
else{ // ignore timestamps
if(columns == null || columns.length == 0){
// no columns specified: deleting the whole row is not yet implemented
doMethodNotAllowed(response,
"DELETE without specified columns not implemented!");
} else{
// delete each column in turn
for(int i = 0; i < columns.length; i++){
this.table.deleteAll(key, new Text(columns[i]));
}
}
response.setStatus(202);
}
}
}
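
Because putRowXml walks every <column> element for <name>/<value> children and
base64-decodes the value, mirroring what outputColumnsXml emits, a PUT (or
POST) to /[table]/row/[row] can carry the same XML shape a GET returns. A
sketch, with table, row and column names as stand-ins and "c29tZSBkYXRh" being
base64 for "some data":

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class PutRowSmoke {
  public static void main(String[] args) throws Exception {
    String body =
      "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
      "<row><column><name>info:name</name>" +
      "<value>c29tZSBkYXRh</value></column></row>";
    HttpURLConnection conn = (HttpURLConnection) new URL(
      "http://localhost:60010/api/mytable/row/myrow").openConnection();
    // Dispatcher equates PUT with POST; TableHandler.putRow switches on
    // the Content-Type header, so text/xml selects putRowXml.
    conn.setRequestMethod("PUT");
    conn.setRequestProperty("Content-Type", "text/xml");
    conn.setDoOutput(true);
    OutputStream out = conn.getOutputStream();
    out.write(body.getBytes("UTF-8"));
    out.close();
    System.out.println("HTTP " + conn.getResponseCode()); // expect 200
  }
}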

InfoServer.java

@ -89,18 +89,27 @@ public class InfoServer {
this.webServer.addContext(staticContext);
// set up the context for "/" jsp files
String webappDir = getWebAppDir(name);
this.webAppContext =
this.webServer.addWebApplication("/", webappDir);
if (name.equals("master")) {
// Put up the rest webapp.
this.webServer.addWebApplication("/api", getWebAppDir("rest"));
}
addServlet("stacks", "/stacks", StatusHttpServer.StackServlet.class);
addServlet("logLevel", "/logLevel", org.apache.hadoop.log.LogLevel.Servlet.class);
}
private String getWebAppDir(final String webappName) throws IOException {
String webappDir = null;
try {
webappDir = getWebAppsPath("webapps" + File.separator + webappName);
} catch (FileNotFoundException e) {
// Retry. Resource may be inside jar on a windows machine.
webappDir = getWebAppsPath("webapps/" + webappName);
}
return webappDir;
}
/**
* Set a value in the webapp context. These values are available to the jsp

MANIFEST.MF

@ -0,0 +1,9 @@
Manifest-Version: 1.0
Class-Path:

Manifest-Version: 1.0
Class-Path:

Manifest-Version: 1.0
Class-Path:

web.xml

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app id="WebApp_ID" version="2.4" xmlns="http://java.sun.com/xml/ns/j2ee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd">
<display-name>rest</display-name>
<servlet>
<description>Hbase REST Interface</description>
<display-name>api</display-name>
<servlet-name>api</servlet-name>
<servlet-class>org.apache.hadoop.hbase.rest.Dispatcher</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>api</servlet-name>
<url-pattern>/*</url-pattern>
</servlet-mapping>
</web-app>