HBASE-2257 [stargate] multiuser mode; update for trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@916110 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Andrew Kyle Purtell 2010-02-25 04:22:47 +00:00
parent 6c100b6de3
commit 1d4cace954
14 changed files with 537 additions and 450 deletions

View File

@ -25,29 +25,34 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.stargate.auth.Authenticator;
import org.apache.hadoop.hbase.stargate.auth.HBCAuthenticator;
import org.apache.hadoop.util.StringUtils;
import com.sun.jersey.server.impl.container.servlet.ServletAdaptor;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
public class RESTServlet extends ServletAdaptor {
private static final long serialVersionUID = 1L;
public static final int DEFAULT_MAX_AGE = 60 * 60 * 4; // 4 hours
public static final String VERSION_STRING = "0.0.1";
public class RESTServlet extends ServletAdaptor implements Constants {
private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static final long serialVersionUID = 1L;
private static RESTServlet instance;
private transient final Configuration conf;
private transient final HTablePool pool;
protected Map<String,Integer> maxAgeMap =
transient final Configuration conf;
transient final HTablePool pool;
Map<String,Integer> maxAgeMap =
Collections.synchronizedMap(new HashMap<String,Integer>());
boolean multiuser;
Authenticator authenticator;
/**
* @return the RESTServlet singleton instance
@ -70,7 +75,7 @@ public class RESTServlet extends ServletAdaptor {
}
/**
* Get a table pool for the given table.
* Get a table pool for the given table.
* @return the table pool
*/
protected HTablePool getTablePool() {
@ -87,7 +92,7 @@ public class RESTServlet extends ServletAdaptor {
/**
* @param tableName the table name
* @return the maximum cache age suitable for use with this table, in
* seconds
* seconds
* @throws IOException
*/
public int getMaxAge(String tableName) throws IOException {
@ -98,7 +103,7 @@ public class RESTServlet extends ServletAdaptor {
HTableInterface table = pool.getTable(tableName);
try {
int maxAge = DEFAULT_MAX_AGE;
for (HColumnDescriptor family :
for (HColumnDescriptor family :
table.getTableDescriptor().getFamilies()) {
int ttl = family.getTimeToLive();
if (ttl < 0) {

View File

@ -0,0 +1,108 @@
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.stargate;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Map;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.stargate.auth.User;
import org.apache.hadoop.hbase.stargate.model.TableInfoModel;
import org.apache.hadoop.hbase.stargate.model.TableRegionModel;
/**
 * REST resource exposing the list of regions for a table.
 * <p>
 * Only administrators may enumerate regions: construction fails with
 * 403 FORBIDDEN when a non-admin {@code User} is supplied.
 */
public class RegionsResource implements Constants {
  private static final Log LOG = LogFactory.getLog(RegionsResource.class);

  User user;
  String table;
  CacheControl cacheControl;
  RESTServlet servlet;

  /**
   * Constructor.
   * @param user the authenticated user making the request, or null when
   *   running without authentication
   * @param table the table whose regions are exposed
   * @throws IOException if the servlet singleton cannot be obtained
   */
  public RegionsResource(User user, String table) throws IOException {
    if (user != null) {
      if (!user.isAdmin()) {
        // Region topology is cluster-internal information.
        throw new WebApplicationException(Response.Status.FORBIDDEN);
      }
      this.user = user;
    }
    this.table = table;
    // Region locations change as the cluster rebalances, so responses
    // must not be cached.
    cacheControl = new CacheControl();
    cacheControl.setNoCache(true);
    cacheControl.setNoTransform(false);
    servlet = RESTServlet.getInstance();
  }

  /**
   * @return a map of region descriptors to the server addresses hosting them
   * @throws IOException if region information cannot be fetched
   */
  private Map<HRegionInfo,HServerAddress> getTableRegions()
      throws IOException {
    HTablePool pool = servlet.getTablePool();
    HTable table = (HTable) pool.getTable(this.table);
    try {
      return table.getRegionsInfo();
    } finally {
      // Always return the table to the pool, even on failure.
      pool.putTable(table);
    }
  }

  /**
   * Handles GET: builds a {@code TableInfoModel} listing each region's id,
   * key range, and "host:port" location.
   * @param uriInfo the request URI context (used for logging only)
   * @return 200 with the region list; 404 if the table does not exist;
   *   503 on other I/O failures
   */
  @GET
  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
  public Response get(@Context UriInfo uriInfo) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("GET " + uriInfo.getAbsolutePath());
    }
    try {
      TableInfoModel model = new TableInfoModel(table);
      Map<HRegionInfo,HServerAddress> regions = getTableRegions();
      for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
        HRegionInfo hri = e.getKey();
        HServerAddress addr = e.getValue();
        InetSocketAddress sa = addr.getInetSocketAddress();
        // sa.getPort() is concatenated directly; the former
        // Integer.valueOf(...) boxing was unnecessary.
        model.add(
          new TableRegionModel(table, hri.getRegionId(), hri.getStartKey(),
            hri.getEndKey(), sa.getHostName() + ":" + sa.getPort()));
      }
      ResponseBuilder response = Response.ok(model);
      response.cacheControl(cacheControl);
      return response.build();
    } catch (TableNotFoundException e) {
      throw new WebApplicationException(Response.Status.NOT_FOUND);
    } catch (IOException e) {
      throw new WebApplicationException(e,
          Response.Status.SERVICE_UNAVAILABLE);
    }
  }
}

View File

@ -1,328 +1,313 @@
/*
* Copyright 2009 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.stargate;
import java.io.IOException;
import java.net.URLDecoder;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.stargate.model.CellModel;
import org.apache.hadoop.hbase.stargate.model.CellSetModel;
import org.apache.hadoop.hbase.stargate.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
public class RowResource implements Constants {
private static final Log LOG = LogFactory.getLog(RowResource.class);
private String table;
private RowSpec rowspec;
private CacheControl cacheControl;
/**
 * Constructor.
 * @param table the table this row belongs to
 * @param rowspec URL-encoded row specification (row key, columns,
 *   timestamps); decoded here as UTF-8
 * @param versions maximum number of cell versions to return, or null for
 *   the default
 * @throws IOException if the servlet singleton or max-age lookup fails
 */
public RowResource(String table, String rowspec, String versions)
    throws IOException {
  this.table = table;
  this.rowspec = new RowSpec(URLDecoder.decode(rowspec,
    HConstants.UTF8_ENCODING));
  if (versions != null) {
    // parseInt avoids the needless Integer boxing of Integer.valueOf
    this.rowspec.setMaxVersions(Integer.parseInt(versions));
  }
  // Cache lifetime is derived from the table's column family TTLs.
  cacheControl = new CacheControl();
  cacheControl.setMaxAge(RESTServlet.getInstance().getMaxAge(table));
  cacheControl.setNoTransform(false);
}
/**
 * Handles GET of a marshalled cell set: fetches all cells matching the
 * row specification and groups consecutive cells that share a row key
 * into one RowModel.
 * @return 200 with the cell set; 404 when nothing matches; 503 on I/O
 *   failure
 */
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
    MIMETYPE_PROTOBUF})
public Response get(@Context UriInfo uriInfo) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("GET " + uriInfo.getAbsolutePath());
  }
  try {
    ResultGenerator generator = ResultGenerator.fromRowSpec(table, rowspec);
    if (!generator.hasNext()) {
      // No cells matched the specification.
      throw new WebApplicationException(Response.Status.NOT_FOUND);
    }
    CellSetModel model = new CellSetModel();
    KeyValue value = generator.next();
    byte[] rowKey = value.getRow();
    RowModel rowModel = new RowModel(rowKey);
    do {
      // When the row key changes, flush the accumulated row and start a
      // new one.
      if (!Bytes.equals(value.getRow(), rowKey)) {
        model.addRow(rowModel);
        rowKey = value.getRow();
        rowModel = new RowModel(rowKey);
      }
      rowModel.addCell(new CellModel(value));
      value = generator.next();
    } while (value != null);
    // Flush the final (or only) row.
    model.addRow(rowModel);
    ResponseBuilder response = Response.ok(model);
    response.cacheControl(cacheControl);
    return response.build();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  }
}
/**
 * Handles GET of a single cell's raw value as application/octet-stream.
 * The cell timestamp is returned in the "X-Timestamp" header since the
 * binary body carries only the value.
 * @return 200 with the value; 400 if the rowspec does not address exactly
 *   one column; 404 when nothing matches; 503 on I/O failure
 */
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(@Context UriInfo uriInfo) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
  }
  // doesn't make sense to use a non specific coordinate as this can only
  // return a single cell
  if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) {
    throw new WebApplicationException(Response.Status.BAD_REQUEST);
  }
  try {
    ResultGenerator generator = ResultGenerator.fromRowSpec(table, rowspec);
    if (!generator.hasNext()) {
      throw new WebApplicationException(Response.Status.NOT_FOUND);
    }
    // Only the first matching cell is returned.
    KeyValue value = generator.next();
    ResponseBuilder response = Response.ok(value.getValue());
    response.cacheControl(cacheControl);
    response.header("X-Timestamp", value.getTimestamp());
    return response.build();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  }
}
/**
 * Applies every row/cell in the model as Puts against the table.
 * NOTE(review): the 'replace' flag is accepted but never consulted, so
 * PUT and POST currently behave identically — confirm this is intended.
 * @return 200 on success; 500 if the servlet singleton is unavailable;
 *   503 on write failure
 */
private Response update(CellSetModel model, boolean replace) {
  HTablePool pool;
  try {
    pool = RESTServlet.getInstance().getTablePool();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.INTERNAL_SERVER_ERROR);
  }
  HTableInterface table = null;
  try {
    table = pool.getTable(this.table);
    for (RowModel row: model.getRows()) {
      byte[] key = row.getKey();
      Put put = new Put(key);
      for (CellModel cell: row.getCells()) {
        // parseColumn yields one element for a bare family specifier,
        // two for "family:qualifier".
        byte [][] parts = KeyValue.parseColumn(cell.getColumn());
        if(parts.length == 1) {
          // Family-only specifier: store under an empty qualifier.
          put.add(parts[0], new byte[0], cell.getTimestamp(), cell.getValue());
        } else {
          put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
        }
      }
      table.put(put);
      if (LOG.isDebugEnabled()) {
        LOG.debug("PUT " + put.toString());
      }
    }
    table.flushCommits();
    ResponseBuilder response = Response.ok();
    return response.build();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  } finally {
    // Return the table to the pool even when the write fails.
    if (table != null) {
      pool.putTable(table);
    }
  }
}
private Response updateBinary(byte[] message, HttpHeaders headers,
boolean replace) {
HTablePool pool;
try {
pool = RESTServlet.getInstance().getTablePool();
} catch (IOException e) {
throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
}
HTableInterface table = null;
try {
byte[] row = rowspec.getRow();
byte[][] columns = rowspec.getColumns();
byte[] column = null;
if (columns != null) {
column = columns[0];
}
long timestamp = HConstants.LATEST_TIMESTAMP;
List<String> vals = headers.getRequestHeader("X-Row");
if (vals != null && !vals.isEmpty()) {
row = Bytes.toBytes(vals.get(0));
}
vals = headers.getRequestHeader("X-Column");
if (vals != null && !vals.isEmpty()) {
column = Bytes.toBytes(vals.get(0));
}
vals = headers.getRequestHeader("X-Timestamp");
if (vals != null && !vals.isEmpty()) {
timestamp = Long.valueOf(vals.get(0));
}
if (column == null) {
throw new WebApplicationException(Response.Status.BAD_REQUEST);
}
Put put = new Put(row);
byte parts[][] = KeyValue.parseColumn(column);
if(parts.length == 1) {
put.add(parts[0], new byte[0], timestamp, message);
} else {
put.add(parts[0], parts[1], timestamp, message);
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.stargate;
import java.io.IOException;
import java.net.URLDecoder;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.stargate.auth.User;
import org.apache.hadoop.hbase.stargate.model.CellModel;
import org.apache.hadoop.hbase.stargate.model.CellSetModel;
import org.apache.hadoop.hbase.stargate.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
public class RowResource implements Constants {
private static final Log LOG = LogFactory.getLog(RowResource.class);
String tableName;
String actualTableName;
RowSpec rowspec;
CacheControl cacheControl;
RESTServlet servlet;
/**
 * Constructor.
 * @param user the authenticated user, or null when authentication is
 *   disabled
 * @param table the user-visible table name
 * @param rowspec URL-encoded row specification; decoded here as UTF-8
 * @param versions maximum number of cell versions to return, or null for
 *   the default
 * @throws IOException if the servlet singleton or max-age lookup fails
 */
public RowResource(User user, String table, String rowspec, String versions)
    throws IOException {
  // In multiuser mode a non-admin user's tables are namespaced by
  // prefixing the user name: "<user>.<table>".
  if (user != null) {
    this.actualTableName =
      !user.isAdmin() ? user.getName() + "." + table : table;
  } else {
    this.actualTableName = table;
  }
  this.tableName = table;
  this.rowspec = new RowSpec(URLDecoder.decode(rowspec,
    HConstants.UTF8_ENCODING));
  if (versions != null) {
    // parseInt avoids the needless Integer boxing of Integer.valueOf
    this.rowspec.setMaxVersions(Integer.parseInt(versions));
  }
  this.servlet = RESTServlet.getInstance();
  cacheControl = new CacheControl();
  // NOTE(review): max-age is looked up by the user-visible name, not
  // actualTableName — confirm getMaxAge resolves namespaced tables.
  cacheControl.setMaxAge(servlet.getMaxAge(table));
  cacheControl.setNoTransform(false);
}
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
public Response get(@Context UriInfo uriInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("GET " + uriInfo.getAbsolutePath());
}
try {
ResultGenerator generator =
ResultGenerator.fromRowSpec(actualTableName, rowspec);
if (!generator.hasNext()) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
}
table = pool.getTable(this.table);
table.put(put);
if (LOG.isDebugEnabled()) {
LOG.debug("PUT " + put.toString());
}
table.flushCommits();
return Response.ok().build();
} catch (IOException e) {
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
} finally {
if (table != null) {
pool.putTable(table);
}
}
}
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
MIMETYPE_PROTOBUF})
public Response put(CellSetModel model, @Context UriInfo uriInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true);
}
@PUT
@Consumes(MIMETYPE_BINARY)
public Response putBinary(byte[] message, @Context UriInfo uriInfo,
@Context HttpHeaders headers)
{
if (LOG.isDebugEnabled()) {
LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
MIMETYPE_PROTOBUF})
public Response post(CellSetModel model, @Context UriInfo uriInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("POST " + uriInfo.getAbsolutePath());
}
return update(model, false);
}
@POST
@Consumes(MIMETYPE_BINARY)
public Response postBinary(byte[] message, @Context UriInfo uriInfo,
@Context HttpHeaders headers)
{
if (LOG.isDebugEnabled()) {
LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(@Context UriInfo uriInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("DELETE " + uriInfo.getAbsolutePath());
}
CellSetModel model = new CellSetModel();
KeyValue value = generator.next();
byte[] rowKey = value.getRow();
RowModel rowModel = new RowModel(rowKey);
do {
if (!Bytes.equals(value.getRow(), rowKey)) {
model.addRow(rowModel);
rowKey = value.getRow();
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
new CellModel(value.getFamily(), value.getQualifier(),
value.getTimestamp(), value.getValue()));
value = generator.next();
} while (value != null);
model.addRow(rowModel);
ResponseBuilder response = Response.ok(model);
response.cacheControl(cacheControl);
return response.build();
} catch (IOException e) {
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
}
}
/**
 * Handles GET of a single cell's raw value as application/octet-stream.
 * The cell timestamp is returned in the "X-Timestamp" header since the
 * binary body carries only the value.
 * @return 200 with the value; 400 if the rowspec does not address exactly
 *   one column; 404 when nothing matches; 503 on I/O failure
 */
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(@Context UriInfo uriInfo) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
  }
  // doesn't make sense to use a non specific coordinate as this can only
  // return a single cell
  if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) {
    throw new WebApplicationException(Response.Status.BAD_REQUEST);
  }
  try {
    // Reads go against the (possibly user-namespaced) actual table.
    ResultGenerator generator =
      ResultGenerator.fromRowSpec(actualTableName, rowspec);
    if (!generator.hasNext()) {
      throw new WebApplicationException(Response.Status.NOT_FOUND);
    }
    // Only the first matching cell is returned.
    KeyValue value = generator.next();
    ResponseBuilder response = Response.ok(value.getValue());
    response.cacheControl(cacheControl);
    response.header("X-Timestamp", value.getTimestamp());
    return response.build();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  }
}
/**
 * Applies every row/cell in the model as Puts against the actual
 * (possibly user-namespaced) table.
 * NOTE(review): the 'replace' flag is accepted but never consulted, so
 * PUT and POST currently behave identically — confirm this is intended.
 * @return 200 on success; 503 on write failure
 */
private Response update(CellSetModel model, boolean replace) {
  HTablePool pool = servlet.getTablePool();
  HTableInterface table = null;
  try {
    table = pool.getTable(actualTableName);
    for (RowModel row: model.getRows()) {
      byte[] key = row.getKey();
      Put put = new Put(key);
      for (CellModel cell: row.getCells()) {
        byte [][] parts = KeyValue.parseColumn(cell.getColumn());
        // parseColumn returns a single element when the column specifier
        // names only a family (no ":qualifier"); indexing parts[1]
        // unconditionally would throw ArrayIndexOutOfBoundsException.
        // The pre-refactor version of this method carried this guard.
        if (parts.length == 1) {
          put.add(parts[0], new byte[0], cell.getTimestamp(),
            cell.getValue());
        } else {
          put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
        }
      }
      table.put(put);
      if (LOG.isDebugEnabled()) {
        LOG.debug("PUT " + put.toString());
      }
    }
    table.flushCommits();
    ResponseBuilder response = Response.ok();
    return response.build();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  } finally {
    // Return the table to the pool even when the write fails.
    if (table != null) {
      pool.putTable(table);
    }
  }
}
/**
 * Stores a single raw cell. Row, column and timestamp come from the row
 * specification, each overridable by the X-Row / X-Column / X-Timestamp
 * request headers.
 * NOTE(review): the 'replace' flag is accepted but never consulted —
 * PUT and POST currently behave identically.
 * @return 200 on success; 400 when no column is specified; 503 on write
 *   failure
 */
private Response updateBinary(byte[] message, HttpHeaders headers,
    boolean replace) {
  HTablePool pool = servlet.getTablePool();
  HTableInterface table = null;
  try {
    byte[] row = rowspec.getRow();
    byte[][] columns = rowspec.getColumns();
    byte[] column = null;
    if (columns != null) {
      column = columns[0];
    }
    long timestamp = HConstants.LATEST_TIMESTAMP;
    // Header overrides take precedence over the rowspec values.
    List<String> vals = headers.getRequestHeader("X-Row");
    if (vals != null && !vals.isEmpty()) {
      row = Bytes.toBytes(vals.get(0));
    }
    vals = headers.getRequestHeader("X-Column");
    if (vals != null && !vals.isEmpty()) {
      column = Bytes.toBytes(vals.get(0));
    }
    vals = headers.getRequestHeader("X-Timestamp");
    if (vals != null && !vals.isEmpty()) {
      // parseLong avoids the needless Long boxing of Long.valueOf
      timestamp = Long.parseLong(vals.get(0));
    }
    if (column == null) {
      // A target column must be supplied one way or the other.
      throw new WebApplicationException(Response.Status.BAD_REQUEST);
    }
    Put put = new Put(row);
    byte parts[][] = KeyValue.parseColumn(column);
    // parseColumn returns a single element for a bare family specifier;
    // guard against ArrayIndexOutOfBoundsException on parts[1]. The
    // pre-refactor version of this method carried this guard.
    if (parts.length == 1) {
      put.add(parts[0], new byte[0], timestamp, message);
    } else {
      put.add(parts[0], parts[1], timestamp, message);
    }
    table = pool.getTable(actualTableName);
    table.put(put);
    if (LOG.isDebugEnabled()) {
      LOG.debug("PUT " + put.toString());
    }
    table.flushCommits();
    return Response.ok().build();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  } finally {
    if (table != null) {
      pool.putTable(table);
    }
  }
}
/**
 * Handles PUT of a marshalled cell set; delegates to {@code update} in
 * replace mode.
 */
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
public Response put(CellSetModel model, @Context UriInfo uriInfo) {
  if (LOG.isDebugEnabled()) {
    StringBuilder msg = new StringBuilder("PUT ");
    msg.append(uriInfo.getAbsolutePath());
    LOG.debug(msg.toString());
  }
  return update(model, true);
}
/**
 * Handles PUT of a raw cell value (application/octet-stream); delegates
 * to {@code updateBinary} in replace mode.
 */
@PUT
@Consumes(MIMETYPE_BINARY)
public Response putBinary(byte[] message, @Context UriInfo uriInfo,
    @Context HttpHeaders headers)
{
  if (LOG.isDebugEnabled()) {
    StringBuilder msg = new StringBuilder("PUT ");
    msg.append(uriInfo.getAbsolutePath()).append(" as ")
      .append(MIMETYPE_BINARY);
    LOG.debug(msg.toString());
  }
  return updateBinary(message, headers, true);
}
/**
 * Handles POST of a marshalled cell set; delegates to {@code update} in
 * non-replace mode.
 */
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
public Response post(CellSetModel model, @Context UriInfo uriInfo) {
  if (LOG.isDebugEnabled()) {
    StringBuilder msg = new StringBuilder("POST ");
    msg.append(uriInfo.getAbsolutePath());
    LOG.debug(msg.toString());
  }
  return update(model, false);
}
/**
 * Handles POST of a raw cell value (application/octet-stream); delegates
 * to {@code updateBinary} in non-replace mode.
 */
@POST
@Consumes(MIMETYPE_BINARY)
public Response postBinary(byte[] message, @Context UriInfo uriInfo,
    @Context HttpHeaders headers)
{
  if (LOG.isDebugEnabled()) {
    StringBuilder msg = new StringBuilder("POST ");
    msg.append(uriInfo.getAbsolutePath()).append(" as ")
      .append(MIMETYPE_BINARY);
    LOG.debug(msg.toString());
  }
  return updateBinary(message, headers, false);
}
/**
 * Handles DELETE: builds a single Delete covering the row (optionally
 * bounded by the rowspec timestamp) and every column/family listed in
 * the row specification, then applies it.
 * @return 200 on success; 500 if the servlet singleton is unavailable;
 *   503 on delete failure
 */
@DELETE
public Response delete(@Context UriInfo uriInfo) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("DELETE " + uriInfo.getAbsolutePath());
  }
  Delete delete = null;
  if (rowspec.hasTimestamp())
    delete = new Delete(rowspec.getRow(), rowspec.getTimestamp(), null);
  else
    delete = new Delete(rowspec.getRow());
  for (byte[] column: rowspec.getColumns()) {
    byte[][] split = KeyValue.parseColumn(column);
    if (rowspec.hasTimestamp()) {
      // A two-part specifier with a non-empty qualifier deletes that
      // column; otherwise the whole family is deleted.
      if (split.length == 2 && split[1].length != 0) {
        delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
      } else {
        delete.deleteFamily(split[0], rowspec.getTimestamp());
      }
    } else {
      if (split.length == 2 && split[1].length != 0) {
        delete.deleteColumns(split[0], split[1]);
      } else {
        delete.deleteFamily(split[0]);
      }
    }
  }
  HTablePool pool;
  try {
    pool = RESTServlet.getInstance().getTablePool();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.INTERNAL_SERVER_ERROR);
  }
  HTableInterface table = null;
  try {
    table = pool.getTable(this.table);
    table.delete(delete);
    if (LOG.isDebugEnabled()) {
      LOG.debug("DELETE " + delete.toString());
    }
    table.flushCommits();
  } catch (IOException e) {
    throw new WebApplicationException(e,
        Response.Status.SERVICE_UNAVAILABLE);
  } finally {
    // Return the table to the pool even when the delete fails.
    if (table != null) {
      pool.putTable(table);
    }
  }
  return Response.ok().build();
}
}
for (byte[] column: rowspec.getColumns()) {
byte[][] split = KeyValue.parseColumn(column);
if (rowspec.hasTimestamp()) {
if (split.length == 2 && split[1].length != 0) {
delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
} else {
delete.deleteFamily(split[0], rowspec.getTimestamp());
}
} else {
if (split.length == 2 && split[1].length != 0) {
delete.deleteColumns(split[0], split[1]);
} else {
delete.deleteFamily(split[0]);
}
}
}
HTablePool pool = servlet.getTablePool();
HTableInterface table = null;
try {
table = pool.getTable(actualTableName);
table.delete(delete);
if (LOG.isDebugEnabled()) {
LOG.debug("DELETE " + delete.toString());
}
table.flushCommits();
} catch (IOException e) {
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
} finally {
if (table != null) {
pool.putTable(table);
}
}
return Response.ok().build();
}
}

View File

@ -36,23 +36,22 @@ public class RowResultGenerator extends ResultGenerator {
public RowResultGenerator(String tableName, RowSpec rowspec)
throws IllegalArgumentException, IOException {
HTablePool pool = RESTServlet.getInstance().getTablePool();
HTablePool pool = RESTServlet.getInstance().getTablePool();
HTableInterface table = pool.getTable(tableName);
try {
Get get = new Get(rowspec.getRow());
if (rowspec.hasColumns()) {
byte [][] columns = rowspec.getColumns();
for(byte [] column : columns) {
byte [][] famQf = KeyValue.parseColumn(column);
if(famQf.length == 1) {
get.addFamily(famQf[0]);
} else {
get.addColumn(famQf[0], famQf[1]);
}
}
for (byte[] col: rowspec.getColumns()) {
byte[][] split = KeyValue.parseColumn(col);
if (split.length == 2 && split[1].length != 0) {
get.addColumn(split[0], split[1]);
} else {
get.addFamily(split[0]);
}
}
} else {
// rowspec does not explicitly specify columns, return them all
for (HColumnDescriptor family:
for (HColumnDescriptor family:
table.getTableDescriptor().getFamilies()) {
get.addFamily(family.getName());
}

View File

@ -98,7 +98,9 @@ public class ScannerInstanceResource implements Constants {
rowKey = value.getRow();
rowModel = new RowModel(rowKey);
}
rowModel.addCell(new CellModel(value));
rowModel.addCell(
new CellModel(value.getFamily(), value.getQualifier(),
value.getTimestamp(), value.getValue()));
} while (--count > 0);
model.addRow(rowModel);
ResponseBuilder response = Response.ok(model);
@ -119,12 +121,12 @@ public class ScannerInstanceResource implements Constants {
LOG.info("generator exhausted");
return Response.noContent().build();
}
byte [] column = KeyValue.makeColumn(value.getFamily(),
value.getQualifier());
ResponseBuilder response = Response.ok(value.getValue());
response.cacheControl(cacheControl);
response.header("X-Row", Base64.encode(value.getRow()));
response.header("X-Column", Base64.encode(column));
response.header("X-Row", Base64.encode(value.getRow()));
response.header("X-Column",
Base64.encode(
KeyValue.makeColumn(value.getFamily(), value.getQualifier())));
response.header("X-Timestamp", value.getTimestamp());
return response.build();
} catch (IllegalStateException e) {

View File

@ -38,7 +38,7 @@ import org.apache.hadoop.util.StringUtils;
public class ScannerResultGenerator extends ResultGenerator {
private static final Log LOG =
LogFactory.getLog(ScannerResultGenerator.class);
private String id;
private Iterator<KeyValue> rowI;
private ResultScanner scanner;
@ -46,7 +46,7 @@ public class ScannerResultGenerator extends ResultGenerator {
public ScannerResultGenerator(String tableName, RowSpec rowspec)
throws IllegalArgumentException, IOException {
HTablePool pool = RESTServlet.getInstance().getTablePool();
HTablePool pool = RESTServlet.getInstance().getTablePool();
HTableInterface table = pool.getTable(tableName);
try {
Scan scan;
@ -66,12 +66,12 @@ public class ScannerResultGenerator extends ResultGenerator {
}
}
} else {
for (HColumnDescriptor family:
for (HColumnDescriptor family:
table.getTableDescriptor().getFamilies()) {
scan.addFamily(family.getName());
}
}
scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
scan.setMaxVersions(rowspec.getMaxVersions());
scanner = table.getScanner(scan);
cached = null;

View File

@ -55,20 +55,31 @@ import org.apache.hadoop.hbase.util.Bytes;
public class SchemaResource implements Constants {
private static final Log LOG = LogFactory.getLog(SchemaResource.class);
private String table;
private CacheControl cacheControl;
User user;
String tableName;
String actualTableName;
CacheControl cacheControl;
RESTServlet servlet;
public SchemaResource(String table) {
this.table = table;
/**
 * Constructor.
 * @param user the authenticated user, or null when authentication is
 *   disabled
 * @param table the user-visible table name
 * @throws IOException if the servlet singleton cannot be obtained
 */
public SchemaResource(User user, String table) throws IOException {
  if (user != null) {
    this.user = user;
    // In multiuser mode a non-admin user's tables are namespaced by
    // prefixing the user name: "<user>.<table>".
    this.actualTableName =
      !user.isAdmin() ? (user.getName() + "." + table) : table;
  } else {
    this.actualTableName = table;
  }
  this.tableName = table;
  // Schema may be altered at any time, so responses must not be cached.
  cacheControl = new CacheControl();
  cacheControl.setNoCache(true);
  cacheControl.setNoTransform(false);
  servlet = RESTServlet.getInstance();
}
private HTableDescriptor getTableSchema() throws IOException,
TableNotFoundException {
HTablePool pool = RESTServlet.getInstance().getTablePool();
HTableInterface table = pool.getTable(this.table);
HTablePool pool = servlet.getTablePool();
HTableInterface table = pool.getTable(actualTableName);
try {
return table.getTableDescriptor();
} finally {
@ -88,7 +99,7 @@ public class SchemaResource implements Constants {
model.setName(tableName);
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
htd.getValues().entrySet()) {
model.addAttribute(Bytes.toString(e.getKey().get()),
model.addAttribute(Bytes.toString(e.getKey().get()),
Bytes.toString(e.getValue().get()));
}
for (HColumnDescriptor hcd: htd.getFamilies()) {
@ -96,7 +107,7 @@ public class SchemaResource implements Constants {
columnModel.setName(hcd.getNameAsString());
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
hcd.getValues().entrySet()) {
columnModel.addAttribute(Bytes.toString(e.getKey().get()),
columnModel.addAttribute(Bytes.toString(e.getKey().get()),
Bytes.toString(e.getValue().get()));
}
model.addColumnFamily(columnModel);
@ -138,10 +149,10 @@ public class SchemaResource implements Constants {
}
return Response.created(uriInfo.getAbsolutePath()).build();
} catch (IOException e) {
throw new WebApplicationException(e,
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
}
}
}
}
private Response update(byte[] tableName, TableSchemaModel model,
UriInfo uriInfo, HBaseAdmin admin) {
@ -157,11 +168,11 @@ public class SchemaResource implements Constants {
if (htd.hasFamily(hcd.getName())) {
admin.modifyColumn(tableName, hcd.getName(), hcd);
} else {
admin.addColumn(model.getName(), hcd);
admin.addColumn(model.getName(), hcd);
}
}
} catch (IOException e) {
throw new WebApplicationException(e,
throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
} finally {
admin.enableTable(tableName);
@ -185,7 +196,7 @@ public class SchemaResource implements Constants {
return update(tableName, model, uriInfo, admin);
}
} catch (IOException e) {
throw new WebApplicationException(e,
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
}
}
@ -221,7 +232,7 @@ public class SchemaResource implements Constants {
}
@DELETE
public Response delete(@Context UriInfo uriInfo) {
public Response delete(@Context UriInfo uriInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("DELETE " + uriInfo.getAbsolutePath());
}
@ -233,7 +244,7 @@ public class SchemaResource implements Constants {
} catch (TableNotFoundException e) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
} catch (IOException e) {
throw new WebApplicationException(e,
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
}
}

View File

@ -34,7 +34,6 @@ import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.stargate.model.StorageClusterVersionModel;

View File

@ -22,95 +22,46 @@ package org.apache.hadoop.hbase.stargate;
import java.io.IOException;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.stargate.model.TableListModel;
import org.apache.hadoop.hbase.stargate.model.TableModel;
import org.apache.hadoop.hbase.stargate.auth.User;
@Path("/")
public class TableResource implements Constants {
private static final Log LOG = LogFactory.getLog(TableResource.class);
private CacheControl cacheControl;
User user;
String table;
public TableResource() {
cacheControl = new CacheControl();
cacheControl.setNoCache(true);
cacheControl.setNoTransform(false);
public TableResource(User user, String table) {
this.user = user;
this.table = table;
}
private HTableDescriptor[] getTableList() throws IOException {
HBaseAdmin admin =
new HBaseAdmin(RESTServlet.getInstance().getConfiguration());
HTableDescriptor[] list = admin.listTables();
if (LOG.isDebugEnabled()) {
LOG.debug("getTableList:");
for (HTableDescriptor htd: list) {
LOG.debug(htd.toString());
}
}
return list;
/**
 * Sub-resource locator: routes "<table>/regions" requests to a
 * {@link RegionsResource} bound to this resource's user and table.
 * @return a new RegionsResource for the current user and table
 * @throws IOException declared for the RegionsResource constructor
 */
@Path("regions")
public RegionsResource getRegionsResource() throws IOException {
return new RegionsResource(user, table);
}
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
MIMETYPE_PROTOBUF})
public Response get(@Context UriInfo uriInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("GET " + uriInfo.getAbsolutePath());
}
try {
TableListModel tableList = new TableListModel();
for (HTableDescriptor htd: getTableList()) {
if (htd.isMetaRegion()) {
continue;
}
tableList.add(new TableModel(htd.getNameAsString()));
}
ResponseBuilder response = Response.ok(tableList);
response.cacheControl(cacheControl);
return response.build();
} catch (IOException e) {
throw new WebApplicationException(e,
Response.Status.SERVICE_UNAVAILABLE);
}
/**
 * Sub-resource locator: routes "<table>/scanner" requests to a
 * {@link ScannerResource} bound to this resource's user and table.
 * @return a new ScannerResource for the current user and table
 */
@Path("scanner")
public ScannerResource getScannerResource() {
return new ScannerResource(user, table);
}
@Path("{table}/scanner")
public ScannerResource getScannerResource(
@PathParam("table") String table) {
return new ScannerResource(table);
/**
 * Sub-resource locator: routes "<table>/schema" requests to a
 * {@link SchemaResource} bound to this resource's user and table.
 * @return a new SchemaResource for the current user and table
 * @throws IOException declared for the SchemaResource constructor
 */
@Path("schema")
public SchemaResource getSchemaResource() throws IOException {
return new SchemaResource(user, table);
}
@Path("{table}/schema")
public SchemaResource getSchemaResource(
@PathParam("table") String table) {
return new SchemaResource(table);
}
@Path("{table}/{rowspec: .+}")
public RowResource getRowResource(
@PathParam("table") String table,
@PathParam("rowspec") String rowspec,
@Path("{rowspec: .+}")
public RowResource getRowResource(@PathParam("rowspec") String rowspec,
@QueryParam("v") String versions) {
try {
return new RowResource(table, rowspec, versions);
return new RowResource(user, table, rowspec, versions);
} catch (IOException e) {
throw new WebApplicationException(e,
throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
}
}

View File

@@ -1,23 +1,24 @@
package org.apache.hadoop.hbase.stargate.auth;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
public class HBCAuthenticator extends Authenticator {
HBaseConfiguration conf;
Configuration conf;
/**
* Default constructor
*/
public HBCAuthenticator() {
this(new HBaseConfiguration());
this(HBaseConfiguration.create());
}
/**
* Constructor
* @param conf
*/
public HBCAuthenticator(HBaseConfiguration conf) {
public HBCAuthenticator(Configuration conf) {
this.conf = conf;
}

View File

@@ -2,6 +2,7 @@ package org.apache.hadoop.hbase.stargate.auth;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
@@ -15,7 +16,7 @@ public class HTableAuthenticator extends Authenticator {
static final byte[] ADMIN = Bytes.toBytes("admin");
static final byte[] DISABLED = Bytes.toBytes("disabled");
HBaseConfiguration conf;
Configuration conf;
String tableName;
HTable table;
@@ -23,14 +24,14 @@ public class HTableAuthenticator extends Authenticator {
* Default constructor
*/
public HTableAuthenticator() {
this(new HBaseConfiguration());
this(HBaseConfiguration.create());
}
/**
* Constructor
* @param conf
*/
public HTableAuthenticator(HBaseConfiguration conf) {
public HTableAuthenticator(Configuration conf) {
this.conf = conf;
this.tableName = conf.get("stargate.auth.htable.name",
"stargate.users");
@@ -41,7 +42,7 @@ public class HTableAuthenticator extends Authenticator {
* @param conf
* @param tableName
*/
public HTableAuthenticator(HBaseConfiguration conf, String tableName) {
public HTableAuthenticator(Configuration conf, String tableName) {
this.conf = conf;
this.tableName = tableName;
}
@@ -51,7 +52,7 @@ public class HTableAuthenticator extends Authenticator {
* @param conf
* @param table
*/
public HTableAuthenticator(HBaseConfiguration conf, HTable table) {
public HTableAuthenticator(Configuration conf, HTable table) {
this.conf = conf;
this.table = table;
this.tableName = Bytes.toString(table.getTableName());

View File

@@ -77,13 +77,22 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
this(column, HConstants.LATEST_TIMESTAMP, value);
}
/**
 * Constructor: builds a cell for the given column family, qualifier and
 * value, stamped with {@code HConstants.LATEST_TIMESTAMP} (delegates to
 * the four-argument constructor).
 * @param column the column family name
 * @param qualifier the column qualifier
 * @param value the cell value
 */
public CellModel(byte[] column, byte[] qualifier, byte[] value) {
this(column, qualifier, HConstants.LATEST_TIMESTAMP, value);
}
/**
* Constructor from KeyValue
* @param kv
*/
public CellModel(KeyValue kv) {
this(KeyValue.makeColumn(kv.getFamily(), kv.getQualifier()),
kv.getTimestamp(), kv.getValue());
this(kv.getFamily(), kv.getQualifier(), kv.getTimestamp(), kv.getValue());
}
/**
@@ -97,6 +106,20 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
this.timestamp = timestamp;
this.value = value;
}
/**
 * Constructor: builds a cell from its component parts. The family and
 * qualifier are combined into a single column name via
 * {@code KeyValue.makeColumn}.
 * @param column the column family name
 * @param qualifier the column qualifier
 * @param timestamp the cell timestamp
 * @param value the cell value
 */
public CellModel(byte[] column, byte[] qualifier, long timestamp,
byte[] value) {
this.column = KeyValue.makeColumn(column, qualifier);
this.timestamp = timestamp;
this.value = value;
}
/**
* @return the column

View File

@@ -25,6 +25,7 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -59,7 +60,7 @@ public class MiniClusterTestCase extends TestCase {
// use a nonstandard port
public static final int DEFAULT_TEST_PORT = 38080;
protected static HBaseConfiguration conf = new HBaseConfiguration();
protected static Configuration conf = HBaseConfiguration.create();
protected static MiniZooKeeperCluster zooKeeperCluster;
protected static MiniHBaseCluster hbaseCluster;
protected static MiniDFSCluster dfsCluster;

View File

@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.stargate.auth;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import junit.framework.TestCase;
@@ -34,10 +35,10 @@ public class TestHBCAuthenticator extends TestCase {
static final String DISABLED_TOKEN = "17de5b5db0fd3de0847bd95396f36d92";
static final String DISABLED_USERNAME = "disabledUser";
static Configuration conf;
static HBCAuthenticator authenticator;
static HBaseConfiguration conf;
static {
conf = new HBaseConfiguration();
conf = HBaseConfiguration.create();
conf.set("stargate.auth.token." + USER_TOKEN, USER_USERNAME);
conf.set("stargate.auth.user." + USER_USERNAME + ".admin", "false");
conf.set("stargate.auth.user." + USER_USERNAME + ".disabled", "false");