HBASE-3120 [rest] Content transcoding
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1029899 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
81d0a68a2c
commit
a2de4f359a
|
@ -1082,7 +1082,7 @@ Release 0.21.0 - Unreleased
|
||||||
HBASE-3154 HBase RPC should support timeout (Hairong via jgray)
|
HBASE-3154 HBase RPC should support timeout (Hairong via jgray)
|
||||||
HBASE-3184 Xmx setting in pom to use for tests/surefire does not appear
|
HBASE-3184 Xmx setting in pom to use for tests/surefire does not appear
|
||||||
to work
|
to work
|
||||||
|
HBASE-3120 [rest] Content transcoding
|
||||||
|
|
||||||
NEW FEATURES
|
NEW FEATURES
|
||||||
HBASE-1961 HBase EC2 scripts
|
HBASE-1961 HBase EC2 scripts
|
||||||
|
|
|
@ -31,8 +31,6 @@ import javax.ws.rs.core.Response;
|
||||||
import javax.ws.rs.core.UriInfo;
|
import javax.ws.rs.core.UriInfo;
|
||||||
import javax.ws.rs.core.Response.ResponseBuilder;
|
import javax.ws.rs.core.Response.ResponseBuilder;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
|
||||||
|
|
||||||
public class ExistsResource extends ResourceBase {
|
public class ExistsResource extends ResourceBase {
|
||||||
|
|
||||||
static CacheControl cacheControl;
|
static CacheControl cacheControl;
|
||||||
|
@ -42,16 +40,16 @@ public class ExistsResource extends ResourceBase {
|
||||||
cacheControl.setNoTransform(false);
|
cacheControl.setNoTransform(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
String tableName;
|
TableResource tableResource;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
* @param table
|
* @param tableResource
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
public ExistsResource(String table) throws IOException {
|
public ExistsResource(TableResource tableResource) throws IOException {
|
||||||
super();
|
super();
|
||||||
this.tableName = table;
|
this.tableResource = tableResource;
|
||||||
}
|
}
|
||||||
|
|
||||||
@GET
|
@GET
|
||||||
|
@ -59,8 +57,7 @@ public class ExistsResource extends ResourceBase {
|
||||||
MIMETYPE_BINARY})
|
MIMETYPE_BINARY})
|
||||||
public Response get(final @Context UriInfo uriInfo) {
|
public Response get(final @Context UriInfo uriInfo) {
|
||||||
try {
|
try {
|
||||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
if (!tableResource.exists()) {
|
||||||
if (!admin.tableExists(tableName)) {
|
|
||||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||||
}
|
}
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
|
|
|
@ -55,17 +55,22 @@ public class RegionsResource extends ResourceBase {
|
||||||
cacheControl.setNoTransform(false);
|
cacheControl.setNoTransform(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
String tableName;
|
TableResource tableResource;
|
||||||
|
|
||||||
public RegionsResource(String table) throws IOException {
|
/**
|
||||||
|
* Constructor
|
||||||
|
* @param tableResource
|
||||||
|
* @throws IOException
|
||||||
|
*/
|
||||||
|
public RegionsResource(TableResource tableResource) throws IOException {
|
||||||
super();
|
super();
|
||||||
this.tableName = table;
|
this.tableResource = tableResource;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Map<HRegionInfo,HServerAddress> getTableRegions()
|
private Map<HRegionInfo,HServerAddress> getTableRegions()
|
||||||
throws IOException {
|
throws IOException {
|
||||||
HTablePool pool = servlet.getTablePool();
|
HTablePool pool = servlet.getTablePool();
|
||||||
HTableInterface table = pool.getTable(tableName);
|
HTableInterface table = pool.getTable(tableResource.getName());
|
||||||
try {
|
try {
|
||||||
return ((HTable)table).getRegionsInfo();
|
return ((HTable)table).getRegionsInfo();
|
||||||
} finally {
|
} finally {
|
||||||
|
@ -81,6 +86,7 @@ public class RegionsResource extends ResourceBase {
|
||||||
}
|
}
|
||||||
servlet.getMetrics().incrementRequests(1);
|
servlet.getMetrics().incrementRequests(1);
|
||||||
try {
|
try {
|
||||||
|
String tableName = tableResource.getName();
|
||||||
TableInfoModel model = new TableInfoModel(tableName);
|
TableInfoModel model = new TableInfoModel(tableName);
|
||||||
Map<HRegionInfo,HServerAddress> regions = getTableRegions();
|
Map<HRegionInfo,HServerAddress> regions = getTableRegions();
|
||||||
for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
|
for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
|
||||||
|
|
|
@ -48,30 +48,27 @@ import org.apache.hadoop.hbase.client.Put;
|
||||||
import org.apache.hadoop.hbase.rest.model.CellModel;
|
import org.apache.hadoop.hbase.rest.model.CellModel;
|
||||||
import org.apache.hadoop.hbase.rest.model.CellSetModel;
|
import org.apache.hadoop.hbase.rest.model.CellSetModel;
|
||||||
import org.apache.hadoop.hbase.rest.model.RowModel;
|
import org.apache.hadoop.hbase.rest.model.RowModel;
|
||||||
|
import org.apache.hadoop.hbase.rest.transform.Transform;
|
||||||
import org.apache.hadoop.hbase.util.Bytes;
|
import org.apache.hadoop.hbase.util.Bytes;
|
||||||
|
|
||||||
public class RowResource extends ResourceBase {
|
public class RowResource extends ResourceBase {
|
||||||
private static final Log LOG = LogFactory.getLog(RowResource.class);
|
private static final Log LOG = LogFactory.getLog(RowResource.class);
|
||||||
|
|
||||||
String tableName;
|
TableResource tableResource;
|
||||||
RowSpec rowspec;
|
RowSpec rowspec;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
* @param table
|
* @param tableResource
|
||||||
* @param rowspec
|
* @param rowspec
|
||||||
* @param versions
|
* @param versions
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
public RowResource(String table, String rowspec, String versions)
|
public RowResource(TableResource tableResource, String rowspec,
|
||||||
throws IOException {
|
String versions) throws IOException {
|
||||||
super();
|
super();
|
||||||
this.tableName = table;
|
this.tableResource = tableResource;
|
||||||
this.rowspec = new RowSpec(rowspec);
|
this.rowspec = new RowSpec(rowspec);
|
||||||
if (LOG.isDebugEnabled()) {
|
|
||||||
LOG.debug("new RowResource: table=" + this.tableName + "rowspec=" +
|
|
||||||
this.rowspec);
|
|
||||||
}
|
|
||||||
if (versions != null) {
|
if (versions != null) {
|
||||||
this.rowspec.setMaxVersions(Integer.valueOf(versions));
|
this.rowspec.setMaxVersions(Integer.valueOf(versions));
|
||||||
}
|
}
|
||||||
|
@ -86,7 +83,7 @@ public class RowResource extends ResourceBase {
|
||||||
servlet.getMetrics().incrementRequests(1);
|
servlet.getMetrics().incrementRequests(1);
|
||||||
try {
|
try {
|
||||||
ResultGenerator generator =
|
ResultGenerator generator =
|
||||||
ResultGenerator.fromRowSpec(tableName, rowspec, null);
|
ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null);
|
||||||
if (!generator.hasNext()) {
|
if (!generator.hasNext()) {
|
||||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||||
}
|
}
|
||||||
|
@ -101,9 +98,12 @@ public class RowResource extends ResourceBase {
|
||||||
rowKey = value.getRow();
|
rowKey = value.getRow();
|
||||||
rowModel = new RowModel(rowKey);
|
rowModel = new RowModel(rowKey);
|
||||||
}
|
}
|
||||||
rowModel.addCell(
|
byte[] family = value.getFamily();
|
||||||
new CellModel(value.getFamily(), value.getQualifier(),
|
byte[] qualifier = value.getQualifier();
|
||||||
value.getTimestamp(), value.getValue()));
|
byte[] data = tableResource.transform(family, qualifier,
|
||||||
|
value.getValue(), Transform.Direction.OUT);
|
||||||
|
rowModel.addCell(new CellModel(family, qualifier,
|
||||||
|
value.getTimestamp(), data));
|
||||||
if (++count > rowspec.getMaxValues()) {
|
if (++count > rowspec.getMaxValues()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -131,12 +131,16 @@ public class RowResource extends ResourceBase {
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
ResultGenerator generator =
|
ResultGenerator generator =
|
||||||
ResultGenerator.fromRowSpec(tableName, rowspec, null);
|
ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null);
|
||||||
if (!generator.hasNext()) {
|
if (!generator.hasNext()) {
|
||||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||||
}
|
}
|
||||||
KeyValue value = generator.next();
|
KeyValue value = generator.next();
|
||||||
ResponseBuilder response = Response.ok(value.getValue());
|
byte[] family = value.getFamily();
|
||||||
|
byte[] qualifier = value.getQualifier();
|
||||||
|
byte[] data = tableResource.transform(family, qualifier,
|
||||||
|
value.getValue(), Transform.Direction.OUT);
|
||||||
|
ResponseBuilder response = Response.ok(data);
|
||||||
response.header("X-Timestamp", value.getTimestamp());
|
response.header("X-Timestamp", value.getTimestamp());
|
||||||
return response.build();
|
return response.build();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
|
@ -151,7 +155,7 @@ public class RowResource extends ResourceBase {
|
||||||
HTableInterface table = null;
|
HTableInterface table = null;
|
||||||
try {
|
try {
|
||||||
List<RowModel> rows = model.getRows();
|
List<RowModel> rows = model.getRows();
|
||||||
table = pool.getTable(tableName);
|
table = pool.getTable(tableResource.getName());
|
||||||
((HTable)table).setAutoFlush(false);
|
((HTable)table).setAutoFlush(false);
|
||||||
for (RowModel row: rows) {
|
for (RowModel row: rows) {
|
||||||
byte[] key = row.getKey();
|
byte[] key = row.getKey();
|
||||||
|
@ -159,9 +163,13 @@ public class RowResource extends ResourceBase {
|
||||||
for (CellModel cell: row.getCells()) {
|
for (CellModel cell: row.getCells()) {
|
||||||
byte [][] parts = KeyValue.parseColumn(cell.getColumn());
|
byte [][] parts = KeyValue.parseColumn(cell.getColumn());
|
||||||
if (parts.length == 2 && parts[1].length > 0) {
|
if (parts.length == 2 && parts[1].length > 0) {
|
||||||
put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
|
put.add(parts[0], parts[1], cell.getTimestamp(),
|
||||||
|
tableResource.transform(parts[0], parts[1], cell.getValue(),
|
||||||
|
Transform.Direction.IN));
|
||||||
} else {
|
} else {
|
||||||
put.add(parts[0], null, cell.getTimestamp(), cell.getValue());
|
put.add(parts[0], null, cell.getTimestamp(),
|
||||||
|
tableResource.transform(parts[0], null, cell.getValue(),
|
||||||
|
Transform.Direction.IN));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
table.put(put);
|
table.put(put);
|
||||||
|
@ -215,11 +223,15 @@ public class RowResource extends ResourceBase {
|
||||||
Put put = new Put(row);
|
Put put = new Put(row);
|
||||||
byte parts[][] = KeyValue.parseColumn(column);
|
byte parts[][] = KeyValue.parseColumn(column);
|
||||||
if (parts.length == 2 && parts[1].length > 0) {
|
if (parts.length == 2 && parts[1].length > 0) {
|
||||||
put.add(parts[0], parts[1], timestamp, message);
|
put.add(parts[0], parts[1], timestamp,
|
||||||
|
tableResource.transform(parts[0], parts[1], message,
|
||||||
|
Transform.Direction.IN));
|
||||||
} else {
|
} else {
|
||||||
put.add(parts[0], null, timestamp, message);
|
put.add(parts[0], null, timestamp,
|
||||||
|
tableResource.transform(parts[0], null, message,
|
||||||
|
Transform.Direction.IN));
|
||||||
}
|
}
|
||||||
table = pool.getTable(tableName);
|
table = pool.getTable(tableResource.getName());
|
||||||
table.put(put);
|
table.put(put);
|
||||||
if (LOG.isDebugEnabled()) {
|
if (LOG.isDebugEnabled()) {
|
||||||
LOG.debug("PUT " + put.toString());
|
LOG.debug("PUT " + put.toString());
|
||||||
|
@ -306,7 +318,7 @@ public class RowResource extends ResourceBase {
|
||||||
HTablePool pool = servlet.getTablePool();
|
HTablePool pool = servlet.getTablePool();
|
||||||
HTableInterface table = null;
|
HTableInterface table = null;
|
||||||
try {
|
try {
|
||||||
table = pool.getTable(tableName);
|
table = pool.getTable(tableResource.getName());
|
||||||
table.delete(delete);
|
table.delete(delete);
|
||||||
if (LOG.isDebugEnabled()) {
|
if (LOG.isDebugEnabled()) {
|
||||||
LOG.debug("DELETE " + delete.toString());
|
LOG.debug("DELETE " + delete.toString());
|
||||||
|
|
|
@ -51,16 +51,16 @@ public class ScannerResource extends ResourceBase {
|
||||||
static final Map<String,ScannerInstanceResource> scanners =
|
static final Map<String,ScannerInstanceResource> scanners =
|
||||||
Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
|
Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
|
||||||
|
|
||||||
String tableName;
|
TableResource tableResource;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
* @param table
|
* @param tableResource
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
public ScannerResource(String table) throws IOException {
|
public ScannerResource(TableResource tableResource)throws IOException {
|
||||||
super();
|
super();
|
||||||
this.tableName = table;
|
this.tableResource = tableResource;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void delete(final String id) {
|
static void delete(final String id) {
|
||||||
|
@ -78,6 +78,7 @@ public class ScannerResource extends ResourceBase {
|
||||||
model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
|
model.getColumns(), model.getStartTime(), model.getEndTime(), 1);
|
||||||
try {
|
try {
|
||||||
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
|
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
|
||||||
|
String tableName = tableResource.getName();
|
||||||
ScannerResultGenerator gen =
|
ScannerResultGenerator gen =
|
||||||
new ScannerResultGenerator(tableName, spec, filter);
|
new ScannerResultGenerator(tableName, spec, filter);
|
||||||
String id = gen.getID();
|
String id = gen.getID();
|
||||||
|
|
|
@ -62,22 +62,22 @@ public class SchemaResource extends ResourceBase {
|
||||||
cacheControl.setNoTransform(false);
|
cacheControl.setNoTransform(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
String tableName;
|
TableResource tableResource;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
* @param table
|
* @param tableResource
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
public SchemaResource(String table) throws IOException {
|
public SchemaResource(TableResource tableResource) throws IOException {
|
||||||
super();
|
super();
|
||||||
this.tableName = table;
|
this.tableResource = tableResource;
|
||||||
}
|
}
|
||||||
|
|
||||||
private HTableDescriptor getTableSchema() throws IOException,
|
private HTableDescriptor getTableSchema() throws IOException,
|
||||||
TableNotFoundException {
|
TableNotFoundException {
|
||||||
HTablePool pool = servlet.getTablePool();
|
HTablePool pool = servlet.getTablePool();
|
||||||
HTableInterface table = pool.getTable(tableName);
|
HTableInterface table = pool.getTable(tableResource.getName());
|
||||||
try {
|
try {
|
||||||
return table.getTableDescriptor();
|
return table.getTableDescriptor();
|
||||||
} finally {
|
} finally {
|
||||||
|
@ -157,7 +157,7 @@ public class SchemaResource extends ResourceBase {
|
||||||
throw new WebApplicationException(e,
|
throw new WebApplicationException(e,
|
||||||
Response.Status.INTERNAL_SERVER_ERROR);
|
Response.Status.INTERNAL_SERVER_ERROR);
|
||||||
} finally {
|
} finally {
|
||||||
admin.enableTable(tableName);
|
admin.enableTable(tableResource.getName());
|
||||||
}
|
}
|
||||||
return Response.ok().build();
|
return Response.ok().build();
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
|
@ -169,7 +169,7 @@ public class SchemaResource extends ResourceBase {
|
||||||
private Response update(final TableSchemaModel model, final boolean replace,
|
private Response update(final TableSchemaModel model, final boolean replace,
|
||||||
final UriInfo uriInfo) {
|
final UriInfo uriInfo) {
|
||||||
try {
|
try {
|
||||||
byte[] name = Bytes.toBytes(tableName);
|
byte[] name = Bytes.toBytes(tableResource.getName());
|
||||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||||
if (replace || !admin.tableExists(name)) {
|
if (replace || !admin.tableExists(name)) {
|
||||||
return replace(name, model, uriInfo, admin);
|
return replace(name, model, uriInfo, admin);
|
||||||
|
@ -214,7 +214,7 @@ public class SchemaResource extends ResourceBase {
|
||||||
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
for (int i = 0; i < 10; i++) try {
|
for (int i = 0; i < 10; i++) try {
|
||||||
admin.disableTable(tableName);
|
admin.disableTable(tableResource.getName());
|
||||||
success = true;
|
success = true;
|
||||||
break;
|
break;
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
|
@ -222,7 +222,7 @@ public class SchemaResource extends ResourceBase {
|
||||||
if (!success) {
|
if (!success) {
|
||||||
throw new IOException("could not disable table");
|
throw new IOException("could not disable table");
|
||||||
}
|
}
|
||||||
admin.deleteTable(tableName);
|
admin.deleteTable(tableResource.getName());
|
||||||
return Response.ok().build();
|
return Response.ok().build();
|
||||||
} catch (TableNotFoundException e) {
|
} catch (TableNotFoundException e) {
|
||||||
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
throw new WebApplicationException(Response.Status.NOT_FOUND);
|
||||||
|
|
|
@ -21,16 +21,171 @@
|
||||||
package org.apache.hadoop.hbase.rest;
|
package org.apache.hadoop.hbase.rest;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.concurrent.ConcurrentSkipListMap;
|
||||||
|
import java.util.regex.Matcher;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import javax.ws.rs.Encoded;
|
import javax.ws.rs.Encoded;
|
||||||
import javax.ws.rs.Path;
|
import javax.ws.rs.Path;
|
||||||
import javax.ws.rs.PathParam;
|
import javax.ws.rs.PathParam;
|
||||||
import javax.ws.rs.QueryParam;
|
import javax.ws.rs.QueryParam;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||||
|
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||||
|
import org.apache.hadoop.hbase.rest.transform.NullTransform;
|
||||||
|
import org.apache.hadoop.hbase.rest.transform.Transform;
|
||||||
|
import org.apache.hadoop.hbase.util.Bytes;
|
||||||
|
import org.apache.hadoop.util.StringUtils;
|
||||||
|
|
||||||
public class TableResource extends ResourceBase {
|
public class TableResource extends ResourceBase {
|
||||||
|
private static final Log LOG = LogFactory.getLog(TableResource.class);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* HCD attributes starting with this string are considered transform
|
||||||
|
* directives
|
||||||
|
*/
|
||||||
|
private static final String DIRECTIVE_KEY = "Transform$";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform directives are of the form <tt><qualifier>:<class></tt>
|
||||||
|
* where <tt>qualifier</tt> is a string for exact matching or '*' as a wildcard
|
||||||
|
* that will match anything; and <tt>class</tt> is either the fully qualified
|
||||||
|
* class name of a transform implementation or can be the short name of a
|
||||||
|
* transform in the <tt>org.apache.hadoop.hbase.rest.transform package</tt>.
|
||||||
|
*/
|
||||||
|
private static final Pattern DIRECTIVE_PATTERN =
|
||||||
|
Pattern.compile("([^\\:]+)\\:([^\\,]+)\\,?");
|
||||||
|
private static final Transform defaultTransform = new NullTransform();
|
||||||
|
private static final
|
||||||
|
Map<String,Map<byte[],Map<byte[],Transform>>> transformMap =
|
||||||
|
new ConcurrentHashMap<String,Map<byte[],Map<byte[],Transform>>>();
|
||||||
|
private static final Map<String,Long> lastCheckedMap =
|
||||||
|
new ConcurrentHashMap<String,Long>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param table the table
|
||||||
|
* @param family the column family
|
||||||
|
* @param qualifier the column qualifier, or null
|
||||||
|
* @return the transformation specified for the given family or qualifier, if
|
||||||
|
* any, otherwise the default
|
||||||
|
*/
|
||||||
|
static Transform getTransform(String table, byte[] family, byte[] qualifier) {
|
||||||
|
if (qualifier == null) {
|
||||||
|
qualifier = HConstants.EMPTY_BYTE_ARRAY;
|
||||||
|
}
|
||||||
|
Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
|
||||||
|
if (familyMap != null) {
|
||||||
|
Map<byte[],Transform> columnMap = familyMap.get(family);
|
||||||
|
if (columnMap != null) {
|
||||||
|
Transform t = columnMap.get(qualifier);
|
||||||
|
// check as necessary if there is a wildcard entry
|
||||||
|
if (t == null) {
|
||||||
|
t = columnMap.get(HConstants.EMPTY_BYTE_ARRAY);
|
||||||
|
}
|
||||||
|
// if we found something, return it, otherwise we will return the
|
||||||
|
// default by falling through
|
||||||
|
if (t != null) {
|
||||||
|
return t;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return defaultTransform;
|
||||||
|
}
|
||||||
|
|
||||||
|
synchronized static void setTransform(String table, byte[] family,
|
||||||
|
byte[] qualifier, Transform transform) {
|
||||||
|
Map<byte[],Map<byte[],Transform>> familyMap = transformMap.get(table);
|
||||||
|
if (familyMap == null) {
|
||||||
|
familyMap = new ConcurrentSkipListMap<byte[],Map<byte[],Transform>>(
|
||||||
|
Bytes.BYTES_COMPARATOR);
|
||||||
|
transformMap.put(table, familyMap);
|
||||||
|
}
|
||||||
|
Map<byte[],Transform> columnMap = familyMap.get(family);
|
||||||
|
if (columnMap == null) {
|
||||||
|
columnMap = new ConcurrentSkipListMap<byte[],Transform>(
|
||||||
|
Bytes.BYTES_COMPARATOR);
|
||||||
|
familyMap.put(family, columnMap);
|
||||||
|
}
|
||||||
|
// if transform is null, remove any existing entry
|
||||||
|
if (transform != null) {
|
||||||
|
columnMap.put(qualifier, transform);
|
||||||
|
} else {
|
||||||
|
columnMap.remove(qualifier);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
String table;
|
String table;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan the table schema for transform directives. These are column family
|
||||||
|
* attributes containing a comma-separated list of elements of the form
|
||||||
|
* <tt><qualifier>:<transform-class></tt>, where qualifier
|
||||||
|
* can be a string for exact matching or '*' as a wildcard to match anything.
|
||||||
|
* The attribute key must begin with the string "Transform$".
|
||||||
|
*/
|
||||||
|
void scanTransformAttrs() throws IOException {
|
||||||
|
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||||
|
HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes(table));
|
||||||
|
for (HColumnDescriptor hcd: htd.getFamilies()) {
|
||||||
|
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
|
||||||
|
hcd.getValues().entrySet()) {
|
||||||
|
// does the key start with the transform directive tag?
|
||||||
|
String key = Bytes.toString(e.getKey().get());
|
||||||
|
if (!key.startsWith(DIRECTIVE_KEY)) {
|
||||||
|
// no, skip
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// match a comma separated list of one or more directives
|
||||||
|
byte[] value = e.getValue().get();
|
||||||
|
Matcher m = DIRECTIVE_PATTERN.matcher(Bytes.toString(value));
|
||||||
|
while (m.find()) {
|
||||||
|
byte[] qualifier = HConstants.EMPTY_BYTE_ARRAY;
|
||||||
|
String s = m.group(1);
|
||||||
|
if (s.length() > 0 && !s.equals("*")) {
|
||||||
|
qualifier = Bytes.toBytes(s);
|
||||||
|
}
|
||||||
|
boolean retry = false;
|
||||||
|
String className = m.group(2);
|
||||||
|
while (true) {
|
||||||
|
try {
|
||||||
|
// if a transform was previously configured for the qualifier,
|
||||||
|
// this will simply replace it
|
||||||
|
setTransform(table, hcd.getName(), qualifier,
|
||||||
|
(Transform)Class.forName(className).newInstance());
|
||||||
|
break;
|
||||||
|
} catch (InstantiationException ex) {
|
||||||
|
LOG.error(StringUtils.stringifyException(ex));
|
||||||
|
if (retry) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
retry = true;
|
||||||
|
} catch (IllegalAccessException ex) {
|
||||||
|
LOG.error(StringUtils.stringifyException(ex));
|
||||||
|
if (retry) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
retry = true;
|
||||||
|
} catch (ClassNotFoundException ex) {
|
||||||
|
if (retry) {
|
||||||
|
LOG.error(StringUtils.stringifyException(ex));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
className = "org.apache.hadoop.hbase.rest.transform." + className;
|
||||||
|
retry = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
* @param table
|
* @param table
|
||||||
|
@ -39,26 +194,74 @@ public class TableResource extends ResourceBase {
|
||||||
public TableResource(String table) throws IOException {
|
public TableResource(String table) throws IOException {
|
||||||
super();
|
super();
|
||||||
this.table = table;
|
this.table = table;
|
||||||
|
// Scanning the table schema is too expensive to do for every operation.
|
||||||
|
// Do it once per minute by default.
|
||||||
|
// Setting hbase.rest.transform.check.interval to <= 0 disables rescanning.
|
||||||
|
long now = System.currentTimeMillis();
|
||||||
|
Long lastChecked = lastCheckedMap.get(table);
|
||||||
|
if (lastChecked != null) {
|
||||||
|
long interval = servlet.getConfiguration()
|
||||||
|
.getLong("hbase.rest.transform.check.interval", 60000);
|
||||||
|
if (interval > 0 && (now - lastChecked.longValue()) > interval) {
|
||||||
|
scanTransformAttrs();
|
||||||
|
lastCheckedMap.put(table, now);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
scanTransformAttrs();
|
||||||
|
lastCheckedMap.put(table, now);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** @return the table name */
|
||||||
|
String getName() {
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @return true if the table exists
|
||||||
|
* @throws IOException
|
||||||
|
*/
|
||||||
|
boolean exists() throws IOException {
|
||||||
|
HBaseAdmin admin = new HBaseAdmin(servlet.getConfiguration());
|
||||||
|
return admin.tableExists(table);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply any configured transformations to the value
|
||||||
|
* @param family
|
||||||
|
* @param qualifier
|
||||||
|
* @param value
|
||||||
|
* @param direction
|
||||||
|
* @return
|
||||||
|
* @throws IOException
|
||||||
|
*/
|
||||||
|
byte[] transform(byte[] family, byte[] qualifier, byte[] value,
|
||||||
|
Transform.Direction direction) throws IOException {
|
||||||
|
Transform t = getTransform(table, family, qualifier);
|
||||||
|
if (t != null) {
|
||||||
|
return t.transform(value, direction);
|
||||||
|
}
|
||||||
|
return value;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Path("exists")
|
@Path("exists")
|
||||||
public ExistsResource getExistsResource() throws IOException {
|
public ExistsResource getExistsResource() throws IOException {
|
||||||
return new ExistsResource(table);
|
return new ExistsResource(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Path("regions")
|
@Path("regions")
|
||||||
public RegionsResource getRegionsResource() throws IOException {
|
public RegionsResource getRegionsResource() throws IOException {
|
||||||
return new RegionsResource(table);
|
return new RegionsResource(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Path("scanner")
|
@Path("scanner")
|
||||||
public ScannerResource getScannerResource() throws IOException {
|
public ScannerResource getScannerResource() throws IOException {
|
||||||
return new ScannerResource(table);
|
return new ScannerResource(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Path("schema")
|
@Path("schema")
|
||||||
public SchemaResource getSchemaResource() throws IOException {
|
public SchemaResource getSchemaResource() throws IOException {
|
||||||
return new SchemaResource(table);
|
return new SchemaResource(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Path("{rowspec: .+}")
|
@Path("{rowspec: .+}")
|
||||||
|
@ -67,6 +270,6 @@ public class TableResource extends ResourceBase {
|
||||||
// the RowSpec constructor has a chance to parse
|
// the RowSpec constructor has a chance to parse
|
||||||
final @PathParam("rowspec") @Encoded String rowspec,
|
final @PathParam("rowspec") @Encoded String rowspec,
|
||||||
final @QueryParam("v") String versions) throws IOException {
|
final @QueryParam("v") String versions) throws IOException {
|
||||||
return new RowResource(table, rowspec, versions);
|
return new RowResource(this, rowspec, versions);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,35 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2010 The Apache Software Foundation
|
||||||
|
*
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.hbase.rest.transform;
|
||||||
|
|
||||||
|
public class Base64 implements Transform {
|
||||||
|
@Override
|
||||||
|
public byte[] transform(byte[] data, Direction direction) {
|
||||||
|
switch (direction) {
|
||||||
|
case IN:
|
||||||
|
return com.sun.jersey.core.util.Base64.encode(data);
|
||||||
|
case OUT:
|
||||||
|
return com.sun.jersey.core.util.Base64.decode(data);
|
||||||
|
default:
|
||||||
|
throw new RuntimeException("illegal direction");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2010 The Apache Software Foundation
|
||||||
|
*
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.hbase.rest.transform;
|
||||||
|
|
||||||
|
public class NullTransform implements Transform {
|
||||||
|
@Override
|
||||||
|
public byte[] transform(byte[] data, Direction direction) {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,44 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2010 The Apache Software Foundation
|
||||||
|
*
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.hbase.rest.transform;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data transformation module
|
||||||
|
*/
|
||||||
|
public interface Transform {

  /** Transfer direction */
  static enum Direction {
    /** From client to server */
    IN,
    /** From server to client */
    OUT
  };

  /**
   * Transform data from one representation to another according to
   * transfer direction.
   * @param data input data
   * @param direction IN or OUT
   * @return the transformed data
   */
  byte[] transform (byte[] data, Direction direction);
}
|
|
@ -0,0 +1,104 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2010 The Apache Software Foundation
|
||||||
|
*
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.hbase.rest;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.client.Get;
|
||||||
|
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||||
|
import org.apache.hadoop.hbase.client.HTable;
|
||||||
|
import org.apache.hadoop.hbase.client.Result;
|
||||||
|
import org.apache.hadoop.hbase.rest.client.Client;
|
||||||
|
import org.apache.hadoop.hbase.rest.client.Cluster;
|
||||||
|
import org.apache.hadoop.hbase.rest.client.Response;
|
||||||
|
import org.apache.hadoop.hbase.util.Bytes;
|
||||||
|
|
||||||
|
public class TestTransform extends HBaseRESTClusterTestBase {
|
||||||
|
static final String TABLE = "TestTransform";
|
||||||
|
static final String CFA = "a";
|
||||||
|
static final String CFB = "b";
|
||||||
|
static final String COLUMN_1 = CFA + ":1";
|
||||||
|
static final String COLUMN_2 = CFB + ":2";
|
||||||
|
static final String ROW_1 = "testrow1";
|
||||||
|
static final byte[] VALUE_1 = Bytes.toBytes("testvalue1");
|
||||||
|
static final byte[] VALUE_2 = Bytes.toBytes("testvalue2");
|
||||||
|
static final byte[] VALUE_2_BASE64 = Bytes.toBytes("dGVzdHZhbHVlMg==");
|
||||||
|
|
||||||
|
Client client;
|
||||||
|
HBaseAdmin admin;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void setUp() throws Exception {
|
||||||
|
super.setUp();
|
||||||
|
client = new Client(new Cluster().add("localhost", testServletPort));
|
||||||
|
admin = new HBaseAdmin(conf);
|
||||||
|
if (admin.tableExists(TABLE)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
HTableDescriptor htd = new HTableDescriptor(TABLE);
|
||||||
|
htd.addFamily(new HColumnDescriptor(CFA));
|
||||||
|
HColumnDescriptor cfB = new HColumnDescriptor(CFB);
|
||||||
|
cfB.setValue("Transform$1", "*:Base64");
|
||||||
|
htd.addFamily(cfB);
|
||||||
|
admin.createTable(htd);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void tearDown() throws Exception {
|
||||||
|
client.shutdown();
|
||||||
|
super.tearDown();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testTransform() throws Exception {
|
||||||
|
String path1 = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1;
|
||||||
|
String path2 = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_2;
|
||||||
|
|
||||||
|
// store value 1
|
||||||
|
Response response = client.put(path1, MIMETYPE_BINARY, VALUE_1);
|
||||||
|
assertEquals(response.getCode(), 200);
|
||||||
|
|
||||||
|
// store value 2 (stargate should transform into base64)
|
||||||
|
response = client.put(path2, MIMETYPE_BINARY, VALUE_2);
|
||||||
|
assertEquals(response.getCode(), 200);
|
||||||
|
|
||||||
|
// get the table contents directly
|
||||||
|
HTable table = new HTable(TABLE);
|
||||||
|
Get get = new Get(Bytes.toBytes(ROW_1));
|
||||||
|
get.addFamily(Bytes.toBytes(CFA));
|
||||||
|
get.addFamily(Bytes.toBytes(CFB));
|
||||||
|
Result result = table.get(get);
|
||||||
|
// value 1 should not be transformed
|
||||||
|
byte[] value = result.getValue(Bytes.toBytes(CFA), Bytes.toBytes("1"));
|
||||||
|
assertNotNull(value);
|
||||||
|
assertTrue(Bytes.equals(value, VALUE_1));
|
||||||
|
// value 2 should have been base64 encoded
|
||||||
|
value = result.getValue(Bytes.toBytes(CFB), Bytes.toBytes("2"));
|
||||||
|
assertNotNull(value);
|
||||||
|
assertTrue(Bytes.equals(value, VALUE_2_BASE64));
|
||||||
|
table.close();
|
||||||
|
|
||||||
|
// stargate should decode the transformed value back to original bytes
|
||||||
|
response = client.get(path2, MIMETYPE_BINARY);
|
||||||
|
assertEquals(response.getCode(), 200);
|
||||||
|
value = response.getBody();
|
||||||
|
assertTrue(Bytes.equals(value, VALUE_2));
|
||||||
|
}
|
||||||
|
}
|
Loading…
Reference in New Issue