diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 9f8fa552385..c283c8d156c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -61,7 +61,8 @@ import com.google.common.primitives.Longs;
  * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
  * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
  * <keylength> <valuelength> <key> <value> Key is further decomposed as:
- * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier> <timestamp> <keytype>
+ * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier>
+ * <timestamp> <keytype>
  * The rowlength maximum is Short.MAX_SIZE, column family length maximum
  * is Byte.MAX_SIZE, and column qualifier + key length must be <
  * Integer.MAX_SIZE. The column does not contain the family/qualifier delimiter,
@@ -1294,10 +1295,16 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
   }
 
   /**
-   * Splits a column in family:qualifier form into separate byte arrays.
+   * Splits a column in {@code family:qualifier} form into separate byte arrays. An empty qualifier
+   * (ie, {@code fam:}) is parsed as { fam, EMPTY_BYTE_ARRAY } while no delimiter (ie,
+   * {@code fam}) is parsed as an array of one element, { fam }.
+   * <p>
+   * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
+   * </p>
    * <p>
    * Not recommend to be used as this is old-style API.
-   * @param c The column.
+   * </p>
+   * @param c The column.
    * @return The parsed column.
    */
   public static byte [][] parseColumn(byte [] c) {
@@ -1306,10 +1313,10 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
       // If no delimiter, return array of size 1
       return new byte [][] { c };
     } else if(index == c.length - 1) {
-      // Only a family, return array size 1
+      // family with empty qualifier, return array size 2
       byte [] family = new byte[c.length-1];
       System.arraycopy(c, 0, family, 0, family.length);
-      return new byte [][] { family };
+      return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY};
     }
     // Family and column, return array size 2
     final byte [][] result = new byte [2][];
@@ -1317,8 +1324,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
     System.arraycopy(c, 0, result[0], 0, index);
     final int len = c.length - (index + 1);
     result[1] = new byte[len];
-    System.arraycopy(c, index + 1 /*Skip delimiter*/, result[1], 0,
-      len);
+    System.arraycopy(c, index + 1 /* Skip delimiter */, result[1], 0, len);
     return result;
   }
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index eda5dd88ff3..021b10163de 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -77,6 +77,11 @@ public class TestKeyValue extends TestCase {
     // Test empty value and empty column -- both should work. (not empty fam)
     check(Bytes.toBytes(getName()), Bytes.toBytes(getName()), null, 1, null);
     check(HConstants.EMPTY_BYTE_ARRAY, Bytes.toBytes(getName()), null, 1, null);
+    // empty qual is equivalent to null qual
+    assertEquals(
+      new KeyValue(Bytes.toBytes("rk"), Bytes.toBytes("fam"), null, 1, (byte[]) null),
+      new KeyValue(Bytes.toBytes("rk"), Bytes.toBytes("fam"),
+        HConstants.EMPTY_BYTE_ARRAY, 1, (byte[]) null));
   }
 
   private void check(final byte [] row, final byte [] family, byte [] qualifier,
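The contract the two hunks above establish is: no delimiter means "whole family", a trailing delimiter means "empty qualifier", and the caller can now tell the two apart by array length. A reviewer's sketch of the three cases, not part of the patch (the class name is illustrative; run with `java -ea` so the asserts fire):

```java
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseColumnContract {
  public static void main(String[] args) {
    // "fam" -- no delimiter: one element, which callers treat as a
    // whole-family request (addFamily/deleteFamily).
    byte[][] familyOnly = KeyValue.parseColumn(Bytes.toBytes("fam"));
    assert familyOnly.length == 1;

    // "fam:" -- empty qualifier: now two elements, the second being
    // HConstants.EMPTY_BYTE_ARRAY. Before this patch the trailing ':'
    // collapsed into the family-only case.
    byte[][] emptyQual = KeyValue.parseColumn(Bytes.toBytes("fam:"));
    assert emptyQual.length == 2;
    assert emptyQual[1].length == 0;

    // "fam:qual" -- ordinary column: two elements, family and qualifier.
    byte[][] both = KeyValue.parseColumn(Bytes.toBytes("fam:qual"));
    assert both.length == 2 && "qual".equals(Bytes.toString(both[1]));
  }
}
```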
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index dd6ef511229..d6442a4e3e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -54,7 +54,7 @@ implements Configurable {
   public static final String SCAN_ROW_STOP = "hbase.mapreduce.scan.row.stop";
   /** Column Family to Scan */
   public static final String SCAN_COLUMN_FAMILY = "hbase.mapreduce.scan.column.family";
-  /** Space delimited list of columns to scan. */
+  /** Space delimited list of columns and column families to scan. */
   public static final String SCAN_COLUMNS = "hbase.mapreduce.scan.columns";
   /** The timestamp used to filter columns with a specific timestamp. */
   public static final String SCAN_TIMESTAMP = "hbase.mapreduce.scan.timestamp";
@@ -159,30 +159,34 @@ implements Configurable {
 
   /**
    * Parses a combined family and qualifier and adds either both or just the
-   * family in case there is not qualifier. This assumes the older colon
-   * divided notation, e.g. "data:contents" or "meta:".
-   * <p>
-   * Note: It will through an error when the colon is missing.
+   * family in case there is no qualifier. This assumes the older colon
+   * divided notation, e.g. "family:qualifier".
    *
+   * @param scan The Scan to update.
    * @param familyAndQualifier family and qualifier
    * @return A reference to this instance.
-   * @throws IllegalArgumentException When the colon is missing.
+   * @throws IllegalArgumentException When familyAndQualifier is invalid.
    */
   private static void addColumn(Scan scan, byte[] familyAndQualifier) {
     byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
-    if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
+    if (fq.length == 1) {
+      scan.addFamily(fq[0]);
+    } else if (fq.length == 2) {
       scan.addColumn(fq[0], fq[1]);
     } else {
-      scan.addFamily(fq[0]);
+      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
     }
   }
 
   /**
    * Adds an array of columns specified using old format, family:qualifier.
    * <p>
-   * Overrides previous calls to addFamily for any families in the input.
+   * Overrides previous calls to {@link Scan#addColumn(byte[], byte[])} for any families in the
+   * input.
    *
-   * @param columns array of columns, formatted as <pre>family:qualifier</pre>
+   * @param scan The Scan to update.
+   * @param columns array of columns, formatted as family:qualifier
+   * @see Scan#addColumn(byte[], byte[])
    */
   public static void addColumns(Scan scan, byte [][] columns) {
     for (byte[] column : columns) {
@@ -191,13 +195,10 @@ implements Configurable {
   }
 
   /**
-   * Convenience method to help parse old style (or rather user entry on the
-   * command line) column definitions, e.g. "data:contents mime:". The columns
-   * must be space delimited and always have a colon (":") to denote family
-   * and qualifier.
+   * Convenience method to parse a string representation of an array of column specifiers.
    *
+   * @param scan The Scan to update.
    * @param columns The columns to parse.
-   * @return A reference to this instance.
    */
   private static void addColumns(Scan scan, String columns) {
     String[] cols = columns.split(" ");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index c90ce6bda34..486ef921525 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -63,8 +63,8 @@ import org.apache.hadoop.net.DNS;
  *     Bytes.toBytes("exampleTable"));
  *   // mandatory
  *   setHTable(exampleTable);
- *   Text[] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
- *     Bytes.toBytes("columnB") };
+ *   Text[] inputColumns = new byte [][] { Bytes.toBytes("cf1:columnA"),
+ *     Bytes.toBytes("cf2") };
  *   // mandatory
  *   setInputColumns(inputColumns);
  *   RowFilterInterface exampleFilter = new RegExpRowFilter("keyPrefix.*");
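For MapReduce users the practical consequence is that a SCAN_COLUMNS value may mix qualified columns and bare families, and the two spellings now mean different things. A minimal, hypothetical configuration sketch (table and column names are illustrative; INPUT_TABLE is the standard TableInputFormat key):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;

public class ScanColumnsConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.INPUT_TABLE, "exampleTable");
    // "cf1:columnA" selects one column, "cf2" selects the whole family, and
    // "cf1:" would select only cf1's empty qualifier rather than all of cf1.
    conf.set(TableInputFormat.SCAN_COLUMNS, "cf1:columnA cf2");
  }
}
```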
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 2a397ec1a46..3a587f81c5f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -217,11 +217,12 @@ public class RowResource extends ResourceBase {
             .build();
         }
         byte [][] parts = KeyValue.parseColumn(col);
-        if (parts.length == 2 && parts[1].length > 0) {
-          put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
-        } else {
-          put.add(parts[0], null, cell.getTimestamp(), cell.getValue());
+        if (parts.length != 2) {
+          return Response.status(Response.Status.BAD_REQUEST)
+            .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+            .build();
         }
+        put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
       }
       puts.add(put);
       if (LOG.isDebugEnabled()) {
@@ -285,11 +286,12 @@ public class RowResource extends ResourceBase {
       }
       Put put = new Put(row);
       byte parts[][] = KeyValue.parseColumn(column);
-      if (parts.length == 2 && parts[1].length > 0) {
-        put.add(parts[0], parts[1], timestamp, message);
-      } else {
-        put.add(parts[0], null, timestamp, message);
+      if (parts.length != 2) {
+        return Response.status(Response.Status.BAD_REQUEST)
+          .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+          .build();
       }
+      put.add(parts[0], parts[1], timestamp, message);
       table = servlet.getTable(tableResource.getName());
       table.put(put);
       if (LOG.isDebugEnabled()) {
@@ -373,16 +375,24 @@ public class RowResource extends ResourceBase {
     for (byte[] column: rowspec.getColumns()) {
       byte[][] split = KeyValue.parseColumn(column);
       if (rowspec.hasTimestamp()) {
-        if (split.length == 2 && split[1].length != 0) {
+        if (split.length == 1) {
+          delete.deleteFamily(split[0], rowspec.getTimestamp());
+        } else if (split.length == 2) {
           delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
         } else {
-          delete.deleteFamily(split[0], rowspec.getTimestamp());
+          return Response.status(Response.Status.BAD_REQUEST)
+            .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+            .build();
         }
       } else {
-        if (split.length == 2 && split[1].length != 0) {
+        if (split.length == 1) {
+          delete.deleteFamily(split[0]);
+        } else if (split.length == 2) {
           delete.deleteColumns(split[0], split[1]);
         } else {
-          delete.deleteFamily(split[0]);
+          return Response.status(Response.Status.BAD_REQUEST)
+            .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+            .build();
         }
       }
     }
@@ -441,28 +451,26 @@ public class RowResource extends ResourceBase {
       CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
       byte[] valueToCheckColumn = valueToCheckCell.getColumn();
       byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
-      if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
-        CellModel valueToPutCell = null;
-        for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
-          if(Bytes.equals(cellModels.get(i).getColumn(),
-              valueToCheckCell.getColumn())) {
-            valueToPutCell = cellModels.get(i);
-            break;
-          }
-        }
-        if (valueToPutCell != null) {
-          put.add(valueToPutParts[0], valueToPutParts[1], valueToPutCell
-            .getTimestamp(), valueToPutCell.getValue());
-        } else {
-          return Response.status(Response.Status.BAD_REQUEST)
-            .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
-            .build();
-        }
-      } else {
+      if (valueToPutParts.length != 2) {
         return Response.status(Response.Status.BAD_REQUEST)
           .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
           .build();
       }
+      CellModel valueToPutCell = null;
+      for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
+        if(Bytes.equals(cellModels.get(i).getColumn(),
+            valueToCheckCell.getColumn())) {
+          valueToPutCell = cellModels.get(i);
+          break;
+        }
+      }
+      if (null == valueToPutCell) {
+        return Response.status(Response.Status.BAD_REQUEST)
+          .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+          .build();
+      }
+      put.add(valueToPutParts[0], valueToPutParts[1], valueToPutCell
+        .getTimestamp(), valueToPutCell.getValue());
 
       table = servlet.getTable(this.tableResource.getName());
       boolean retValue = table.checkAndPut(key, valueToPutParts[0],
@@ -527,13 +535,12 @@ public class RowResource extends ResourceBase {
         }
       }
       byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
-      if (parts.length == 2 && parts[1].length > 0) {
-        delete.deleteColumns(parts[0], parts[1]);
-      } else {
+      if (parts.length != 2) {
         return Response.status(Response.Status.BAD_REQUEST)
           .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
           .build();
       }
+      delete.deleteColumns(parts[0], parts[1]);
 
       table = servlet.getTable(tableResource.getName());
       boolean retValue = table.checkAndDelete(key, parts[0], parts[1],
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index ed796072c6a..ee84e6d6b53 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -50,10 +50,12 @@ public class RowResultGenerator extends ResultGenerator {
       if (rowspec.hasColumns()) {
         for (byte[] col: rowspec.getColumns()) {
           byte[][] split = KeyValue.parseColumn(col);
-          if (split.length == 2 && split[1].length != 0) {
+          if (split.length == 1) {
+            get.addFamily(split[0]);
+          } else if (split.length == 2) {
             get.addColumn(split[0], split[1]);
           } else {
-            get.addFamily(split[0]);
+            throw new IllegalArgumentException("Invalid column specifier.");
           }
         }
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
index 43743828701..5e252608b6f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
@@ -25,7 +25,6 @@ import java.util.Collection;
 import java.util.TreeSet;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -122,11 +121,7 @@ public class RowSpec {
           }
           String s = URLDecoder.decode(column.toString(),
             HConstants.UTF8_ENCODING);
-          if (!s.contains(":")) {
-            this.columns.add(Bytes.toBytes(s + ":"));
-          } else {
-            this.columns.add(Bytes.toBytes(s));
-          }
+          this.columns.add(Bytes.toBytes(s));
           column.setLength(0);
           i++;
           continue;
@@ -136,14 +131,10 @@ public class RowSpec {
       }
       i++;
       // trailing list entry
-      if (column.length() > 1) {
+      if (column.length() > 0) {
         String s = URLDecoder.decode(column.toString(),
           HConstants.UTF8_ENCODING);
-        if (!s.contains(":")) {
-          this.columns.add(Bytes.toBytes(s + ":"));
-        } else {
-          this.columns.add(Bytes.toBytes(s));
-        }
+        this.columns.add(Bytes.toBytes(s));
       }
     } catch (IndexOutOfBoundsException e) {
       throw new IllegalArgumentException(e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
index ebeae0d8e7f..09fb7d4073d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
@@ -76,10 +76,12 @@ public class ScannerResultGenerator extends ResultGenerator {
         byte[][] columns = rowspec.getColumns();
         for (byte[] column: columns) {
           byte[][] split = KeyValue.parseColumn(column);
-          if (split.length > 1 && (split[1] != null && split[1].length != 0)) {
+          if (split.length == 1) {
+            scan.addFamily(split[0]);
+          } else if (split.length == 2) {
             scan.addColumn(split[0], split[1]);
           } else {
-            scan.addFamily(split[0]);
+            throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
           }
         }
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 236026bdd57..fb16769f1f3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -28,17 +28,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.util.StringUtils;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -48,17 +39,22 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.io.TimeRange;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.rest.Constants;
 import org.apache.hadoop.hbase.rest.model.CellModel;
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
@@ -66,6 +62,10 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
 import org.apache.hadoop.hbase.rest.model.ScannerModel;
 import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
 
 /**
  * HTable interface to remote tables accessed via REST gateway
@@ -116,8 +116,8 @@ public class RemoteHTable implements HTableInterface {
           }
         }
       } else {
+        // this is an unqualified family. append the family name and NO ':'
         sb.append(Bytes.toStringBinary((byte[])e.getKey()));
-        sb.append(':');
       }
       if (i.hasNext()) {
         sb.append(',');
@@ -172,7 +172,14 @@ public class RemoteHTable implements HTableInterface {
       for (CellModel cell: row.getCells()) {
         byte[][] split = KeyValue.parseColumn(cell.getColumn());
         byte[] column = split[0];
-        byte[] qualifier = split.length > 1 ? split[1] : null;
+        byte[] qualifier = null;
+        if (split.length == 1) {
+          qualifier = HConstants.EMPTY_BYTE_ARRAY;
+        } else if (split.length == 2) {
+          qualifier = split[1];
+        } else {
+          throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
+        }
         kvs.add(new KeyValue(row.getKey(), column, qualifier,
           cell.getTimestamp(), cell.getValue()));
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index c22aa837f79..bb45c880a5e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -197,12 +197,10 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
 
   private boolean internalQueueTincrement(TIncrement inc) throws TException {
     byte[][] famAndQf = KeyValue.parseColumn(inc.getColumn());
-    if (famAndQf.length < 1) return false;
-    byte[] qual = famAndQf.length == 1 ? new byte[0] : famAndQf[1];
+    if (famAndQf.length != 2) return false;
 
-    return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], qual,
+    return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], famAndQf[1],
       inc.getAmmount());
-
   }
 
   private boolean internalQueueIncrement(byte[] tableName, byte[] rowKey, byte[] fam,
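Both increment paths (IncrementCoalescer above and ThriftUtilities further down) now refuse a bare family rather than silently incrementing the empty qualifier. Client-side, that means a TIncrement column should always carry the colon-delimited form. A hedged sketch, assuming the stock Thrift-generated all-field constructor for TIncrement (values are illustrative):

```java
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementColumnSpec {
  public static void main(String[] args) {
    // "fam:counter" is accepted; a bare "fam" now parses to length 1 on the
    // server side and the increment is dropped (returns false/null).
    TIncrement tinc = new TIncrement(
        ByteBuffer.wrap(Bytes.toBytes("exampleTable")),
        ByteBuffer.wrap(Bytes.toBytes("row1")),
        ByteBuffer.wrap(Bytes.toBytes("fam:counter")),
        1L);
    System.out.println(tinc);
  }
}
```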
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index eecc9e2214e..6b6278b8117 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -651,12 +651,22 @@ public class ThriftServerRunner implements Runnable {
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError {
       byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-      if(famAndQf.length == 1) {
-        return get(tableName, row, famAndQf[0], new byte[0], attributes);
+      if (famAndQf.length == 1) {
+        return get(tableName, row, famAndQf[0], null, attributes);
       }
-      return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
+      if (famAndQf.length == 2) {
+        return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
+      }
+      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
     }
 
+    /**
+     * Note: this internal interface is slightly different from public APIs in regard to handling
+     * of the qualifier. Here we differ from the public Java API in that null != byte[0]. Rather,
+     * we respect qual == null as a request for the entire column family. The caller (
+     * {@link #get(ByteBuffer, ByteBuffer, ByteBuffer, Map)}) interface IS consistent in that the
+     * column is parsed as normal.
+     */
     protected List<TCell> get(ByteBuffer tableName,
         ByteBuffer row,
         byte[] family,
@@ -666,7 +676,7 @@ public class ThriftServerRunner implements Runnable {
       HTable table = getTable(tableName);
       Get get = new Get(getBytes(row));
       addAttributes(get, attributes);
-      if (qualifier == null || qualifier.length == 0) {
+      if (qualifier == null) {
         get.addFamily(family);
       } else {
         get.addColumn(family, qualifier);
@@ -681,27 +691,38 @@ public class ThriftServerRunner implements Runnable {
 
     @Deprecated
     @Override
-    public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row,
-        ByteBuffer column, int numVersions,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+        int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
       byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
       if(famAndQf.length == 1) {
-        return getVer(tableName, row, famAndQf[0],
-            new byte[0], numVersions, attributes);
+        return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
       }
-      return getVer(tableName, row,
-          famAndQf[0], famAndQf[1], numVersions, attributes);
+      if (famAndQf.length == 2) {
+        return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
+      }
+      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
+    }
 
-    public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row,
-        byte[] family,
-        byte[] qualifier, int numVersions,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    /**
+     * Note: this public interface is slightly different from public Java APIs in regard to
+     * handling of the qualifier. Here we differ from the public Java API in that null != byte[0].
+     * Rather, we respect qual == null as a request for the entire column family. If you want to
+     * access the entire column family, use
+     * {@link #getVer(ByteBuffer, ByteBuffer, ByteBuffer, int, Map)} with a {@code column} value
+     * that lacks a {@code ':'}.
+     */
+    public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, byte[] family,
+        byte[] qualifier, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
       try {
         HTable table = getTable(tableName);
         Get get = new Get(getBytes(row));
         addAttributes(get, attributes);
-        get.addColumn(family, qualifier);
+        if (null == qualifier) {
+          get.addFamily(family);
+        } else {
+          get.addColumn(family, qualifier);
+        }
         get.setMaxVersions(numVersions);
         Result result = table.get(get);
         return ThriftUtilities.cellFromHBase(result.rawCells());
@@ -713,30 +734,38 @@ public class ThriftServerRunner implements Runnable {
 
     @Deprecated
     @Override
-    public List<TCell> getVerTs(ByteBuffer tableName,
-        ByteBuffer row,
-        ByteBuffer column,
-        long timestamp,
-        int numVersions,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
+        long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
       byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
-      if(famAndQf.length == 1) {
-        return getVerTs(tableName, row, famAndQf[0], new byte[0], timestamp,
-            numVersions, attributes);
+      if (famAndQf.length == 1) {
+        return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
       }
-      return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp,
-          numVersions, attributes);
+      if (famAndQf.length == 2) {
+        return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
+          attributes);
+      }
+      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
     }
 
-    protected List<TCell> getVerTs(ByteBuffer tableName,
-        ByteBuffer row, byte [] family,
-        byte [] qualifier, long timestamp, int numVersions,
-        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
+    /**
+     * Note: this internal interface is slightly different from public APIs in regard to handling
+     * of the qualifier. Here we differ from the public Java API in that null != byte[0]. Rather,
+     * we respect qual == null as a request for the entire column family. The caller (
+     * {@link #getVerTs(ByteBuffer, ByteBuffer, ByteBuffer, long, int, Map)}) interface IS
+     * consistent in that the column is parsed as normal.
+     */
+    protected List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, byte[] family,
+        byte[] qualifier, long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes)
+        throws IOError {
       try {
         HTable table = getTable(tableName);
         Get get = new Get(getBytes(row));
         addAttributes(get, attributes);
-        get.addColumn(family, qualifier);
+        if (null == qualifier) {
+          get.addFamily(family);
+        } else {
+          get.addColumn(family, qualifier);
+        }
         get.setTimeRange(0, timestamp);
         get.setMaxVersions(numVersions);
         Result result = table.get(get);
@@ -1002,9 +1031,8 @@ public class ThriftServerRunner implements Runnable {
               : Durability.SKIP_WAL);
         } else {
           if(famAndQf.length == 1) {
-            put.add(famAndQf[0], HConstants.EMPTY_BYTE_ARRAY,
-                m.value != null ? getBytes(m.value)
-                : HConstants.EMPTY_BYTE_ARRAY);
+            LOG.warn("No column qualifier specified. Delete is the only mutation supported "
+                + "over the whole column family.");
           } else {
             put.add(famAndQf[0], famAndQf[1],
                 m.value != null ? getBytes(m.value)
@@ -1060,14 +1088,16 @@ public class ThriftServerRunner implements Runnable {
                 delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
                     : Durability.SKIP_WAL);
               } else {
-                if(famAndQf.length == 1) {
-                  put.add(famAndQf[0], HConstants.EMPTY_BYTE_ARRAY,
-                      m.value != null ? getBytes(m.value)
-                      : HConstants.EMPTY_BYTE_ARRAY);
-                } else {
+                if (famAndQf.length == 1) {
+                  LOG.warn("No column qualifier specified. Delete is the only mutation supported "
+                      + "over the whole column family.");
+                }
+                if (famAndQf.length == 2) {
                   put.add(famAndQf[0], famAndQf[1],
                       m.value != null ? getBytes(m.value)
                       : HConstants.EMPTY_BYTE_ARRAY);
+                } else {
+                  throw new IllegalArgumentException("Invalid famAndQf provided.");
                 }
                 put.setDurability(m.writeToWAL ? Durability.SYNC_WAL
                     : Durability.SKIP_WAL);
@@ -1102,8 +1132,7 @@ public class ThriftServerRunner implements Runnable {
         throws IOError, IllegalArgument, TException {
       byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
       if(famAndQf.length == 1) {
-        return atomicIncrement(tableName, row, famAndQf[0], new byte[0],
-            amount);
+        return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
       }
       return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
     }
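The Note javadocs added above encode a convention that is easy to misread: in these handler overloads a null qualifier means "the whole family", while an empty byte[] means "the empty qualifier only", so null != byte[0]. A compact sketch of that dispatch, using a hypothetical helper rather than the handler itself:

```java
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class ThriftQualifierConvention {
  /** Hypothetical helper mirroring the patched overloads above. */
  static void applyColumn(Get get, byte[] column) {
    byte[][] famAndQf = KeyValue.parseColumn(column);
    // Length 1 ("fam"): the public methods now pass qualifier == null down,
    // and the protected overloads translate null into a family-wide read.
    byte[] qualifier = famAndQf.length == 1 ? null : famAndQf[1];
    if (qualifier == null) {
      get.addFamily(famAndQf[0]);
    } else {
      // byte[0] reaches addColumn and selects only the empty qualifier.
      get.addColumn(famAndQf[0], qualifier);
    }
  }

  public static void main(String[] args) {
    Get get = new Get(Bytes.toBytes("row1"));
    applyColumn(get, Bytes.toBytes("fam"));  // whole family
    applyColumn(get, Bytes.toBytes("fam:")); // empty qualifier only
  }
}
```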
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 582befbfc20..d8d0ae8aace 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -198,9 +198,8 @@ public class ThriftUtilities {
   public static Increment incrementFromThrift(TIncrement tincrement) {
     Increment inc = new Increment(tincrement.getRow());
     byte[][] famAndQf = KeyValue.parseColumn(tincrement.getColumn());
-    if (famAndQf.length <1 ) return null;
-    byte[] qual = famAndQf.length == 1 ? new byte[0]: famAndQf[1];
-    inc.addColumn(famAndQf[0], qual, tincrement.getAmmount());
+    if (famAndQf.length != 2) return null;
+    inc.addColumn(famAndQf[0], famAndQf[1], tincrement.getAmmount());
     return inc;
   }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
index a4cb8479f0b..4f24cc996d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
@@ -28,8 +28,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -54,7 +52,6 @@ import org.junit.experimental.categories.Category;
 
 @Category(MediumTests.class)
 public class TestRemoteTable {
-  private static final Log LOG = LogFactory.getLog(TestRemoteTable.class);
   private static final String TABLE = "TestRemoteTable";
   private static final byte[] ROW_1 = Bytes.toBytes("testrow1");
   private static final byte[] ROW_2 = Bytes.toBytes("testrow2");
@@ -82,13 +79,18 @@ public class TestRemoteTable {
     TEST_UTIL.startMiniCluster();
     REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
     HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
-    if (!admin.tableExists(TABLE)) {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));
-      htd.addFamily(new HColumnDescriptor(COLUMN_1).setMaxVersions(3));
-      htd.addFamily(new HColumnDescriptor(COLUMN_2).setMaxVersions(3));
-      htd.addFamily(new HColumnDescriptor(COLUMN_3).setMaxVersions(3));
-      admin.createTable(htd);
-      HTable table = new HTable(TEST_UTIL.getConfiguration(), TABLE);
+    if (admin.tableExists(TABLE)) {
+      if (admin.isTableEnabled(TABLE)) admin.disableTable(TABLE);
+      admin.deleteTable(TABLE);
+    }
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));
+    htd.addFamily(new HColumnDescriptor(COLUMN_1).setMaxVersions(3));
+    htd.addFamily(new HColumnDescriptor(COLUMN_2).setMaxVersions(3));
+    htd.addFamily(new HColumnDescriptor(COLUMN_3).setMaxVersions(3));
+    admin.createTable(htd);
+    HTable table = null;
+    try {
+      table = new HTable(TEST_UTIL.getConfiguration(), TABLE);
       Put put = new Put(ROW_1);
       put.add(COLUMN_1, QUALIFIER_1, TS_2, VALUE_1);
       table.put(put);
@@ -98,6 +100,8 @@ public class TestRemoteTable {
       put.add(COLUMN_2, QUALIFIER_2, TS_2, VALUE_2);
       table.put(put);
       table.flushCommits();
+    } finally {
+      if (null != table) table.close();
     }
     remoteTable = new RemoteHTable(
       new Client(new Cluster().add("localhost",
@@ -114,9 +118,14 @@ public class TestRemoteTable {
 
   @Test
   public void testGetTableDescriptor() throws IOException {
-    HTableDescriptor local = new HTable(TEST_UTIL.getConfiguration(),
-      TABLE).getTableDescriptor();
-    assertEquals(remoteTable.getTableDescriptor(), local);
+    HTable table = null;
+    try {
+      table = new HTable(TEST_UTIL.getConfiguration(), TABLE);
+      HTableDescriptor local = table.getTableDescriptor();
+      assertEquals(remoteTable.getTableDescriptor(), local);
+    } finally {
+      if (null != table) table.close();
+    }
   }
 
   @Test
diff --git a/hbase-server/src/test/ruby/hbase/table_test.rb b/hbase-server/src/test/ruby/hbase/table_test.rb
index 8170c5330ca..4eba3e2072d 100644
--- a/hbase-server/src/test/ruby/hbase/table_test.rb
+++ b/hbase-server/src/test/ruby/hbase/table_test.rb
@@ -78,10 +78,10 @@ module Hbase
       assert_nil(qual)
     end
 
-    define_test "parse_column_name should not return a qualifier for family-only column specifiers" do
+    define_test "parse_column_name should support an empty column qualifier" do
       col, qual = table('hbase:meta').parse_column_name('foo:')
       assert_not_nil(col)
-      assert_nil(qual)
+      assert_not_nil(qual)
    end
 
     define_test "parse_column_name should return a qualifier for family:qualifier column specifiers" do
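End to end, the REST client now preserves the same distinction. A usage sketch against a running REST gateway; the endpoint and table name are illustrative, and the serialized column specs follow from the RemoteHTable hunk above ("fam" for a bare family, "fam:" for the empty qualifier):

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoteEmptyQualifier {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    Client client = new Client(new Cluster().add("localhost", 8080));
    RemoteHTable table = new RemoteHTable(client, conf, "exampleTable");
    try {
      Get wholeFamily = new Get(Bytes.toBytes("row1"));
      wholeFamily.addFamily(Bytes.toBytes("fam"));                  // spec "fam", no colon
      Get emptyQualifier = new Get(Bytes.toBytes("row1"));
      emptyQualifier.addColumn(Bytes.toBytes("fam"), new byte[0]);  // spec "fam:"
      Result all = table.get(wholeFamily);    // every cell in fam
      Result one = table.get(emptyQualifier); // only the empty-qualifier cell
      System.out.println(all.size() + " cells vs " + one.size());
    } finally {
      table.close();
    }
  }
}
```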