diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 9f8fa552385..c283c8d156c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -61,7 +61,8 @@ import com.google.common.primitives.Longs;
* KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
* interpreting the content as KeyValue. The KeyValue format inside a byte array is:
* <keylength> <valuelength> <key> <value> Key is further decomposed as:
- * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier> <timestamp> <keytype>
+ * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier>
+ * <timestamp> <keytype>
* The rowlength maximum is Short.MAX_SIZE, column family length maximum
* is Byte.MAX_SIZE, and column qualifier + key length must be <
* Integer.MAX_SIZE. The column does not contain the family/qualifier delimiter,
@@ -1294,10 +1295,16 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
}

/**
- * Splits a column in family:qualifier form into separate byte arrays.
+ * Splits a column in {@code family:qualifier} form into separate byte arrays. An empty qualifier
+ * (ie, {@code fam:}) is parsed as { fam, EMPTY_BYTE_ARRAY } while no delimiter (ie,
+ * {@code fam}) is parsed as an array of one element, { fam }.
+ *
+ * Don't forget, HBase DOES support empty qualifiers. (see HBASE-9549)
+ *
+ * Not recommended for use, as this is an old-style API.
- * @param c The column.
+ *
+ * @param c The column.
* @return The parsed column.
*/
public static byte [][] parseColumn(byte [] c) {
@@ -1306,10 +1313,10 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
// If no delimiter, return array of size 1
return new byte [][] { c };
} else if(index == c.length - 1) {
- // Only a family, return array size 1
+ // family with empty qualifier, return array size 2
byte [] family = new byte[c.length-1];
System.arraycopy(c, 0, family, 0, family.length);
- return new byte [][] { family };
+ return new byte [][] { family, HConstants.EMPTY_BYTE_ARRAY };
}
// Family and column, return array size 2
final byte [][] result = new byte [2][];
@@ -1317,8 +1324,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
System.arraycopy(c, 0, result[0], 0, index);
final int len = c.length - (index + 1);
result[1] = new byte[len];
- System.arraycopy(c, index + 1 /*Skip delimiter*/, result[1], 0,
-   len);
+ System.arraycopy(c, index + 1 /* Skip delimiter */, result[1], 0, len);
return result;
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index eda5dd88ff3..021b10163de 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -77,6 +77,11 @@ public class TestKeyValue extends TestCase {
// Test empty value and empty column -- both should work. (not empty fam)
check(Bytes.toBytes(getName()), Bytes.toBytes(getName()), null, 1, null);
check(HConstants.EMPTY_BYTE_ARRAY, Bytes.toBytes(getName()), null, 1, null);
+ // empty qual is equivalent to null qual
+ assertEquals(
+   new KeyValue(Bytes.toBytes("rk"), Bytes.toBytes("fam"), null, 1, (byte[]) null),
+   new KeyValue(Bytes.toBytes("rk"), Bytes.toBytes("fam"),
+     HConstants.EMPTY_BYTE_ARRAY, 1, (byte[]) null));
}

private void check(final byte [] row, final byte [] family, byte [] qualifier,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index dd6ef511229..d6442a4e3e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -54,7 +54,7 @@ implements Configurable {
public static final String SCAN_ROW_STOP = "hbase.mapreduce.scan.row.stop";
/** Column Family to Scan */
public static final String SCAN_COLUMN_FAMILY = "hbase.mapreduce.scan.column.family";
- /** Space delimited list of columns to scan. */
+ /** Space delimited list of columns and column families to scan. */
public static final String SCAN_COLUMNS = "hbase.mapreduce.scan.columns";
/** The timestamp used to filter columns with a specific timestamp. */
public static final String SCAN_TIMESTAMP = "hbase.mapreduce.scan.timestamp";
@@ -159,30 +159,34 @@ implements Configurable {

/**
* Parses a combined family and qualifier and adds either both or just the
- * family in case there is not qualifier. This assumes the older colon
- * divided notation, e.g. "data:contents" or "meta:".
- *
- * Note: It will through an error when the colon is missing.
+ * family in case there is no qualifier. This assumes the older colon
+ * divided notation, e.g. "family:qualifier".
*
+ * @param scan The Scan to update.
* @param familyAndQualifier family and qualifier
- * @return A reference to this instance.
- * @throws IllegalArgumentException When the colon is missing.
+ * @throws IllegalArgumentException When familyAndQualifier is invalid.
*/
private static void addColumn(Scan scan, byte[] familyAndQualifier) {
byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
- if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
+ if (fq.length == 1) {
+ scan.addFamily(fq[0]);
+ } else if (fq.length == 2) {
scan.addColumn(fq[0], fq[1]);
} else {
- scan.addFamily(fq[0]);
+ throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
}
}

/**
* Adds an array of columns specified using old format, family:qualifier.
* <p>
- * Overrides previous calls to addFamily for any families in the input.
+ * Overrides previous calls to {@link Scan#addColumn(byte[], byte[])} for any families in the
+ * input.
*
- * @param columns array of columns, formatted as family:qualifier
+ * @param scan The Scan to update.
+ * @param columns array of columns, formatted as family:qualifier
+ * @see Scan#addColumn(byte[], byte[])
*/
public static void addColumns(Scan scan, byte [][] columns) {
for (byte[] column : columns) {
@@ -191,13 +195,10 @@ implements Configurable {
}
/**
- * Convenience method to help parse old style (or rather user entry on the
- * command line) column definitions, e.g. "data:contents mime:". The columns
- * must be space delimited and always have a colon (":") to denote family
- * and qualifier.
+ * Convenience method to parse a string representation of an array of column specifiers.
*
+ * @param scan The Scan to update.
* @param columns The columns to parse.
- * @return A reference to this instance.
*/
private static void addColumns(Scan scan, String columns) {
String[] cols = columns.split(" ");
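
Taken together, the KeyValue and TableInputFormat hunks above pin down a three-way contract for KeyValue.parseColumn. The following minimal, self-contained sketch (illustrative only, not part of the patch; the class name is invented) shows the shape each input now produces:

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseColumnContract {
  public static void main(String[] args) {
    // "fam" -- no delimiter: a single-element array holding the bare family.
    byte[][] fam = KeyValue.parseColumn(Bytes.toBytes("fam"));
    System.out.println(fam.length);                 // 1

    // "fam:" -- trailing delimiter: family plus an explicit empty qualifier
    // (HBASE-9549); no longer collapsed into a bare family.
    byte[][] famEmpty = KeyValue.parseColumn(Bytes.toBytes("fam:"));
    System.out.println(famEmpty.length);            // 2
    System.out.println(famEmpty[1].length);         // 0

    // "fam:qual" -- family and qualifier, with the delimiter dropped.
    byte[][] famQual = KeyValue.parseColumn(Bytes.toBytes("fam:qual"));
    System.out.println(Bytes.toString(famQual[1])); // qual
  }
}

This is also why the TestKeyValue addition above asserts that a null qualifier and HConstants.EMPTY_BYTE_ARRAY build equal KeyValues: callers may see either spelling for "no qualifier".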
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index c90ce6bda34..486ef921525 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -63,8 +63,8 @@ import org.apache.hadoop.net.DNS;
* Bytes.toBytes("exampleTable"));
* // mandatory
* setHTable(exampleTable);
- * Text[] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
- * Bytes.toBytes("columnB") };
+ * byte [][] inputColumns = new byte [][] { Bytes.toBytes("cf1:columnA"),
+ *   Bytes.toBytes("cf2") };
* // mandatory
* setInputColumns(inputColumns);
* RowFilterInterface exampleFilter = new RegExpRowFilter("keyPrefix.*");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 2a397ec1a46..3a587f81c5f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -217,11 +217,12 @@ public class RowResource extends ResourceBase {
.build();
}
byte [][] parts = KeyValue.parseColumn(col);
- if (parts.length == 2 && parts[1].length > 0) {
- put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
- } else {
- put.add(parts[0], null, cell.getTimestamp(), cell.getValue());
+ if (parts.length != 2) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
}
+ put.add(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
if (LOG.isDebugEnabled()) {
@@ -285,11 +286,12 @@ public class RowResource extends ResourceBase {
}
Put put = new Put(row);
byte parts[][] = KeyValue.parseColumn(column);
- if (parts.length == 2 && parts[1].length > 0) {
- put.add(parts[0], parts[1], timestamp, message);
- } else {
- put.add(parts[0], null, timestamp, message);
+ if (parts.length != 2) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
}
+ put.add(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
if (LOG.isDebugEnabled()) {
@@ -373,16 +375,24 @@ public class RowResource extends ResourceBase {
for (byte[] column: rowspec.getColumns()) {
byte[][] split = KeyValue.parseColumn(column);
if (rowspec.hasTimestamp()) {
- if (split.length == 2 && split[1].length != 0) {
+ if (split.length == 1) {
+ delete.deleteFamily(split[0], rowspec.getTimestamp());
+ } else if (split.length == 2) {
delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
} else {
- delete.deleteFamily(split[0], rowspec.getTimestamp());
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
}
} else {
- if (split.length == 2 && split[1].length != 0) {
+ if (split.length == 1) {
+ delete.deleteFamily(split[0]);
+ } else if (split.length == 2) {
delete.deleteColumns(split[0], split[1]);
} else {
- delete.deleteFamily(split[0]);
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
}
}
}
@@ -441,28 +451,26 @@ public class RowResource extends ResourceBase {
CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
byte[] valueToCheckColumn = valueToCheckCell.getColumn();
byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
- if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
- CellModel valueToPutCell = null;
- for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
- if(Bytes.equals(cellModels.get(i).getColumn(),
- valueToCheckCell.getColumn())) {
- valueToPutCell = cellModels.get(i);
- break;
- }
- }
- if (valueToPutCell != null) {
- put.add(valueToPutParts[0], valueToPutParts[1], valueToPutCell
- .getTimestamp(), valueToPutCell.getValue());
- } else {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- } else {
+ if (valueToPutParts.length != 2) {
return Response.status(Response.Status.BAD_REQUEST)
.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
.build();
}
+ CellModel valueToPutCell = null;
+ for (int i = 0, n = cellModelCount - 1; i < n; i++) {
+ if (Bytes.equals(cellModels.get(i).getColumn(),
+ valueToCheckCell.getColumn())) {
+ valueToPutCell = cellModels.get(i);
+ break;
+ }
+ }
+ if (valueToPutCell == null) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
+ .build();
+ }
+ put.add(valueToPutParts[0], valueToPutParts[1], valueToPutCell
+ .getTimestamp(), valueToPutCell.getValue());
table = servlet.getTable(this.tableResource.getName());
boolean retValue = table.checkAndPut(key, valueToPutParts[0],
@@ -527,13 +535,12 @@ public class RowResource extends ResourceBase {
}
}
byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
- if (parts.length == 2 && parts[1].length > 0) {
- delete.deleteColumns(parts[0], parts[1]);
- } else {
+ if (parts.length != 2) {
return Response.status(Response.Status.BAD_REQUEST)
.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
.build();
}
+ delete.deleteColumns(parts[0], parts[1]);
table = servlet.getTable(tableResource.getName());
boolean retValue = table.checkAndDelete(key, parts[0], parts[1],
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
index ed796072c6a..ee84e6d6b53 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -50,10 +50,12 @@ public class RowResultGenerator extends ResultGenerator {
if (rowspec.hasColumns()) {
for (byte[] col: rowspec.getColumns()) {
byte[][] split = KeyValue.parseColumn(col);
- if (split.length == 2 && split[1].length != 0) {
+ if (split.length == 1) {
+ get.addFamily(split[0]);
+ } else if (split.length == 2) {
get.addColumn(split[0], split[1]);
} else {
- get.addFamily(split[0]);
+ throw new IllegalArgumentException("Invalid column specifier.");
}
}
}
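
The same three-way branch on the parsed length now appears in RowResource above, in RowResultGenerator here, and in ScannerResultGenerator below. A hypothetical helper along these lines (names invented for illustration; the patch deliberately keeps the branch inline at each call site) captures the shared shape:

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public final class ColumnSpecHelper {
  // One part = whole family; two parts = a single column (the qualifier may
  // be empty); anything else is a client error.
  public static void addColumnOrFamily(Get get, byte[] spec) {
    byte[][] split = KeyValue.parseColumn(spec);
    if (split.length == 1) {
      get.addFamily(split[0]);
    } else if (split.length == 2) {
      get.addColumn(split[0], split[1]);
    } else {
      throw new IllegalArgumentException("Invalid column specifier.");
    }
  }

  public static void main(String[] args) {
    Get get = new Get(Bytes.toBytes("row1"));
    addColumnOrFamily(get, Bytes.toBytes("fam"));      // whole family
    addColumnOrFamily(get, Bytes.toBytes("fam:qual")); // one column
    addColumnOrFamily(get, Bytes.toBytes("fam:"));     // empty-qualifier column
  }
}

Keeping the branch inline, as the patch does, lets each REST resource map the failure to its own error path (an HTTP 400 response versus an IllegalArgumentException).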
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
index 43743828701..5e252608b6f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
@@ -25,7 +25,6 @@ import java.util.Collection;
import java.util.TreeSet;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes;
@@ -122,11 +121,7 @@ public class RowSpec {
}
String s = URLDecoder.decode(column.toString(),
HConstants.UTF8_ENCODING);
- if (!s.contains(":")) {
- this.columns.add(Bytes.toBytes(s + ":"));
- } else {
- this.columns.add(Bytes.toBytes(s));
- }
+ this.columns.add(Bytes.toBytes(s));
column.setLength(0);
i++;
continue;
@@ -136,14 +131,10 @@ public class RowSpec {
}
i++;
// trailing list entry
- if (column.length() > 1) {
+ if (column.length() > 0) {
String s = URLDecoder.decode(column.toString(),
HConstants.UTF8_ENCODING);
- if (!s.contains(":")) {
- this.columns.add(Bytes.toBytes(s + ":"));
- } else {
- this.columns.add(Bytes.toBytes(s));
- }
+ this.columns.add(Bytes.toBytes(s));
}
} catch (IndexOutOfBoundsException e) {
throw new IllegalArgumentException(e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
index ebeae0d8e7f..09fb7d4073d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
@@ -76,10 +76,12 @@ public class ScannerResultGenerator extends ResultGenerator {
byte[][] columns = rowspec.getColumns();
for (byte[] column: columns) {
byte[][] split = KeyValue.parseColumn(column);
- if (split.length > 1 && (split[1] != null && split[1].length != 0)) {
+ if (split.length == 1) {
+ scan.addFamily(split[0]);
+ } else if (split.length == 2) {
scan.addColumn(split[0], split[1]);
} else {
- scan.addFamily(split[0]);
+ throw new IllegalArgumentException("Invalid column specifier.");
}
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index 236026bdd57..fb16769f1f3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -28,17 +28,8 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.util.StringUtils;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -48,17 +39,22 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.io.TimeRange;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
@@ -66,6 +62,10 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
/**
* HTable interface to remote tables accessed via REST gateway
@@ -116,8 +116,8 @@ public class RemoteHTable implements HTableInterface {
}
}
} else {
+ // This is an unqualified family: append the family name with no trailing ':'.
sb.append(Bytes.toStringBinary((byte[])e.getKey()));
- sb.append(':');
}
if (i.hasNext()) {
sb.append(',');
@@ -172,7 +172,14 @@ public class RemoteHTable implements HTableInterface {
for (CellModel cell: row.getCells()) {
byte[][] split = KeyValue.parseColumn(cell.getColumn());
byte[] column = split[0];
- byte[] qualifier = split.length > 1 ? split[1] : null;
+ byte[] qualifier = null;
+ if (split.length == 1) {
+ qualifier = HConstants.EMPTY_BYTE_ARRAY;
+ } else if (split.length == 2) {
+ qualifier = split[1];
+ } else {
+ throw new IllegalArgumentException("Invalid column specifier.");
+ }
kvs.add(new KeyValue(row.getKey(), column, qualifier,
cell.getTimestamp(), cell.getValue()));
}
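
A short sketch (illustrative, not in the patch) of the round trip the two RemoteHTable hunks set up: buildRowSpec now sends an unqualified family to the gateway without a trailing colon, and a bare column name coming back maps to the empty qualifier rather than null:

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoteColumnMapping {
  public static void main(String[] args) {
    // A bare column name from the gateway ("fam", no colon) now yields an
    // empty qualifier, mirroring what buildRowSpec sends for addFamily.
    byte[][] split = KeyValue.parseColumn(Bytes.toBytes("fam"));
    byte[] qualifier = (split.length == 1) ? HConstants.EMPTY_BYTE_ARRAY : split[1];
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), split[0], qualifier,
        1L, Bytes.toBytes("value"));
    System.out.println(kv); // e.g. row1/fam:/1/Put/vlen=5 (exact format may vary)
  }
}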
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index c22aa837f79..bb45c880a5e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -197,12 +197,10 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
private boolean internalQueueTincrement(TIncrement inc) throws TException {
byte[][] famAndQf = KeyValue.parseColumn(inc.getColumn());
- if (famAndQf.length < 1) return false;
- byte[] qual = famAndQf.length == 1 ? new byte[0] : famAndQf[1];
+ if (famAndQf.length != 2) return false;
- return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], qual,
+ return internalQueueIncrement(inc.getTable(), inc.getRow(), famAndQf[0], famAndQf[1],
inc.getAmmount());
-
}
private boolean internalQueueIncrement(byte[] tableName, byte[] rowKey, byte[] fam,
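
One caller-visible consequence of the IncrementCoalescer change: a coalesced Thrift increment must now name both a family and a qualifier ("fam:qual", or "fam:" for the empty qualifier); a bare family makes internalQueueTincrement return false, and the increment is not queued. A hedged sketch of the required column format (TIncrement and its full-args constructor come from the generated Thrift bindings; treat the exact signature as an assumption):

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.util.Bytes;

public class CoalescedIncrementFormat {
  public static void main(String[] args) {
    // "fam:counter" parses into two parts, so the increment is queued.
    TIncrement inc = new TIncrement(
        ByteBuffer.wrap(Bytes.toBytes("t1")),
        ByteBuffer.wrap(Bytes.toBytes("row1")),
        ByteBuffer.wrap(Bytes.toBytes("fam:counter")),
        1L); // the field is spelled "ammount" in the IDL, hence getAmmount() above
    System.out.println(inc);
  }
}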
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index eecc9e2214e..6b6278b8117 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -651,12 +651,22 @@ public class ThriftServerRunner implements Runnable {
Map