HBASE-7130 NULL qualifier is ignored
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1407695 13f79535-47bb-0310-9956-ffa450edef68
parent f317ef5b57
commit e3b9058715
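Before this change, Get.addColumn(family, qualifier) and Scan.addColumn(family, qualifier) silently ignored a null qualifier: the old guard only added the qualifier when it was non-null, so asking for the no-qualifier cell of a family degenerated into a whole-family selection, and the aggregation coprocessor could die with a NullPointerException. The fix normalizes null to HConstants.EMPTY_BYTE_ARRAY, which is the key under which Put.add(family, null, value) actually stores the cell, and threads matching null-safety through AggregateImplementation and the protobuf conversions. A minimal sketch of the client-visible behavior, mirroring the new tests below (ROW, FAMILY, QUALIFIER, VALUE as in TestFromClientSide):

    // Two cells in one row: one under QUALIFIER, one under the null
    // (i.e. empty) qualifier.
    Put put = new Put(ROW);
    put.add(FAMILY, QUALIFIER, VALUE);
    put.add(FAMILY, null, VALUE);
    table.put(put);

    Get get = new Get(ROW);
    get.addColumn(FAMILY, null);             // previously ignored, now honored
    assertEquals(1, table.get(get).size());  // only the empty-qualifier cell

    get = new Get(ROW);
    get.addFamily(FAMILY);
    assertEquals(2, table.get(get).size());  // whole family: both cells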
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.TimeRange;
@@ -140,9 +140,10 @@ public class Get extends OperationWithAttributes
     if(set == null) {
       set = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
     }
-    if (qualifier != null) {
-      set.add(qualifier);
+    if (qualifier == null) {
+      qualifier = HConstants.EMPTY_BYTE_ARRAY;
     }
+    set.add(qualifier);
     familyMap.put(family, set);
     return this;
   }
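With the guard inverted, a null qualifier is rewritten to the empty byte array before being added, so it always lands in the family map instead of vanishing. In client terms, the two calls below are equivalent after this patch (a sketch, not new API):

    get.addColumn(family, null);
    get.addColumn(family, HConstants.EMPTY_BYTE_ARRAY);  // same effect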
@@ -21,14 +21,11 @@ package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableFactories;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -250,11 +247,11 @@ public class Scan extends OperationWithAttributes {
     if(set == null) {
       set = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
     }
-    if (qualifier != null) {
-      set.add(qualifier);
+    if (qualifier == null) {
+      qualifier = HConstants.EMPTY_BYTE_ARRAY;
     }
+    set.add(qualifier);
     familyMap.put(family, set);
 
     return this;
   }
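Scan.addColumn() receives the identical normalization, so Get and Scan agree on what a null qualifier selects.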
@@ -79,7 +79,11 @@ public class AggregateImplementation<T, S> extends AggregateService implements
       scanner = env.getRegion().getScanner(scan);
       List<KeyValue> results = new ArrayList<KeyValue>();
       byte[] colFamily = scan.getFamilies()[0];
-      byte[] qualifier = scan.getFamilyMap().get(colFamily).pollFirst();
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] qualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        qualifier = qualifiers.pollFirst();
+      }
       // qualifier can be null.
       boolean hasMoreRows = false;
       do {
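The old one-liner, scan.getFamilyMap().get(colFamily).pollFirst(), threw a NullPointerException whenever the scan named the family without qualifiers (addFamily()), because the family map then holds no qualifier set at all. The same guarded lookup is repeated inline in each aggregate endpoint below; it is equivalent to this hypothetical helper (firstQualifier is illustrative, not part of the patch):

    // Returns the first qualifier requested for the scanned family, or
    // null when the scan named only the family. Note that pollFirst()
    // removes the element from the set.
    private static byte[] firstQualifier(Scan scan, byte[] colFamily) {
      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
      if (qualifiers != null && !qualifiers.isEmpty()) {
        return qualifiers.pollFirst();
      }
      return null;
    }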
@@ -129,7 +133,11 @@ public class AggregateImplementation<T, S> extends AggregateService implements
       scanner = env.getRegion().getScanner(scan);
       List<KeyValue> results = new ArrayList<KeyValue>();
       byte[] colFamily = scan.getFamilies()[0];
-      byte[] qualifier = scan.getFamilyMap().get(colFamily).pollFirst();
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] qualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        qualifier = qualifiers.pollFirst();
+      }
       boolean hasMoreRows = false;
       do {
         hasMoreRows = scanner.next(results);
@@ -177,7 +185,11 @@ public class AggregateImplementation<T, S> extends AggregateService implements
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
       byte[] colFamily = scan.getFamilies()[0];
-      byte[] qualifier = scan.getFamilyMap().get(colFamily).pollFirst();
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] qualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        qualifier = qualifiers.pollFirst();
+      }
       List<KeyValue> results = new ArrayList<KeyValue>();
       boolean hasMoreRows = false;
       do {
@@ -222,7 +234,11 @@ public class AggregateImplementation<T, S> extends AggregateService implements
     try {
       Scan scan = ProtobufUtil.toScan(request.getScan());
       byte[] colFamily = scan.getFamilies()[0];
-      byte[] qualifier = scan.getFamilyMap().get(colFamily).pollFirst();
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] qualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        qualifier = qualifiers.pollFirst();
+      }
       if (scan.getFilter() == null && qualifier == null)
         scan.setFilter(new FirstKeyOnlyFilter());
       scanner = env.getRegion().getScanner(scan);
@@ -277,7 +293,11 @@ public class AggregateImplementation<T, S> extends AggregateService implements
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
       byte[] colFamily = scan.getFamilies()[0];
-      byte[] qualifier = scan.getFamilyMap().get(colFamily).pollFirst();
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] qualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        qualifier = qualifiers.pollFirst();
+      }
       List<KeyValue> results = new ArrayList<KeyValue>();
       boolean hasMoreRows = false;
 
@@ -332,7 +352,11 @@ public class AggregateImplementation<T, S> extends AggregateService implements
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
       byte[] colFamily = scan.getFamilies()[0];
-      byte[] qualifier = scan.getFamilyMap().get(colFamily).pollFirst();
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] qualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        qualifier = qualifiers.pollFirst();
+      }
       List<KeyValue> results = new ArrayList<KeyValue>();
 
       boolean hasMoreRows = false;
@@ -391,10 +415,13 @@ public class AggregateImplementation<T, S> extends AggregateService implements
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
       byte[] colFamily = scan.getFamilies()[0];
-      NavigableSet<byte[]> quals = scan.getFamilyMap().get(colFamily);
-      byte[] valQualifier = quals.pollFirst();
-      // if weighted median is requested, get qualifier for the weight column
-      byte[] weightQualifier = quals.size() > 1 ? quals.pollLast() : null;
+      NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
+      byte[] valQualifier = null, weightQualifier = null;
+      if (qualifiers != null && !qualifiers.isEmpty()) {
+        valQualifier = qualifiers.pollFirst();
+        // if weighted median is requested, get qualifier for the weight column
+        weightQualifier = qualifiers.pollLast();
+      }
       List<KeyValue> results = new ArrayList<KeyValue>();
 
       boolean hasMoreRows = false;
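getMedian() polls both ends of the same set: pollFirst() removes and returns the value qualifier, so the subsequent pollLast() yields the weight qualifier when a second one was supplied and null when only one was, making the explicit size check of the old code unnecessary. A quick TreeSet sketch of that behavior:

    NavigableSet<byte[]> qualifiers = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    qualifiers.add(Bytes.toBytes("value"));
    byte[] valQualifier = qualifiers.pollFirst();    // "value"
    byte[] weightQualifier = qualifiers.pollLast();  // null: the set is already empty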
@@ -38,7 +38,6 @@ import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.NavigableSet;
 import java.util.TreeMap;
-import java.util.TreeSet;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DeserializationException;
@@ -643,21 +642,21 @@ public final class ProtobufUtil {
     if (scan.hasFilter()) {
       scanBuilder.setFilter(ProtobufUtil.toFilter(scan.getFilter()));
     }
+    if (scan.hasFamilies()) {
       Column.Builder columnBuilder = Column.newBuilder();
       for (Map.Entry<byte[],NavigableSet<byte []>>
           family: scan.getFamilyMap().entrySet()) {
         columnBuilder.setFamily(ByteString.copyFrom(family.getKey()));
-        NavigableSet<byte []> columns = family.getValue();
+        NavigableSet<byte []> qualifiers = family.getValue();
         columnBuilder.clearQualifier();
-        if (columns != null && columns.size() > 0) {
-          for (byte [] qualifier: family.getValue()) {
-            if (qualifier != null) {
+        if (qualifiers != null && qualifiers.size() > 0) {
+          for (byte [] qualifier: qualifiers) {
             columnBuilder.addQualifier(ByteString.copyFrom(qualifier));
           }
         }
-        }
         scanBuilder.addColumn(columnBuilder.build());
       }
+    }
     if (scan.getMaxResultsPerColumnFamily() >= 0) {
       scanBuilder.setStoreLimit(scan.getMaxResultsPerColumnFamily());
     }
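Two things change in the Scan-to-protobuf direction: the column-building loop is now guarded by scan.hasFamilies(), and the per-qualifier null check is gone. The check can be dropped because addColumn() now stores HConstants.EMPTY_BYTE_ARRAY rather than null, so every element of a qualifier set is a real (possibly empty) byte array that ByteString.copyFrom() can serialize.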
@@ -723,17 +722,16 @@ public final class ProtobufUtil {
       scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
     }
     if (proto.getColumnCount() > 0) {
-      TreeMap<byte [], NavigableSet<byte[]>> familyMap =
-        new TreeMap<byte [], NavigableSet<byte []>>(Bytes.BYTES_COMPARATOR);
       for (Column column: proto.getColumnList()) {
         byte[] family = column.getFamily().toByteArray();
-        TreeSet<byte []> set = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
-        for (ByteString qualifier: column.getQualifierList()) {
-          set.add(qualifier.toByteArray());
+        if (column.getQualifierCount() > 0) {
+          for (ByteString qualifier: column.getQualifierList()) {
+            scan.addColumn(family, qualifier.toByteArray());
+          }
+        } else {
+          scan.addFamily(family);
         }
-        familyMap.put(family, set);
       }
-      scan.setFamilyMap(familyMap);
     }
     return scan;
   }
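Decoding now funnels every Column message back through scan.addColumn() or, when the message carries no qualifiers, scan.addFamily(), instead of assembling a family map by hand and calling setFamilyMap(). That keeps the empty-qualifier convention in a single place, so it survives a round trip. A hedged sketch, assuming ClientProtos.Scan is the generated message type these conversions use:

    Scan scan = new Scan();
    scan.addColumn(Bytes.toBytes("f"), null);             // stored as the empty qualifier
    ClientProtos.Scan proto = ProtobufUtil.toScan(scan);  // Scan -> protobuf
    Scan roundTripped = ProtobufUtil.toScan(proto);       // protobuf -> Scan, via addColumn()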
@@ -821,11 +819,9 @@ public final class ProtobufUtil {
       columnBuilder.clearQualifier();
       if (qualifiers != null && qualifiers.size() > 0) {
         for (byte[] qualifier: qualifiers) {
-          if (qualifier != null) {
           columnBuilder.addQualifier(ByteString.copyFrom(qualifier));
         }
       }
-      }
       builder.addColumn(columnBuilder.build());
     }
   }
@@ -46,7 +46,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
@@ -3630,6 +3629,29 @@ public class TestFromClientSide {
     assertTrue(r.isEmpty());
   }
 
+  @Test
+  public void testGet_NullQualifier() throws IOException {
+    HTable table = TEST_UTIL.createTable(Bytes.toBytes("testGet_NullQualifier"), FAMILY);
+    Put put = new Put(ROW);
+    put.add(FAMILY, QUALIFIER, VALUE);
+    table.put(put);
+
+    put = new Put(ROW);
+    put.add(FAMILY, null, VALUE);
+    table.put(put);
+    LOG.info("Row put");
+
+    Get get = new Get(ROW);
+    get.addColumn(FAMILY, null);
+    Result r = table.get(get);
+    assertEquals(1, r.size());
+
+    get = new Get(ROW);
+    get.addFamily(FAMILY);
+    r = table.get(get);
+    assertEquals(2, r.size());
+  }
+
   @Test
   public void testGet_NonExistentRow() throws IOException {
     HTable table = TEST_UTIL.createTable(Bytes.toBytes("testGet_NonExistentRow"), FAMILY);
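The assertions pin down the contract: a Put with a null qualifier lands under the empty qualifier, so addColumn(FAMILY, null) fetches exactly that one cell, while addFamily(FAMILY) still returns both it and the QUALIFIER cell.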
@@ -4999,5 +5021,33 @@ public class TestFromClientSide {
     assertEquals(1, bar.length);
   }
 
+  @Test
+  public void testScan_NullQualifier() throws IOException {
+    HTable table = TEST_UTIL.createTable(Bytes.toBytes("testScan_NullQualifier"), FAMILY);
+    Put put = new Put(ROW);
+    put.add(FAMILY, QUALIFIER, VALUE);
+    table.put(put);
+
+    put = new Put(ROW);
+    put.add(FAMILY, null, VALUE);
+    table.put(put);
+    LOG.info("Row put");
+
+    Scan scan = new Scan();
+    scan.addColumn(FAMILY, null);
+
+    ResultScanner scanner = table.getScanner(scan);
+    Result[] bar = scanner.next(100);
+    assertEquals(1, bar.length);
+    assertEquals(1, bar[0].size());
+
+    scan = new Scan();
+    scan.addFamily(FAMILY);
+
+    scanner = table.getScanner(scan);
+    bar = scanner.next(100);
+    assertEquals(1, bar.length);
+    assertEquals(2, bar[0].size());
+  }
 }
 
@@ -19,13 +19,9 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -102,5 +98,13 @@ public class TestGet {
     Assert.assertNull(get.getAttributesMap().get("attribute1"));
   }
 
+  @Test
+  public void testNullQualifier() {
+    Get get = new Get(null);
+    byte[] family = Bytes.toBytes("family");
+    get.addColumn(family, null);
+    Set<byte[]> qualifiers = get.getFamilyMap().get(family);
+    Assert.assertEquals(1, qualifiers.size());
+  }
 }
 
@@ -19,13 +19,9 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -103,5 +99,13 @@ public class TestScan {
     Assert.assertNull(scan.getAttributesMap().get("attribute1"));
   }
 
+  @Test
+  public void testNullQualifier() {
+    Scan scan = new Scan();
+    byte[] family = Bytes.toBytes("family");
+    scan.addColumn(family, null);
+    Set<byte[]> qualifiers = scan.getFamilyMap().get(family);
+    Assert.assertEquals(1, qualifiers.size());
+  }
 }
 
@@ -60,7 +60,6 @@ public class TestAggregateProtocol {
   private static byte[][] ROWS = makeN(ROW, ROWSIZE);
 
   private static HBaseTestingUtility util = new HBaseTestingUtility();
-  private static MiniHBaseCluster cluster = null;
   private static Configuration conf = util.getConfiguration();
 
   /**
@@ -75,7 +74,6 @@ public class TestAggregateProtocol {
         "org.apache.hadoop.hbase.coprocessor.AggregateImplementation");
 
     util.startMiniCluster(2);
-    cluster = util.getMiniHBaseCluster();
     HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
     util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY,
         new byte[][] { HConstants.EMPTY_BYTE_ARRAY, ROWS[rowSeperator1],