HBASE-10841 Scan,Get,Put,Delete,etc setters should consistently return this
parent e13b629843
commit 34a5019b42
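After this change every setXXX/addXXX on the client operations returns the operation itself, so calls can be chained builder-style (this is what the new testOperationSubClassMethodsAreBuilderStyle test below asserts). A minimal sketch of the resulting usage, assuming the patched client; the row, family, qualifier, and class names here are illustrative and not part of the patch:

import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class BuilderStyleSketch {
  public static void main(String[] args) {
    // Mutation setters now return this, so a Put can be configured in one expression.
    Put put = new Put(Bytes.toBytes("row1"))
        .add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"))
        .setDurability(Durability.SKIP_WAL)
        .setId("example-put");

    // The same applies to queries such as Scan.
    Scan scan = new Scan()
        .setCaching(100)
        .setCacheBlocks(false)
        .setMaxResultSize(2 * 1024 * 1024);

    System.out.println(put);
    System.out.println(scan);
  }
}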
@@ -20,12 +20,16 @@ package org.apache.hadoop.hbase.client;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.UUID;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

/**
@@ -50,8 +54,9 @@ public class Append extends Mutation {
* A client that is not interested in the result can save network
* bandwidth setting this to false.
*/
public void setReturnResults(boolean returnResults) {
public Append setReturnResults(boolean returnResults) {
setAttribute(RETURN_RESULTS, Bytes.toBytes(returnResults));
return this;
}

/**
@@ -127,4 +132,56 @@ public class Append extends Mutation {
this.familyMap.put(family, list);
return this;
}

@Override
public Append setAttribute(String name, byte[] value) {
return (Append) super.setAttribute(name, value);
}

@Override
public Append setId(String id) {
return (Append) super.setId(id);
}

@Override
@Deprecated
public Append setWriteToWAL(boolean write) {
return (Append) super.setWriteToWAL(write);
}

@Override
public Append setDurability(Durability d) {
return (Append) super.setDurability(d);
}

@Override
public Append setFamilyCellMap(NavigableMap<byte[], List<Cell>> map) {
return (Append) super.setFamilyCellMap(map);
}

@Override
@Deprecated
public Append setFamilyMap(NavigableMap<byte[], List<KeyValue>> map) {
return (Append) super.setFamilyMap(map);
}

@Override
public Append setClusterIds(List<UUID> clusterIds) {
return (Append) super.setClusterIds(clusterIds);
}

@Override
public Append setCellVisibility(CellVisibility expression) {
return (Append) super.setCellVisibility(expression);
}

@Override
public Append setACL(String user, Permission perms) {
return (Append) super.setACL(user, perms);
}

@Override
public Append setACL(Map<String, Permission> perms) {
return (Append) super.setACL(perms);
}
}

@@ -34,7 +34,7 @@ public interface Attributes {
* @param name attribute name
* @param value attribute value
*/
void setAttribute(String name, byte[] value);
Attributes setAttribute(String name, byte[] value);

/**
* Gets an attribute

@@ -23,6 +23,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.UUID;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -30,6 +32,8 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

/**
@@ -308,11 +312,12 @@ public class Delete extends Mutation implements Comparable<Row> {
*
* @param timestamp
*/
public void setTimestamp(long timestamp) {
public Delete setTimestamp(long timestamp) {
if (timestamp < 0) {
throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
}
this.ts = timestamp;
return this;
}

@Override
@@ -323,4 +328,56 @@ public class Delete extends Mutation implements Comparable<Row> {
map.put("ts", this.ts);
return map;
}

@Override
public Delete setAttribute(String name, byte[] value) {
return (Delete) super.setAttribute(name, value);
}

@Override
public Delete setId(String id) {
return (Delete) super.setId(id);
}

@Override
@Deprecated
public Delete setWriteToWAL(boolean write) {
return (Delete) super.setWriteToWAL(write);
}

@Override
public Delete setDurability(Durability d) {
return (Delete) super.setDurability(d);
}

@Override
public Delete setFamilyCellMap(NavigableMap<byte[], List<Cell>> map) {
return (Delete) super.setFamilyCellMap(map);
}

@Override
@Deprecated
public Delete setFamilyMap(NavigableMap<byte[], List<KeyValue>> map) {
return (Delete) super.setFamilyMap(map);
}

@Override
public Delete setClusterIds(List<UUID> clusterIds) {
return (Delete) super.setClusterIds(clusterIds);
}

@Override
public Delete setCellVisibility(CellVisibility expression) {
return (Delete) super.setCellVisibility(expression);
}

@Override
public Delete setACL(String user, Permission perms) {
return (Delete) super.setACL(user, perms);
}

@Override
public Delete setACL(Map<String, Permission> perms) {
return (Delete) super.setACL(perms);
}
}

@@ -36,6 +36,8 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.util.Bytes;

/**
@@ -114,16 +116,18 @@ public class Get extends Query
return checkExistenceOnly;
}

public void setCheckExistenceOnly(boolean checkExistenceOnly) {
public Get setCheckExistenceOnly(boolean checkExistenceOnly) {
this.checkExistenceOnly = checkExistenceOnly;
return this;
}

public boolean isClosestRowBefore() {
return closestRowBefore;
}

public void setClosestRowBefore(boolean closestRowBefore) {
public Get setClosestRowBefore(boolean closestRowBefore) {
this.closestRowBefore = closestRowBefore;
return this;
}

/**
@@ -253,8 +257,9 @@ public class Get extends Query
* @param cacheBlocks if false, default settings are overridden and blocks
* will not be cached
*/
public void setCacheBlocks(boolean cacheBlocks) {
public Get setCacheBlocks(boolean cacheBlocks) {
this.cacheBlocks = cacheBlocks;
return this;
}

/**
@@ -442,4 +447,39 @@ public class Get extends Query
// TODO: This is wrong. Can't have two gets the same just because on same row.
return compareTo(other) == 0;
}

@Override
public Get setAttribute(String name, byte[] value) {
return (Get) super.setAttribute(name, value);
}

@Override
public Get setId(String id) {
return (Get) super.setId(id);
}

@Override
public Get setAuthorizations(Authorizations authorizations) {
return (Get) super.setAuthorizations(authorizations);
}

@Override
public Get setACL(Map<String, Permission> perms) {
return (Get) super.setACL(perms);
}

@Override
public Get setACL(String user, Permission perms) {
return (Get) super.setACL(user, perms);
}

@Override
public Get setConsistency(Consistency consistency) {
return (Get) super.setConsistency(consistency);
}

@Override
public Get setReplicaId(int Id) {
return (Get) super.setReplicaId(Id);
}
}

@@ -23,6 +23,7 @@ import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.UUID;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -30,6 +31,8 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

@@ -166,6 +169,7 @@ public class Increment extends Mutation implements Comparable<Row> {
* Method for retrieving the number of families to increment from
* @return number of families
*/
@Override
public int numFamilies() {
return this.familyMap.size();
}
@@ -274,7 +278,60 @@ public class Increment extends Mutation implements Comparable<Row> {
return compareTo(other) == 0;
}

@Override
protected long extraHeapSize(){
return HEAP_OVERHEAD;
}

@Override
public Increment setAttribute(String name, byte[] value) {
return (Increment) super.setAttribute(name, value);
}

@Override
public Increment setId(String id) {
return (Increment) super.setId(id);
}

@Override
@Deprecated
public Increment setWriteToWAL(boolean write) {
return (Increment) super.setWriteToWAL(write);
}

@Override
public Increment setDurability(Durability d) {
return (Increment) super.setDurability(d);
}

@Override
public Increment setFamilyCellMap(NavigableMap<byte[], List<Cell>> map) {
return (Increment) super.setFamilyCellMap(map);
}

@Override
@Deprecated
public Increment setFamilyMap(NavigableMap<byte[], List<KeyValue>> map) {
return (Increment) super.setFamilyMap(map);
}

@Override
public Increment setClusterIds(List<UUID> clusterIds) {
return (Increment) super.setClusterIds(clusterIds);
}

@Override
public Increment setCellVisibility(CellVisibility expression) {
return (Increment) super.setCellVisibility(expression);
}

@Override
public Increment setACL(String user, Permission perms) {
return (Increment) super.setACL(user, perms);
}

@Override
public Increment setACL(Map<String, Permission> perms) {
return (Increment) super.setACL(perms);
}
}

@@ -226,16 +226,18 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @deprecated Use {@link #setDurability(Durability)} instead.
*/
@Deprecated
public void setWriteToWAL(boolean write) {
public Mutation setWriteToWAL(boolean write) {
setDurability(write ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
return this;
}

/**
* Set the durability for this mutation
* @param d
*/
public void setDurability(Durability d) {
public Mutation setDurability(Durability d) {
this.durability = d;
return this;
}

/** Get the current durability */
@@ -254,10 +256,11 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
/**
* Method for setting the put's familyMap
*/
public void setFamilyCellMap(NavigableMap<byte [], List<Cell>> map) {
public Mutation setFamilyCellMap(NavigableMap<byte [], List<Cell>> map) {
// TODO: Shut this down or move it up to be a Constructor. Get new object rather than change
// this internal data member.
this.familyMap = map;
return this;
}

/**
@@ -284,12 +287,13 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @deprecated use {@link #setFamilyCellMap(NavigableMap)} instead.
*/
@Deprecated
public void setFamilyMap(NavigableMap<byte [], List<KeyValue>> map) {
public Mutation setFamilyMap(NavigableMap<byte [], List<KeyValue>> map) {
TreeMap<byte[], List<Cell>> fm = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], List<KeyValue>> e : map.entrySet()) {
fm.put(e.getKey(), Lists.<Cell>newArrayList(e.getValue()));
}
this.familyMap = fm;
return this;
}

/**
@@ -326,7 +330,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* Marks that the clusters with the given clusterIds have consumed the mutation
* @param clusterIds of the clusters that have consumed the mutation
*/
public void setClusterIds(List<UUID> clusterIds) {
public Mutation setClusterIds(List<UUID> clusterIds) {
ByteArrayDataOutput out = ByteStreams.newDataOutput();
out.writeInt(clusterIds.size());
for (UUID clusterId : clusterIds) {
@@ -334,6 +338,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
out.writeLong(clusterId.getLeastSignificantBits());
}
setAttribute(CONSUMED_CLUSTER_IDS, out.toByteArray());
return this;
}

/**
@@ -357,9 +362,10 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* It is illegal to set <code>CellVisibility</code> on <code>Delete</code> mutation.
* @param expression
*/
public void setCellVisibility(CellVisibility expression) {
public Mutation setCellVisibility(CellVisibility expression) {
this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, ProtobufUtil
.toCellVisibility(expression).toByteArray());
return this;
}

/**
@@ -437,21 +443,23 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @param user User short name
* @param perms Permissions for the user
*/
public void setACL(String user, Permission perms) {
public Mutation setACL(String user, Permission perms) {
setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
ProtobufUtil.toUsersAndPermissions(user, perms).toByteArray());
return this;
}

/**
* @param perms A map of permissions for a user or users
*/
public void setACL(Map<String, Permission> perms) {
public Mutation setACL(Map<String, Permission> perms) {
ListMultimap<String, Permission> permMap = ArrayListMultimap.create();
for (Map.Entry<String, Permission> entry : perms.entrySet()) {
permMap.put(entry.getKey(), entry.getValue());
}
setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
ProtobufUtil.toUsersAndPermissions(permMap).toByteArray());
return this;
}

/**

@@ -37,9 +37,10 @@ public abstract class OperationWithAttributes extends Operation implements Attri
// used for uniquely identifying an operation
public static final String ID_ATRIBUTE = "_operation.attributes.id";

public void setAttribute(String name, byte[] value) {
@Override
public OperationWithAttributes setAttribute(String name, byte[] value) {
if (attributes == null && value == null) {
return;
return this;
}

if (attributes == null) {
@@ -54,8 +55,10 @@ public abstract class OperationWithAttributes extends Operation implements Attri
} else {
attributes.put(name, value);
}
return this;
}

@Override
public byte[] getAttribute(String name) {
if (attributes == null) {
return null;
@@ -64,6 +67,7 @@ public abstract class OperationWithAttributes extends Operation implements Attri
return attributes.get(name);
}

@Override
public Map<String, byte[]> getAttributesMap() {
if (attributes == null) {
return Collections.emptyMap();
@@ -92,8 +96,9 @@ public abstract class OperationWithAttributes extends Operation implements Attri
* @param id
* id to set for the scan
*/
public void setId(String id) {
public OperationWithAttributes setId(String id) {
setAttribute(ID_ATRIBUTE, Bytes.toBytes(id));
return this;
}

/**

@@ -24,7 +24,9 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.UUID;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -34,6 +36,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

/**
@@ -416,4 +420,56 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
}
return filteredList;
}

@Override
public Put setAttribute(String name, byte[] value) {
return (Put) super.setAttribute(name, value);
}

@Override
public Put setId(String id) {
return (Put) super.setId(id);
}

@Override
@Deprecated
public Put setWriteToWAL(boolean write) {
return (Put) super.setWriteToWAL(write);
}

@Override
public Put setDurability(Durability d) {
return (Put) super.setDurability(d);
}

@Override
public Put setFamilyCellMap(NavigableMap<byte[], List<Cell>> map) {
return (Put) super.setFamilyCellMap(map);
}

@Override
@Deprecated
public Put setFamilyMap(NavigableMap<byte[], List<KeyValue>> map) {
return (Put) super.setFamilyMap(map);
}

@Override
public Put setClusterIds(List<UUID> clusterIds) {
return (Put) super.setClusterIds(clusterIds);
}

@Override
public Put setCellVisibility(CellVisibility expression) {
return (Put) super.setCellVisibility(expression);
}

@Override
public Put setACL(String user, Permission perms) {
return (Put) super.setACL(user, perms);
}

@Override
public Put setACL(Map<String, Permission> perms) {
return (Put) super.setACL(perms);
}
}

@@ -61,9 +61,10 @@ public abstract class Query extends OperationWithAttributes {
* Sets the authorizations to be used by this Query
* @param authorizations
*/
public void setAuthorizations(Authorizations authorizations) {
public Query setAuthorizations(Authorizations authorizations) {
this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, ProtobufUtil
.toAuthorizations(authorizations).toByteArray());
return this;
}

/**
@@ -87,21 +88,23 @@ public abstract class Query extends OperationWithAttributes {
* @param user User short name
* @param perms Permissions for the user
*/
public void setACL(String user, Permission perms) {
public Query setACL(String user, Permission perms) {
setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
ProtobufUtil.toUsersAndPermissions(user, perms).toByteArray());
return this;
}

/**
* @param perms A map of permissions for a user or users
*/
public void setACL(Map<String, Permission> perms) {
public Query setACL(Map<String, Permission> perms) {
ListMultimap<String, Permission> permMap = ArrayListMultimap.create();
for (Map.Entry<String, Permission> entry : perms.entrySet()) {
permMap.put(entry.getKey(), entry.getValue());
}
setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
ProtobufUtil.toUsersAndPermissions(permMap).toByteArray());
return this;
}

/**
@@ -116,8 +119,9 @@ public abstract class Query extends OperationWithAttributes {
* Sets the consistency level for this operation
* @param consistency the consistency level
*/
public void setConsistency(Consistency consistency) {
public Query setConsistency(Consistency consistency) {
this.consistency = consistency;
return this;
}

/**
@@ -127,8 +131,9 @@ public abstract class Query extends OperationWithAttributes {
* <br><b> Expert: </b>This is an advanced API exposed. Only use it if you know what you are doing
* @param Id
*/
public void setReplicaId(int Id) {
public Query setReplicaId(int Id) {
this.targetReplicaId = Id;
return this;
}

/**

@@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.util.Bytes;

/**
@@ -378,29 +380,32 @@ public class Scan extends Query {
* Set the maximum number of values to return for each call to next()
* @param batch the maximum number of values
*/
public void setBatch(int batch) {
public Scan setBatch(int batch) {
if (this.hasFilter() && this.filter.hasFilterRow()) {
throw new IncompatibleFilterException(
"Cannot set batch on a scan using a filter" +
" that returns true for filter.hasFilterRow");
}
this.batch = batch;
return this;
}

/**
* Set the maximum number of values to return per row per Column Family
* @param limit the maximum number of values returned / row / CF
*/
public void setMaxResultsPerColumnFamily(int limit) {
public Scan setMaxResultsPerColumnFamily(int limit) {
this.storeLimit = limit;
return this;
}

/**
* Set offset for the row per Column Family.
* @param offset is the number of kvs that will be skipped.
*/
public void setRowOffsetPerColumnFamily(int offset) {
public Scan setRowOffsetPerColumnFamily(int offset) {
this.storeOffset = offset;
return this;
}

/**
@@ -409,8 +414,9 @@ public class Scan extends Query {
* Higher caching values will enable faster scanners but will use more memory.
* @param caching the number of rows for caching
*/
public void setCaching(int caching) {
public Scan setCaching(int caching) {
this.caching = caching;
return this;
}

/**
@@ -427,8 +433,9 @@ public class Scan extends Query {
*
* @param maxResultSize The maximum result size in bytes.
*/
public void setMaxResultSize(long maxResultSize) {
public Scan setMaxResultSize(long maxResultSize) {
this.maxResultSize = maxResultSize;
return this;
}

@Override
@@ -565,8 +572,9 @@ public class Scan extends Query {
* @param cacheBlocks if false, default settings are overridden and blocks
* will not be cached
*/
public void setCacheBlocks(boolean cacheBlocks) {
public Scan setCacheBlocks(boolean cacheBlocks) {
this.cacheBlocks = cacheBlocks;
return this;
}

/**
@@ -615,8 +623,9 @@ public class Scan extends Query {
* - if there's a concurrent split and you have more than 2 column families, some rows may be
* missing some column families.
*/
public void setLoadColumnFamiliesOnDemand(boolean value) {
public Scan setLoadColumnFamiliesOnDemand(boolean value) {
this.loadColumnFamiliesOnDemand = value;
return this;
}

/**
@@ -729,8 +738,9 @@ public class Scan extends Query {
* It is an error to specify any column when "raw" is set.
* @param raw True/False to enable/disable "raw" mode.
*/
public void setRaw(boolean raw) {
public Scan setRaw(boolean raw) {
setAttribute(RAW_ATTR, Bytes.toBytes(raw));
return this;
}

/**
@@ -752,8 +762,9 @@ public class Scan extends Query {
* is assumed to be READ_COMMITTED.
* @param level IsolationLevel for this scan
*/
public void setIsolationLevel(IsolationLevel level) {
public Scan setIsolationLevel(IsolationLevel level) {
setAttribute(ISOLATION_LEVEL, level.toBytes());
return this;
}
/*
* @return The isolation level of this scan.
@@ -787,8 +798,9 @@ public class Scan extends Query {
*
* @param small
*/
public void setSmall(boolean small) {
public Scan setSmall(boolean small) {
this.small = small;
return this;
}

/**
@@ -798,4 +810,39 @@ public class Scan extends Query {
public boolean isSmall() {
return small;
}

@Override
public Scan setAttribute(String name, byte[] value) {
return (Scan) super.setAttribute(name, value);
}

@Override
public Scan setId(String id) {
return (Scan) super.setId(id);
}

@Override
public Scan setAuthorizations(Authorizations authorizations) {
return (Scan) super.setAuthorizations(authorizations);
}

@Override
public Scan setACL(Map<String, Permission> perms) {
return (Scan) super.setACL(perms);
}

@Override
public Scan setACL(String user, Permission perms) {
return (Scan) super.setACL(user, perms);
}

@Override
public Scan setConsistency(Consistency consistency) {
return (Scan) super.setConsistency(consistency);
}

@Override
public Scan setReplicaId(int Id) {
return (Scan) super.setReplicaId(Id);
}
}

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@@ -30,6 +31,7 @@ import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashMap;
@@ -291,7 +293,7 @@ public class TestOperation {
}

/**
* Test the client Operations' JSON encoding to ensure that produced JSON is
* Test the client Operations' JSON encoding to ensure that produced JSON is
* parseable and that the details are present and not corrupted.
* @throws IOException
*/
@@ -352,7 +354,7 @@ public class TestOperation {
assertEquals("Qualifier incorrect in Put.toJSON()",
Bytes.toStringBinary(QUALIFIER),
kvMap.get("qualifier"));
assertEquals("Value length incorrect in Put.toJSON()",
assertEquals("Value length incorrect in Put.toJSON()",
VALUE.length, kvMap.get("vlen"));

// produce a Delete operation
@@ -370,7 +372,7 @@ public class TestOperation {
assertNotNull("Family absent in Delete.toJSON()", familyInfo);
assertEquals("KeyValue absent in Delete.toJSON()", 1, familyInfo.size());
kvMap = (Map) familyInfo.get(0);
assertEquals("Qualifier incorrect in Delete.toJSON()",
assertEquals("Qualifier incorrect in Delete.toJSON()",
Bytes.toStringBinary(QUALIFIER), kvMap.get("qualifier"));
}

@@ -419,6 +421,50 @@ public class TestOperation {
Assert.assertEquals(0, KeyValue.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0))));
}

@Test
@SuppressWarnings("rawtypes")
public void testOperationSubClassMethodsAreBuilderStyle() {
/* All Operation subclasses should have a builder style setup where setXXX/addXXX methods
* can be chainable together:
* . For example:
* Scan scan = new Scan()
* .setFoo(foo)
* .setBar(bar)
* .setBuz(buz)
*
* This test ensures that all methods starting with "set" returns an Operation object
*/

// TODO: We should ensure all subclasses of Operation is checked.
Class[] classes = new Class[] {
Operation.class,
OperationWithAttributes.class,
Mutation.class,
Query.class,
Delete.class,
Increment.class,
Append.class,
Put.class,
Get.class,
Scan.class};

for (Class clazz : classes) {
System.out.println("Checking " + clazz);
Method[] methods = clazz.getDeclaredMethods();
for (Method method : methods) {
Class<?> ret = method.getReturnType();
if (method.getName().startsWith("set") || method.getName().startsWith("add")) {
System.out.println(" " + clazz.getSimpleName() + "." + method.getName() + "() : "
+ ret.getSimpleName());
String errorMsg = "All setXXX() methods in " + clazz.getSimpleName() + " should return a "
+ clazz.getSimpleName() + " object in builder style. Offending method:"
+ method.getName();
assertTrue(errorMsg, Operation.class.isAssignableFrom(ret)
|| Attributes.class.isAssignableFrom(ret)); // for setAttributes()
}
}
}
}

}

@@ -238,8 +238,8 @@ EOF
def _count_internal(interval = 1000, caching_rows = 10)
# We can safely set scanner caching with the first key only filter
scan = org.apache.hadoop.hbase.client.Scan.new
scan.cache_blocks = false
scan.caching = caching_rows
scan.setCacheBlocks(false)
scan.setCaching(caching_rows)
scan.setFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.new)

# Run the scanner
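For comparison, the Java-side equivalent of the chained shell configuration above could now be written as a single expression. This is a sketch, not part of the patch; the class name and the cachingRows parameter (standing in for the Ruby caching_rows variable) are illustrative:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

public class CountScanSketch {
  // Mirrors the shell's _count_internal setup: skip the block cache, cache a few
  // rows per RPC, and use a first-key-only filter so each row returns one cell.
  static Scan buildCountScan(int cachingRows) {
    return new Scan()
        .setCacheBlocks(false)
        .setCaching(cachingRows)
        .setFilter(new FirstKeyOnlyFilter());
  }
}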