HBASE-19462 Deprecate all addImmutable methods in Put

Signed-off-by: Michael Stack <stack@apache.org>

parent 1a173f820b
commit 70f02dbc7c
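The change is mechanical across all call sites: every `Put#addImmutable(...)` call becomes a `Put#add(Cell)` call fed from a CellBuilder. A minimal before/after sketch of the pattern (assuming the HBase 2.0 client API as of this commit; `row`, `FAMILY`, `QUALIFIER`, and `value` are placeholder byte arrays, not names from the patch):

    // Before: deprecated as of 2.0.0, to be removed in HBase 3.0.0.
    Put put = new Put(row);
    put.addImmutable(FAMILY, QUALIFIER, value);

    // After: build the Cell explicitly and hand it to add(Cell). Note that
    // add(Cell) declares IOException, so call sites gain a throws clause or
    // a try/catch -- visible in the signature changes throughout this diff.
    Put put = new Put(row);
    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
        .setRow(put.getRow())                 // row must match the Put's row
        .setFamily(FAMILY)
        .setQualifier(QUALIFIER)
        .setTimestamp(put.getTimeStamp())     // preserves the old default timestamp
        .setType(CellBuilder.DataType.Put)
        .setValue(value)
        .build());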
@@ -1342,35 +1342,80 @@ public class MetaTableAccessor {
     return delete;
   }

-  public static Put makeBarrierPut(byte[] encodedRegionName, long seq, byte[] tableName) {
+  public static Put makeBarrierPut(byte[] encodedRegionName, long seq, byte[] tableName)
+      throws IOException {
     byte[] seqBytes = Bytes.toBytes(seq);
-    return new Put(encodedRegionName)
-        .addImmutable(HConstants.REPLICATION_BARRIER_FAMILY, seqBytes, seqBytes)
-        .addImmutable(HConstants.REPLICATION_META_FAMILY, tableNameCq, tableName);
+    Put put = new Put(encodedRegionName);
+    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(put.getRow())
+        .setFamily(HConstants.REPLICATION_BARRIER_FAMILY)
+        .setQualifier(seqBytes)
+        .setTimestamp(put.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(seqBytes)
+        .build())
+      .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(put.getRow())
+        .setFamily(HConstants.REPLICATION_META_FAMILY)
+        .setQualifier(tableNameCq)
+        .setTimestamp(put.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(tableName)
+        .build());
+    return put;
   }

-  public static Put makeDaughterPut(byte[] encodedRegionName, byte[] value) {
-    return new Put(encodedRegionName).addImmutable(HConstants.REPLICATION_META_FAMILY,
-        daughterNameCq, value);
+  public static Put makeDaughterPut(byte[] encodedRegionName, byte[] value) throws IOException {
+    Put put = new Put(encodedRegionName);
+    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(put.getRow())
+        .setFamily(HConstants.REPLICATION_META_FAMILY)
+        .setQualifier(daughterNameCq)
+        .setTimestamp(put.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(value)
+        .build());
+    return put;
   }

-  public static Put makeParentPut(byte[] encodedRegionName, byte[] value) {
-    return new Put(encodedRegionName).addImmutable(HConstants.REPLICATION_META_FAMILY,
-        parentNameCq, value);
+  public static Put makeParentPut(byte[] encodedRegionName, byte[] value) throws IOException {
+    Put put = new Put(encodedRegionName);
+    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(put.getRow())
+        .setFamily(HConstants.REPLICATION_META_FAMILY)
+        .setQualifier(parentNameCq)
+        .setTimestamp(put.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(value)
+        .build());
+    return put;
   }

   /**
    * Adds split daughters to the Put
    */
-  public static Put addDaughtersToPut(Put put, RegionInfo splitA, RegionInfo splitB) {
+  public static Put addDaughtersToPut(Put put, RegionInfo splitA, RegionInfo splitB)
+      throws IOException {
     if (splitA != null) {
-      put.addImmutable(
-        HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER, RegionInfo.toByteArray(splitA));
+      put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(put.getRow())
+          .setFamily(HConstants.CATALOG_FAMILY)
+          .setQualifier(HConstants.SPLITA_QUALIFIER)
+          .setTimestamp(put.getTimeStamp())
+          .setType(CellBuilder.DataType.Put)
+          .setValue(RegionInfo.toByteArray(splitA))
+          .build());
     }
     if (splitB != null) {
-      put.addImmutable(
-        HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER, RegionInfo.toByteArray(splitB));
+      put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(put.getRow())
+          .setFamily(HConstants.CATALOG_FAMILY)
+          .setQualifier(HConstants.SPLITB_QUALIFIER)
+          .setTimestamp(put.getTimeStamp())
+          .setType(CellBuilder.DataType.Put)
+          .setValue(RegionInfo.toByteArray(splitB))
+          .build());
     }
     return put;
   }
@@ -1658,10 +1703,22 @@ public class MetaTableAccessor {
     // Put for parent
     Put putOfMerged = makePutFromRegionInfo(mergedRegion, time);
-    putOfMerged.addImmutable(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER,
-        RegionInfo.toByteArray(regionA));
-    putOfMerged.addImmutable(HConstants.CATALOG_FAMILY, HConstants.MERGEB_QUALIFIER,
-        RegionInfo.toByteArray(regionB));
+    putOfMerged.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(putOfMerged.getRow())
+        .setFamily(HConstants.CATALOG_FAMILY)
+        .setQualifier(HConstants.MERGEA_QUALIFIER)
+        .setTimestamp(putOfMerged.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(RegionInfo.toByteArray(regionA))
+        .build())
+      .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(putOfMerged.getRow())
+        .setFamily(HConstants.CATALOG_FAMILY)
+        .setQualifier(HConstants.MERGEB_QUALIFIER)
+        .setTimestamp(putOfMerged.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(RegionInfo.toByteArray(regionB))
+        .build());

     // Deletes for merging regions
     Delete deleteA = makeDeleteFromRegionInfo(regionA, time);
@@ -1898,10 +1955,15 @@ public class MetaTableAccessor {
       Map<String, Long> positions) throws IOException {
     List<Put> puts = new ArrayList<>(positions.entrySet().size());
     for (Map.Entry<String, Long> entry : positions.entrySet()) {
-      long value = Math.abs(entry.getValue());
       Put put = new Put(Bytes.toBytes(entry.getKey()));
-      put.addImmutable(HConstants.REPLICATION_POSITION_FAMILY, Bytes.toBytes(peerId),
-          Bytes.toBytes(value));
+      put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(put.getRow())
+          .setFamily(HConstants.REPLICATION_POSITION_FAMILY)
+          .setQualifier(Bytes.toBytes(peerId))
+          .setTimestamp(put.getTimeStamp())
+          .setType(CellBuilder.DataType.Put)
+          .setValue(Bytes.toBytes(Math.abs(entry.getValue())))
+          .build());
       puts.add(put);
     }
     getMetaHTable(connection).put(puts);
@@ -2062,31 +2124,73 @@ public class MetaTableAccessor {

   public static Put addRegionInfo(final Put p, final RegionInfo hri)
       throws IOException {
-    p.addImmutable(getCatalogFamily(), HConstants.REGIONINFO_QUALIFIER,
-        RegionInfo.toByteArray(hri));
+    p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(HConstants.REGIONINFO_QUALIFIER)
+        .setTimestamp(p.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(RegionInfo.toByteArray(hri))
+        .build());
     return p;
   }

   public static Put addLocation(final Put p, final ServerName sn, long openSeqNum,
-      long time, int replicaId){
+      long time, int replicaId) throws IOException {
     if (time <= 0) {
       time = EnvironmentEdgeManager.currentTime();
     }
-    p.addImmutable(getCatalogFamily(), getServerColumn(replicaId), time,
-        Bytes.toBytes(sn.getHostAndPort()));
-    p.addImmutable(getCatalogFamily(), getStartCodeColumn(replicaId), time,
-        Bytes.toBytes(sn.getStartcode()));
-    p.addImmutable(getCatalogFamily(), getSeqNumColumn(replicaId), time,
-        Bytes.toBytes(openSeqNum));
-    return p;
+    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
+    return p.add(builder.clear()
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(getServerColumn(replicaId))
+        .setTimestamp(time)
+        .setType(CellBuilder.DataType.Put)
+        .setValue(Bytes.toBytes(sn.getAddress().toString()))
+        .build())
+      .add(builder.clear()
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(getStartCodeColumn(replicaId))
+        .setTimestamp(time)
+        .setType(CellBuilder.DataType.Put)
+        .setValue(Bytes.toBytes(sn.getStartcode()))
+        .build())
+      .add(builder.clear()
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(getSeqNumColumn(replicaId))
+        .setTimestamp(time)
+        .setType(CellBuilder.DataType.Put)
+        .setValue(Bytes.toBytes(openSeqNum))
+        .build());
   }

-  public static Put addEmptyLocation(final Put p, int replicaId) {
+  public static Put addEmptyLocation(final Put p, int replicaId) throws IOException {
     long now = EnvironmentEdgeManager.currentTime();
-    p.addImmutable(getCatalogFamily(), getServerColumn(replicaId), now, null);
-    p.addImmutable(getCatalogFamily(), getStartCodeColumn(replicaId), now, null);
-    p.addImmutable(getCatalogFamily(), getSeqNumColumn(replicaId), now, null);
-    return p;
+    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
+    return p.add(builder.clear()
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(getServerColumn(replicaId))
+        .setTimestamp(now)
+        .setType(CellBuilder.DataType.Put)
+        .build())
+      .add(builder.clear()
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(getStartCodeColumn(replicaId))
+        .setTimestamp(now)
+        .setType(CellBuilder.DataType.Put)
+        .build())
+      .add(builder.clear()
+        .setRow(p.getRow())
+        .setFamily(getCatalogFamily())
+        .setQualifier(getSeqNumColumn(replicaId))
+        .setTimestamp(now)
+        .setType(CellBuilder.DataType.Put)
+        .build());
   }

   private static String mutationsToString(List<? extends Mutation> mutations) throws IOException {
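Where a method emits several cells, the patch creates one builder and recycles it with clear() rather than calling the factory per cell, as addLocation and addEmptyLocation do above. A sketch of the reuse pattern (`family`, `qualifiers`, and `value` are illustrative placeholders, not names from the patch):

    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    for (byte[] qualifier : qualifiers) {
      put.add(builder.clear()                 // reset state left by the previous cell
          .setRow(put.getRow())
          .setFamily(family)
          .setQualifier(qualifier)
          .setTimestamp(put.getTimeStamp())
          .setType(CellBuilder.DataType.Put)
          .setValue(value)
          .build());
    }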
@@ -2103,13 +2207,19 @@ public class MetaTableAccessor {
     return p.getClass().getSimpleName() + p.toJSON();
   }

-  public static Put addSequenceNum(final Put p, long openSeqNum, long time, int replicaId) {
+  public static Put addSequenceNum(final Put p, long openSeqNum, long time,
+      int replicaId) throws IOException {
     if (time <= 0) {
       time = EnvironmentEdgeManager.currentTime();
     }
-    p.addImmutable(HConstants.CATALOG_FAMILY, getSeqNumColumn(replicaId), time,
-        Bytes.toBytes(openSeqNum));
-    return p;
+    return p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(p.getRow())
+        .setFamily(HConstants.CATALOG_FAMILY)
+        .setQualifier(getSeqNumColumn(replicaId))
+        .setTimestamp(time)
+        .setType(CellBuilder.DataType.Put)
+        .setValue(Bytes.toBytes(openSeqNum))
+        .build());
   }

   /**
@@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IndividualBytesFieldCell;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
@@ -182,22 +181,14 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
    * See {@link #addColumn(byte[], byte[], byte[])}. This version expects
    * that the underlying arrays won't change. It's intended
    * for usage internal HBase to and for advanced client applications.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link #add(Cell)} and {@link org.apache.hadoop.hbase.CellBuilder} instead
    */
+  @Deprecated
   public Put addImmutable(byte [] family, byte [] qualifier, byte [] value) {
     return addImmutable(family, qualifier, this.ts, value);
   }

-  /**
-   * This expects that the underlying arrays won't change. It's intended
-   * for usage internal HBase to and for advanced client applications.
-   * <p>Marked as audience Private as of 1.2.0. {@link Tag} is an internal implementation detail
-   * that should not be exposed publicly.
-   */
-  @InterfaceAudience.Private
-  public Put addImmutable(byte[] family, byte [] qualifier, byte [] value, Tag[] tag) {
-    return addImmutable(family, qualifier, this.ts, value, tag);
-  }
-
   /**
    * Add the specified column and value, with the specified timestamp as
    * its version to this Put operation.
@@ -221,7 +212,10 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
    * See {@link #addColumn(byte[], byte[], long, byte[])}. This version expects
    * that the underlying arrays won't change. It's intended
    * for usage internal HBase to and for advanced client applications.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link #add(Cell)} and {@link org.apache.hadoop.hbase.CellBuilder} instead
    */
+  @Deprecated
   public Put addImmutable(byte [] family, byte [] qualifier, long ts, byte [] value) {
     // Family can not be null, otherwise NullPointerException is thrown when putting the cell into familyMap
     if (family == null) {
@@ -238,39 +232,6 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
     return this;
   }

-  /**
-   * This expects that the underlying arrays won't change. It's intended
-   * for usage internal HBase to and for advanced client applications.
-   * <p>Marked as audience Private as of 1.2.0. {@link Tag} is an internal implementation detail
-   * that should not be exposed publicly.
-   */
-  @InterfaceAudience.Private
-  public Put addImmutable(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tag) {
-    List<Cell> list = getCellList(family);
-    KeyValue kv = createPutKeyValue(family, qualifier, ts, value, tag);
-    list.add(kv);
-    return this;
-  }
-
-  /**
-   * This expects that the underlying arrays won't change. It's intended
-   * for usage internal HBase to and for advanced client applications.
-   * <p>Marked as audience Private as of 1.2.0. {@link Tag} is an internal implementation detail
-   * that should not be exposed publicly.
-   */
-  @InterfaceAudience.Private
-  public Put addImmutable(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value,
-      Tag[] tag) {
-    if (ts < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-    }
-    List<Cell> list = getCellList(family);
-    KeyValue kv = createPutKeyValue(family, qualifier, ts, value, tag);
-    list.add(kv);
-    return this;
-  }
-
   /**
    * Add the specified column and value, with the specified timestamp as
    * its version to this Put operation.
@@ -294,7 +255,10 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
    * See {@link #addColumn(byte[], ByteBuffer, long, ByteBuffer)}. This version expects
    * that the underlying arrays won't change. It's intended
    * for usage internal HBase to and for advanced client applications.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link #add(Cell)} and {@link org.apache.hadoop.hbase.CellBuilder} instead
    */
+  @Deprecated
   public Put addImmutable(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
     if (ts < 0) {
       throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
@@ -313,7 +277,18 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
    * @return this
    * @throws java.io.IOException e
    */
-  public Put add(Cell kv) throws IOException{
+  public Put add(Cell kv) throws IOException {
+    // Family can not be null, otherwise NullPointerException is thrown when putting
+    // the cell into familyMap
+    if (kv.getFamilyArray() == null) {
+      throw new IllegalArgumentException("Family cannot be null");
+    }
+
+    // Check timestamp
+    if (ts < 0) {
+      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
+    }
+
+    byte [] family = CellUtil.cloneFamily(kv);
     List<Cell> list = getCellList(family);
     //Checking that the row of the kv is the same as the put
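The reworked add(Cell) now fails fast instead of deferring a NullPointerException: a null family or a negative timestamp raises IllegalArgumentException, and (as the trailing context hints) a cell whose row differs from the Put's row is rejected with an IOException. A sketch of that failure mode, assuming the behavior described above (exception wording approximate):

    Put put = new Put(Bytes.toBytes("row-1"));
    Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
        .setRow(Bytes.toBytes("row-2"))   // deliberately not the Put's row
        .setFamily(Bytes.toBytes("f"))
        .setType(CellBuilder.DataType.Put)
        .build();
    try {
      put.add(cell);
    } catch (IOException e) {
      // the cell's row does not match the row of the Put
    }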
@@ -17,10 +17,19 @@
  */
 package org.apache.hadoop.hbase.protobuf;

+import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.Message;
+import com.google.protobuf.Parser;
+import com.google.protobuf.RpcChannel;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
+import com.google.protobuf.TextFormat;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;

@@ -28,9 +37,9 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.NavigableSet;
 import java.util.function.Function;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;

@@ -43,8 +52,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Consistency;
 import org.apache.hadoop.hbase.client.Delete;

@@ -91,17 +98,6 @@ import org.apache.hadoop.hbase.util.Methods;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.yetus.audience.InterfaceAudience;

-import com.google.protobuf.ByteString;
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.Message;
-import com.google.protobuf.Parser;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.TextFormat;
-
 /**
  * Protobufs utility.
  * NOTE: This class OVERLAPS ProtobufUtil in the subpackage 'shaded'. The latter is used
@@ -516,10 +512,6 @@ public final class ProtobufUtil {
           throw new DoNotRetryIOException(
               "Missing required field: qualifier value");
         }
-        ByteBuffer qualifier =
-            qv.hasQualifier() ? qv.getQualifier().asReadOnlyByteBuffer() : null;
-        ByteBuffer value =
-            qv.hasValue() ? qv.getValue().asReadOnlyByteBuffer() : null;
         long ts = timestamp;
         if (qv.hasTimestamp()) {
           ts = qv.getTimestamp();
@@ -529,30 +521,42 @@ public final class ProtobufUtil {
           allTagsBytes = qv.getTags().toByteArray();
           if(qv.hasDeleteType()) {
             put.add(cellBuilder.clear()
-                .setRow(proto.getRow().toByteArray())
-                .setFamily(family)
-                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
-                .setTimestamp(ts)
-                .setType(fromDeleteType(qv.getDeleteType()).getCode())
-                .setTags(allTagsBytes)
-                .build());
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                .setTags(allTagsBytes)
+                .build());
           } else {
-            List<Tag> tags =
-                TagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length);
-            Tag[] tagsArray = new Tag[tags.size()];
-            put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
+            put.add(cellBuilder.clear()
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(CellBuilder.DataType.Put)
+                .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
+                .setTags(allTagsBytes)
+                .build());
           }
         } else {
           if(qv.hasDeleteType()) {
             put.add(cellBuilder.clear()
-                .setRow(proto.getRow().toByteArray())
-                .setFamily(family)
-                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
-                .setTimestamp(ts)
-                .setType(fromDeleteType(qv.getDeleteType()).getCode())
-                .build());
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                .build());
           } else{
-            put.addImmutable(family, qualifier, ts, value);
+            put.add(cellBuilder.clear()
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(CellBuilder.DataType.Put)
+                .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
+                .build());
           }
         }
       }
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.CacheEvictionStats;
 import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;

@@ -61,8 +62,6 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.ClientUtil;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;

@@ -103,6 +102,15 @@ import org.apache.hadoop.hbase.replication.ReplicationLoadSink;
 import org.apache.hadoop.hbase.replication.ReplicationLoadSource;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
+import org.apache.hadoop.hbase.util.Addressing;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.DynamicClassLoader;
+import org.apache.hadoop.hbase.util.ExceptionUtil;
+import org.apache.hadoop.hbase.util.Methods;
+import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.yetus.audience.InterfaceAudience;
+
 import org.apache.hadoop.hbase.shaded.com.google.gson.JsonArray;
 import org.apache.hadoop.hbase.shaded.com.google.gson.JsonElement;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

@@ -181,14 +189,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
-import org.apache.hadoop.hbase.util.Addressing;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.DynamicClassLoader;
-import org.apache.hadoop.hbase.util.ExceptionUtil;
-import org.apache.hadoop.hbase.util.Methods;
-import org.apache.hadoop.hbase.util.VersionInfo;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Protobufs utility.
@@ -654,10 +654,6 @@ public final class ProtobufUtil {
           throw new DoNotRetryIOException(
               "Missing required field: qualifier value");
         }
-        ByteBuffer qualifier =
-            qv.hasQualifier() ? qv.getQualifier().asReadOnlyByteBuffer() : null;
-        ByteBuffer value =
-            qv.hasValue() ? qv.getValue().asReadOnlyByteBuffer() : null;
         long ts = timestamp;
         if (qv.hasTimestamp()) {
           ts = qv.getTimestamp();
@@ -667,30 +663,42 @@ public final class ProtobufUtil {
           allTagsBytes = qv.getTags().toByteArray();
           if(qv.hasDeleteType()) {
             put.add(cellBuilder.clear()
-                .setRow(proto.getRow().toByteArray())
-                .setFamily(family)
-                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
-                .setTimestamp(ts)
-                .setType(fromDeleteType(qv.getDeleteType()).getCode())
-                .setTags(allTagsBytes)
-                .build());
+                .setRow(proto.getRow().toByteArray())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                .setTags(allTagsBytes)
+                .build());
           } else {
-            List<Tag> tags =
-                TagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length);
-            Tag[] tagsArray = new Tag[tags.size()];
-            put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
+            put.add(cellBuilder.clear()
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(CellBuilder.DataType.Put)
+                .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
+                .setTags(allTagsBytes)
+                .build());
           }
         } else {
           if(qv.hasDeleteType()) {
             put.add(cellBuilder.clear()
-                .setRow(proto.getRow().toByteArray())
-                .setFamily(family)
-                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
-                .setTimestamp(ts)
-                .setType(fromDeleteType(qv.getDeleteType()).getCode())
-                .build());
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                .build());
           } else{
-            put.addImmutable(family, qualifier, ts, value);
+            put.add(cellBuilder.clear()
+                .setRow(put.getRow())
+                .setFamily(family)
+                .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                .setTimestamp(ts)
+                .setType(CellBuilder.DataType.Put)
+                .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
+                .build());
           }
         }
       }
@@ -19,18 +19,21 @@

 package org.apache.hadoop.hbase.client;

-import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.Cell;
-
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;

+import java.io.IOException;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
 @Category({ SmallTests.class, ClientTests.class })
 public class TestPut {
   @Test
@@ -66,7 +69,7 @@ public class TestPut {

   // HBASE-14882
   @Test
-  public void testAddImmutable() {
+  public void testAddImmutable() throws IOException {
     byte[] row = Bytes.toBytes("immutable-row");
     byte[] family = Bytes.toBytes("immutable-family");

@@ -77,9 +80,24 @@ public class TestPut {
     byte[] value1 = Bytes.toBytes("immutable-value-1");
     long ts1 = 5000L;

-    Put put = new Put(row, true); // "true" indicates that the input row is immutable
-    put.addImmutable(family, qualifier0, value0);
-    put.addImmutable(family, qualifier1, ts1, value1);
+    // "true" indicates that the input row is immutable
+    Put put = new Put(row, true);
+    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(row)
+        .setFamily(family)
+        .setQualifier(qualifier0)
+        .setTimestamp(put.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(value0)
+        .build())
+      .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(row)
+        .setFamily(family)
+        .setQualifier(qualifier1)
+        .setTimestamp(ts1)
+        .setType(CellBuilder.DataType.Put)
+        .setValue(value1)
+        .build());

     // Verify the cell of family:qualifier0
     Cell cell0 = put.get(family, qualifier0).get(0);
@@ -22,6 +22,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -218,7 +221,14 @@ public class MultiThreadedClientExample extends Configured implements Tool {
       for (int i = 0; i < 30; i++) {
         byte[] rk = Bytes.toBytes(ThreadLocalRandom.current().nextLong());
         Put p = new Put(rk);
-        p.addImmutable(FAMILY, QUAL, value);
+        p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+            .setRow(rk)
+            .setFamily(FAMILY)
+            .setQualifier(QUAL)
+            .setTimestamp(p.getTimeStamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(value)
+            .build());
         puts.add(p);
       }

@@ -248,7 +258,14 @@ public class MultiThreadedClientExample extends Configured implements Tool {
         byte[] value = Bytes.toBytes(Double.toString(ThreadLocalRandom.current().nextDouble()));
         byte[] rk = Bytes.toBytes(ThreadLocalRandom.current().nextLong());
         Put p = new Put(rk);
-        p.addImmutable(FAMILY, QUAL, value);
+        p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+            .setRow(rk)
+            .setFamily(FAMILY)
+            .setQualifier(QUAL)
+            .setTimestamp(p.getTimeStamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(value)
+            .build());
         t.put(p);
       }
       return true;
@@ -39,6 +39,9 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
@@ -238,7 +241,14 @@ public class RowResource extends ResourceBase {
             .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
             .build();
         }
-        put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
+        put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+            .setRow(put.getRow())
+            .setFamily(parts[0])
+            .setQualifier(parts[1])
+            .setTimestamp(cell.getTimestamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(cell.getValue())
+            .build());
       }
       puts.add(put);
       if (LOG.isTraceEnabled()) {
@@ -306,7 +316,14 @@ public class RowResource extends ResourceBase {
           .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
           .build();
       }
-      put.addImmutable(parts[0], parts[1], timestamp, message);
+      put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(put.getRow())
+          .setFamily(parts[0])
+          .setQualifier(parts[1])
+          .setTimestamp(timestamp)
+          .setType(CellBuilder.DataType.Put)
+          .setValue(message)
+          .build());
       table = servlet.getTable(tableResource.getName());
       table.put(put);
       if (LOG.isTraceEnabled()) {
@@ -496,8 +513,14 @@ public class RowResource extends ResourceBase {
             .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
             .build();
         }
-        put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
-
+        put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+            .setRow(put.getRow())
+            .setFamily(parts[0])
+            .setQualifier(parts[1])
+            .setTimestamp(cell.getTimestamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(cell.getValue())
+            .build());
         if(Bytes.equals(col,
             valueToCheckCell.getColumn())) {
           valueToPutCell = cell;
@@ -35,6 +35,9 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -176,8 +179,14 @@ public class FavoredNodeAssignmentHelper {
       if (favoredNodeList != null) {
         put = MetaTableAccessor.makePutFromRegionInfo(regionInfo);
         byte[] favoredNodes = getFavoredNodes(favoredNodeList);
-        put.addImmutable(HConstants.CATALOG_FAMILY, FAVOREDNODES_QUALIFIER,
-            EnvironmentEdgeManager.currentTime(), favoredNodes);
+        put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+            .setRow(put.getRow())
+            .setFamily(HConstants.CATALOG_FAMILY)
+            .setQualifier(FAVOREDNODES_QUALIFIER)
+            .setTimestamp(EnvironmentEdgeManager.currentTime())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(favoredNodes)
+            .build());
         LOG.debug("Create the region " + regionInfo.getRegionNameAsString() +
             " with favored nodes " + favoredNodeList);
       }
@@ -26,6 +26,9 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;

@@ -34,6 +37,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZKNamespaceManager;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -149,10 +153,16 @@ public class TableNamespaceManager {
     if (nsTable == null) {
       throw new IOException(this.getClass().getName() + " isn't ready to serve");
     }
-    Put p = new Put(Bytes.toBytes(ns.getName()));
-    p.addImmutable(HTableDescriptor.NAMESPACE_FAMILY_INFO_BYTES,
-        HTableDescriptor.NAMESPACE_COL_DESC_BYTES,
-        ProtobufUtil.toProtoNamespaceDescriptor(ns).toByteArray());
+    byte[] row = Bytes.toBytes(ns.getName());
+    Put p = new Put(row, true);
+    p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(row)
+        .setFamily(TableDescriptorBuilder.NAMESPACE_FAMILY_INFO_BYTES)
+        .setQualifier(TableDescriptorBuilder.NAMESPACE_COL_DESC_BYTES)
+        .setTimestamp(p.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(ProtobufUtil.toProtoNamespaceDescriptor(ns).toByteArray())
+        .build());
     nsTable.put(p);
   }

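Call sites that own their row array also switch to the two-argument constructor Put(byte[] row, boolean rowIsImmutable) from HBASE-14882 (exercised by the TestPut hunk above), which lets the Put keep a reference to the caller's array instead of defensively copying it; the builder can then reference row directly rather than going through put.getRow(). A sketch (row, family, qualifier, and value are placeholders):

    byte[] row = Bytes.toBytes("ns");
    Put p = new Put(row, true);   // "true": caller promises not to mutate row afterwards
    p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
        .setRow(row)              // safe to pass the array directly
        .setFamily(family)
        .setQualifier(qualifier)
        .setTimestamp(p.getTimeStamp())
        .setType(CellBuilder.DataType.Put)
        .setValue(value)
        .build());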
@@ -23,10 +23,12 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -178,12 +180,24 @@ public class RegionStateStore {
     } else if (regionLocation != null && !regionLocation.equals(lastHost)) {
       // Ideally, if no regionLocation, write null to the hbase:meta but this will confuse clients
       // currently; they want a server to hit. TODO: Make clients wait if no location.
-      put.addImmutable(HConstants.CATALOG_FAMILY, getServerNameColumn(replicaId),
-          Bytes.toBytes(regionLocation.getServerName()));
+      put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(put.getRow())
+          .setFamily(HConstants.CATALOG_FAMILY)
+          .setQualifier(getServerNameColumn(replicaId))
+          .setTimestamp(put.getTimeStamp())
+          .setType(CellBuilder.DataType.Put)
+          .setValue(Bytes.toBytes(regionLocation.getServerName()))
+          .build());
       info.append(", regionLocation=").append(regionLocation);
     }
-    put.addImmutable(HConstants.CATALOG_FAMILY, getStateColumn(replicaId),
-        Bytes.toBytes(state.name()));
+    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(put.getRow())
+        .setFamily(HConstants.CATALOG_FAMILY)
+        .setQualifier(getStateColumn(replicaId))
+        .setTimestamp(put.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(Bytes.toBytes(state.name()))
+        .build());
     LOG.info(info);

     final boolean serialReplication = hasSerialReplicationScope(regionInfo.getTable());
@@ -37,6 +37,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -165,8 +168,14 @@ public class AccessControlLists {
     for (Permission.Action action : actionSet) {
       value[index++] = action.code();
     }
-
-    p.addImmutable(ACL_LIST_FAMILY, key, value);
+    p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+        .setRow(p.getRow())
+        .setFamily(ACL_LIST_FAMILY)
+        .setQualifier(key)
+        .setTimestamp(p.getTimeStamp())
+        .setType(CellBuilder.DataType.Put)
+        .setValue(value)
+        .build());
     if (LOG.isDebugEnabled()) {
       LOG.debug("Writing permission with rowKey "+
           Bytes.toString(rowKey)+" "+
@@ -744,7 +753,7 @@ public class AccessControlLists {
       // Deserialize the table permissions from the KV
       // TODO: This can be improved. Don't build UsersAndPermissions just to unpack it again,
       // use the builder
-      AccessControlProtos.UsersAndPermissions.Builder builder =
+      AccessControlProtos.UsersAndPermissions.Builder builder =
           AccessControlProtos.UsersAndPermissions.newBuilder();
       if (tag.hasArray()) {
         ProtobufUtil.mergeFrom(builder, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
@@ -28,6 +28,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.BitSet;
 import java.util.Collections;
 import java.util.HashMap;

@@ -45,7 +46,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
@@ -205,8 +211,16 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
   protected void addSystemLabel(Region region, Map<String, Integer> labels,
       Map<String, List<Integer>> userAuths) throws IOException {
     if (!labels.containsKey(SYSTEM_LABEL)) {
-      Put p = new Put(Bytes.toBytes(SYSTEM_LABEL_ORDINAL));
-      p.addImmutable(LABELS_TABLE_FAMILY, LABEL_QUALIFIER, Bytes.toBytes(SYSTEM_LABEL));
+      byte[] row = Bytes.toBytes(SYSTEM_LABEL_ORDINAL);
+      Put p = new Put(row);
+      p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(row)
+          .setFamily(LABELS_TABLE_FAMILY)
+          .setQualifier(LABEL_QUALIFIER)
+          .setTimestamp(p.getTimeStamp())
+          .setType(CellBuilder.DataType.Put)
+          .setValue(Bytes.toBytes(SYSTEM_LABEL))
+          .build());
       region.put(p);
       labels.put(SYSTEM_LABEL, SYSTEM_LABEL_ORDINAL);
     }
@@ -218,14 +232,24 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     OperationStatus[] finalOpStatus = new OperationStatus[labels.size()];
     List<Mutation> puts = new ArrayList<>(labels.size());
     int i = 0;
+    ExtendedCellBuilder builder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     for (byte[] label : labels) {
       String labelStr = Bytes.toString(label);
       if (this.labelsCache.getLabelOrdinal(labelStr) > 0) {
         finalOpStatus[i] = new OperationStatus(OperationStatusCode.FAILURE,
             new LabelAlreadyExistsException("Label '" + labelStr + "' already exists"));
       } else {
-        Put p = new Put(Bytes.toBytes(ordinalCounter.get()));
-        p.addImmutable(LABELS_TABLE_FAMILY, LABEL_QUALIFIER, label, LABELS_TABLE_TAGS);
+        byte[] row = Bytes.toBytes(ordinalCounter.get());
+        Put p = new Put(row);
+        p.add(builder.clear()
+            .setRow(row)
+            .setFamily(LABELS_TABLE_FAMILY)
+            .setQualifier(LABEL_QUALIFIER)
+            .setTimestamp(p.getTimeStamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(label)
+            .setTags(Tag.fromList(Arrays.asList(LABELS_TABLE_TAGS)))
+            .build());
         if (LOG.isDebugEnabled()) {
           LOG.debug("Adding the label " + labelStr);
         }
@@ -246,6 +270,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     OperationStatus[] finalOpStatus = new OperationStatus[authLabels.size()];
     List<Mutation> puts = new ArrayList<>(authLabels.size());
     int i = 0;
+    ExtendedCellBuilder builder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     for (byte[] auth : authLabels) {
       String authStr = Bytes.toString(auth);
       int labelOrdinal = this.labelsCache.getLabelOrdinal(authStr);
@@ -254,8 +279,17 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
         finalOpStatus[i] = new OperationStatus(OperationStatusCode.FAILURE,
             new InvalidLabelException("Label '" + authStr + "' doesn't exists"));
       } else {
-        Put p = new Put(Bytes.toBytes(labelOrdinal));
-        p.addImmutable(LABELS_TABLE_FAMILY, user, DUMMY_VALUE, LABELS_TABLE_TAGS);
+        byte[] row = Bytes.toBytes(labelOrdinal);
+        Put p = new Put(row);
+        p.add(builder.clear()
+            .setRow(row)
+            .setFamily(LABELS_TABLE_FAMILY)
+            .setQualifier(user)
+            .setTimestamp(p.getTimeStamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(DUMMY_VALUE)
+            .setTags(Tag.fromList(Arrays.asList(LABELS_TABLE_TAGS)))
+            .build());
         puts.add(p);
       }
       i++;
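Cells that carry tags, as in the visibility-label paths above, cannot use the public CellBuilder: the commit's javadoc treats Tag as an internal detail, so these sites reach for the audience-private ExtendedCellBuilder, which adds setTags. A sketch of the pattern used above (qualifier and value are placeholders):

    ExtendedCellBuilder builder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    put.add(builder.clear()
        .setRow(put.getRow())
        .setFamily(LABELS_TABLE_FAMILY)
        .setQualifier(qualifier)
        .setTimestamp(put.getTimeStamp())
        .setType(CellBuilder.DataType.Put)
        .setValue(value)
        .setTags(Tag.fromList(Arrays.asList(LABELS_TABLE_TAGS)))  // Tag[] serialized to bytes
        .build());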
@@ -18,6 +18,9 @@

 package org.apache.hadoop.hbase.client;

+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CompatibilityFactory;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -149,7 +152,14 @@ public class TestMultiRespectsLimits {

     for (byte[] col:cols) {
       Put p = new Put(row);
-      p.addImmutable(FAMILY, col, value);
+      p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(row)
+          .setFamily(FAMILY)
+          .setQualifier(col)
+          .setTimestamp(p.getTimeStamp())
+          .setType(CellBuilder.DataType.Put)
+          .setValue(value)
+          .build());
       t.put(p);
     }

@@ -28,7 +28,11 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;

+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
@@ -143,11 +147,29 @@ public class TestCompactionLifeCycleTracker {
         .addCoprocessor(CompactionObserver.class.getName()).build());
     try (Table table = UTIL.getConnection().getTable(NAME)) {
       for (int i = 0; i < 100; i++) {
-        table.put(new Put(Bytes.toBytes(i)).addImmutable(CF1, QUALIFIER, Bytes.toBytes(i)));
+        byte[] row = Bytes.toBytes(i);
+        table.put(new Put(row)
+            .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(row)
+              .setFamily(CF1)
+              .setQualifier(QUALIFIER)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .setValue(Bytes.toBytes(i))
+              .build()));
       }
       UTIL.getAdmin().flush(NAME);
       for (int i = 100; i < 200; i++) {
-        table.put(new Put(Bytes.toBytes(i)).addImmutable(CF1, QUALIFIER, Bytes.toBytes(i)));
+        byte[] row = Bytes.toBytes(i);
+        table.put(new Put(row)
+            .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(row)
+              .setFamily(CF1)
+              .setQualifier(QUALIFIER)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .setValue(Bytes.toBytes(i))
+              .build()));
       }
       UTIL.getAdmin().flush(NAME);
     }
@@ -28,7 +28,11 @@ import java.io.InterruptedIOException;
 import java.util.Optional;
 import java.util.concurrent.CountDownLatch;

+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
@@ -189,7 +193,16 @@ public class TestFlushLifeCycleTracker {
   public void test() throws IOException, InterruptedException {
     try (Table table = UTIL.getConnection().getTable(NAME)) {
       for (int i = 0; i < 100; i++) {
-        table.put(new Put(Bytes.toBytes(i)).addImmutable(CF, QUALIFIER, Bytes.toBytes(i)));
+        byte[] row = Bytes.toBytes(i);
+        table.put(new Put(row, true)
+            .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(row)
+              .setFamily(CF)
+              .setQualifier(QUALIFIER)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .setValue(Bytes.toBytes(i))
+              .build()));
       }
     }
     Tracker tracker = new Tracker();
@@ -214,7 +227,16 @@ public class TestFlushLifeCycleTracker {
   public void testNotExecuted() throws IOException, InterruptedException {
     try (Table table = UTIL.getConnection().getTable(NAME)) {
       for (int i = 0; i < 100; i++) {
-        table.put(new Put(Bytes.toBytes(i)).addImmutable(CF, QUALIFIER, Bytes.toBytes(i)));
+        byte[] row = Bytes.toBytes(i);
+        table.put(new Put(row, true)
+            .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(row)
+              .setFamily(CF)
+              .setQualifier(QUALIFIER)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .setValue(Bytes.toBytes(i))
+              .build()));
       }
     }
     // here we may have overlap when calling the CP hooks so we do not assert on TRACKER
@@ -74,6 +74,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -6274,9 +6277,26 @@ public class TestHRegion {
     final HRegion region = initHRegion(tableName, a, c, method, CONF, false, fam1);

     Mutation[] mutations = new Mutation[] {
-        new Put(a).addImmutable(fam1, null, null),
-        new Put(c).addImmutable(fam1, null, null), // this is outside the region boundary
-        new Put(b).addImmutable(fam1, null, null),
+        new Put(a)
+            .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(a)
+              .setFamily(fam1)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .build()),
+        // this is outside the region boundary
+        new Put(c).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(c)
+              .setFamily(fam1)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .build()),
+        new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(b)
+              .setFamily(fam1)
+              .setTimestamp(HConstants.LATEST_TIMESTAMP)
+              .setType(CellBuilder.DataType.Put)
+              .build())
     };

     OperationStatus[] status = region.batchMutate(mutations);
@@ -6307,8 +6327,18 @@ public class TestHRegion {
       @Override
       public Void call() throws Exception {
         Mutation[] mutations = new Mutation[] {
-            new Put(a).addImmutable(fam1, null, null),
-            new Put(b).addImmutable(fam1, null, null),
+            new Put(a).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+                  .setRow(a)
+                  .setFamily(fam1)
+                  .setTimestamp(HConstants.LATEST_TIMESTAMP)
+                  .setType(CellBuilder.DataType.Put)
+                  .build()),
+            new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+                  .setRow(b)
+                  .setFamily(fam1)
+                  .setTimestamp(HConstants.LATEST_TIMESTAMP)
+                  .setType(CellBuilder.DataType.Put)
+                  .build()),
         };

         // this will wait for the row lock, and it will eventually succeed
@@ -38,6 +38,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
|
@ -101,8 +104,16 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
|
|||
assert labelsRegion != null;
|
||||
OperationStatus[] finalOpStatus = new OperationStatus[authLabels.size()];
|
||||
Put p = new Put(user);
|
||||
CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
|
||||
for (byte[] auth : authLabels) {
|
||||
p.addImmutable(LABELS_TABLE_FAMILY, auth, DUMMY_VALUE);
|
||||
p.add(builder.clear()
|
||||
.setRow(p.getRow())
|
||||
.setFamily(LABELS_TABLE_FAMILY)
|
||||
.setQualifier(auth)
|
||||
.setTimestamp(p.getTimeStamp())
|
||||
.setType(CellBuilder.DataType.Put)
|
||||
.setValue(DUMMY_VALUE)
|
||||
.build());
|
||||
}
|
||||
this.labelsRegion.put(p);
|
||||
// This is a testing impl and so not doing any caching
|
||||
|
|
|
@@ -230,7 +230,9 @@ public final class SnapshotTestingUtils {
         }
       });
     }
-
+    for (byte[] b : snapshotFamilies) {
+      LOG.info("[CHIA] " + Bytes.toStringBinary(b));
+    }
     // Verify that there are store files in the specified families
     if (nonEmptyTestFamilies != null) {
       for (final byte[] familyName: nonEmptyTestFamilies) {
@@ -42,7 +42,6 @@ import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.UnsupportedCallbackException;
 import javax.security.sasl.AuthorizeCallback;
 import javax.security.sasl.SaslServer;
-
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionGroup;

@@ -50,6 +49,9 @@ import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -1327,6 +1329,7 @@ public class ThriftServerRunner implements Runnable {
       }

       // I apologize for all this mess :)
+      CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
       for (Mutation m : mutations) {
         byte[][] famAndQf = CellUtil.parseColumn(getBytes(m.column));
         if (m.isDelete) {
@@ -1342,9 +1345,15 @@ public class ThriftServerRunner implements Runnable {
             LOG.warn("No column qualifier specified. Delete is the only mutation supported "
                 + "over the whole column family.");
           } else {
-            put.addImmutable(famAndQf[0], famAndQf[1],
-                m.value != null ? getBytes(m.value)
-                    : HConstants.EMPTY_BYTE_ARRAY);
+            put.add(builder.clear()
+                .setRow(put.getRow())
+                .setFamily(famAndQf[0])
+                .setQualifier(famAndQf[1])
+                .setTimestamp(put.getTimeStamp())
+                .setType(CellBuilder.DataType.Put)
+                .setValue(m.value != null ? getBytes(m.value)
+                    : HConstants.EMPTY_BYTE_ARRAY)
+                .build());
           }
           put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
         }
@@ -1378,7 +1387,7 @@ public class ThriftServerRunner implements Runnable {
         throws IOError, IllegalArgument, TException {
       List<Put> puts = new ArrayList<>();
       List<Delete> deletes = new ArrayList<>();
-
+      CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
       for (BatchMutation batch : rowBatches) {
         byte[] row = getBytes(batch.row);
         List<Mutation> mutations = batch.mutations;
@@ -1403,9 +1412,19 @@ public class ThriftServerRunner implements Runnable {
                   + "over the whole column family.");
             }
             if (famAndQf.length == 2) {
-              put.addImmutable(famAndQf[0], famAndQf[1],
-                  m.value != null ? getBytes(m.value)
-                      : HConstants.EMPTY_BYTE_ARRAY);
+              try {
+                put.add(builder.clear()
+                    .setRow(put.getRow())
+                    .setFamily(famAndQf[0])
+                    .setQualifier(famAndQf[1])
+                    .setTimestamp(put.getTimeStamp())
+                    .setType(CellBuilder.DataType.Put)
+                    .setValue(m.value != null ? getBytes(m.value)
+                        : HConstants.EMPTY_BYTE_ARRAY)
+                    .build());
+              } catch (IOException e) {
+                throw new IllegalArgumentException(e);
+              }
             } else {
               throw new IllegalArgumentException("Invalid famAndQf provided.");
             }
@@ -1877,12 +1896,17 @@ public class ThriftServerRunner implements Runnable {
         addAttributes(put, attributes);

         byte[][] famAndQf = CellUtil.parseColumn(getBytes(mput.column));

-        put.addImmutable(famAndQf[0], famAndQf[1], mput.value != null ? getBytes(mput.value)
-            : HConstants.EMPTY_BYTE_ARRAY);
-
+        put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+            .setRow(put.getRow())
+            .setFamily(famAndQf[0])
+            .setQualifier(famAndQf[1])
+            .setTimestamp(put.getTimeStamp())
+            .setType(CellBuilder.DataType.Put)
+            .setValue(mput.value != null ? getBytes(mput.value)
+                : HConstants.EMPTY_BYTE_ARRAY)
+            .build());
         put.setDurability(mput.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
-      } catch (IllegalArgumentException e) {
+      } catch (IOException | IllegalArgumentException e) {
         LOG.warn(e.getMessage(), e);
         throw new IllegalArgument(Throwables.getStackTraceAsString(e));
       }
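Because the Thrift handler methods cannot add IOException to their generated signatures, the new add(Cell) call is either caught and rethrown unchecked, or folded into an existing catch, and the existing error paths translate it into a Thrift IllegalArgument for clients. The shape of the wrapping used in the hunks above (value stands in for the mutation's value bytes):

    try {
      put.add(builder.clear()
          .setRow(put.getRow())
          .setFamily(famAndQf[0])
          .setQualifier(famAndQf[1])
          .setTimestamp(put.getTimeStamp())
          .setType(CellBuilder.DataType.Put)
          .setValue(value)
          .build());
    } catch (IOException e) {
      throw new IllegalArgumentException(e);  // caught later and surfaced as IllegalArgument
    }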
@@ -28,6 +28,9 @@ import java.util.Map;

 import org.apache.commons.collections4.MapUtils;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
@@ -115,7 +118,7 @@ public class ThriftUtilities {
     if (in.isSetAuthorizations()) {
       out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
     }
-
+
     if (!in.isSetColumns()) {
       return out;
     }
@@ -217,20 +220,35 @@ public class ThriftUtilities {
     }

     for (TColumnValue columnValue : in.getColumnValues()) {
-      if (columnValue.isSetTimestamp()) {
-        out.addImmutable(
-            columnValue.getFamily(), columnValue.getQualifier(), columnValue.getTimestamp(),
-            columnValue.getValue());
-      } else {
-        out.addImmutable(
-            columnValue.getFamily(), columnValue.getQualifier(), columnValue.getValue());
+      try {
+        if (columnValue.isSetTimestamp()) {
+          out.add(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
+              .setRow(out.getRow())
+              .setFamily(columnValue.getFamily())
+              .setQualifier(columnValue.getQualifier())
+              .setTimestamp(columnValue.getTimestamp())
+              .setType(CellBuilder.DataType.Put)
+              .setValue(columnValue.getValue())
+              .build());
+        } else {
+          out.add(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
+              .setRow(out.getRow())
+              .setFamily(columnValue.getFamily())
+              .setQualifier(columnValue.getQualifier())
+              .setTimestamp(out.getTimeStamp())
+              .setType(CellBuilder.DataType.Put)
+              .setValue(columnValue.getValue())
+              .build());
+        }
+      } catch (IOException e) {
+        throw new IllegalArgumentException((e));
       }
     }

     if (in.isSetAttributes()) {
       addAttributes(out,in.getAttributes());
     }
-
+
     if (in.getCellVisibility() != null) {
       out.setCellVisibility(new CellVisibility(in.getCellVisibility().getExpression()));
     }
@@ -437,7 +455,7 @@ public class ThriftUtilities {
     if (in.isSetAttributes()) {
       addAttributes(out,in.getAttributes());
     }
-
+
     if (in.isSetAuthorizations()) {
       out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
     }

@@ -484,7 +502,7 @@ public class ThriftUtilities {
     if (in.isSetDurability()) {
       out.setDurability(durabilityFromThrift(in.getDurability()));
     }
-
+
     if(in.getCellVisibility() != null) {
       out.setCellVisibility(new CellVisibility(in.getCellVisibility().getExpression()));
     }

@@ -505,7 +523,7 @@ public class ThriftUtilities {
     if (append.isSetDurability()) {
       out.setDurability(durabilityFromThrift(append.getDurability()));
     }
-
+
     if(append.getCellVisibility() != null) {
       out.setCellVisibility(new CellVisibility(append.getCellVisibility().getExpression()));
     }