HBASE-17162 Avoid unconditional call to getXXXArray() in write path.
parent 3f7f1c1353
commit 030054bcce
@@ -165,8 +165,7 @@ public class Delete extends Mutation implements Comparable<Row> {
       throw new IOException("The recently added KeyValue is not of type "
           + "delete. Rowkey: " + Bytes.toStringBinary(this.row));
     }
-    if (Bytes.compareTo(this.row, 0, row.length, kv.getRowArray(),
-        kv.getRowOffset(), kv.getRowLength()) != 0) {
+    if (!CellUtil.matchingRow(kv, this.row)) {
       throw new WrongRowIOException("The row in " + kv.toString() +
         " doesn't match the original one " + Bytes.toStringBinary(this.row));
     }

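Replacing the hand-rolled Bytes.compareTo(... getRowArray() ...) check with CellUtil.matchingRow(kv, this.row) lets the cell implementation compare its row in place, so a ByteBuffer-backed (off-heap) cell never has to materialize its row as a byte[]. The same change appears in Increment below. A minimal, self-contained sketch of the idea (illustrative names only, not HBase code):

import java.nio.ByteBuffer;

// Illustrative sketch: comparing an expected row key against a row slice that
// lives inside a ByteBuffer, without copying the slice into a new byte[].
public final class RowMatchSketch {

  static boolean matchingRow(ByteBuffer cellData, int rowPosition, short rowLength,
      byte[] expectedRow) {
    if (rowLength != expectedRow.length) {
      return false;
    }
    for (int i = 0; i < rowLength; i++) {
      if (cellData.get(rowPosition + i) != expectedRow[i]) {
        return false;
      }
    }
    return true;
  }

  public static void main(String[] args) {
    // Pretend the cell's backing buffer holds "..row-1.." and the row starts at offset 2.
    ByteBuffer cellData = ByteBuffer.wrap("..row-1..".getBytes());
    System.out.println(matchingRow(cellData, 2, (short) 5, "row-1".getBytes())); // true
    System.out.println(matchingRow(cellData, 2, (short) 5, "row-2".getBytes())); // false
  }
}
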
@@ -98,9 +98,7 @@ public class Increment extends Mutation implements Comparable<Row> {
     byte [] family = CellUtil.cloneFamily(cell);
     List<Cell> list = getCellList(family);
     //Checking that the row of the kv is the same as the put
-    int res = Bytes.compareTo(this.row, 0, row.length,
-        cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-    if (res != 0) {
+    if (!CellUtil.matchingRow(cell, this.row)) {
       throw new WrongRowIOException("The row in " + cell +
         " doesn't match the original one " + Bytes.toStringBinary(this.row));
     }

@@ -739,8 +739,8 @@ public final class ProtobufUtil {
       throws IOException {
     MutationType type = proto.getMutateType();
     assert type == MutationType.APPEND : type.name();
-    byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
-    Append append = null;
+    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
+    Append append = row != null ? new Append(row) : null;
     int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0;
     if (cellCount > 0) {
       // The proto has metadata only and the data is separate to be found in the cellScanner.

@@ -760,7 +760,9 @@ public final class ProtobufUtil {
         append.add(cell);
       }
     } else {
-      append = new Append(row);
+      if (append == null) {
+        throw new IllegalArgumentException("row cannot be null");
+      }
       for (ColumnValue column: proto.getColumnValueList()) {
         byte[] family = column.getFamily().toByteArray();
         for (QualifierValue qv: column.getQualifierValueList()) {

@@ -819,7 +821,7 @@ public final class ProtobufUtil {
     MutationType type = proto.getMutateType();
     assert type == MutationType.INCREMENT : type.name();
     byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
-    Increment increment = null;
+    Increment increment = row != null ? new Increment(row) : null;
     int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0;
     if (cellCount > 0) {
       // The proto has metadata only and the data is separate to be found in the cellScanner.

@@ -839,7 +841,9 @@ public final class ProtobufUtil {
         increment.add(cell);
       }
     } else {
-      increment = new Increment(row);
+      if (increment == null) {
+        throw new IllegalArgumentException("row cannot be null");
+      }
       for (ColumnValue column: proto.getColumnValueList()) {
         byte[] family = column.getFamily().toByteArray();
         for (QualifierValue qv: column.getQualifierValueList()) {

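In both toAppend and toIncrement the mutation is now built from the proto-supplied row up front, and the metadata-only branch fails fast when the row is missing instead of hitting a null later. The reworked control flow, as a hedged sketch (resolveRow and its signature are illustrative, not the ProtobufUtil API):

// Sketch of the guard pattern: take the row from the request when present, tolerate a
// missing row only on the path that can recover it from the first scanned cell, and
// otherwise reject the request immediately.
public final class RowGuardSketch {

  static byte[] resolveRow(byte[] rowFromProto, boolean cellsFollowInScanner) {
    if (rowFromProto != null) {
      return rowFromProto;                     // normal case: the proto carries the row
    }
    if (cellsFollowInScanner) {
      return null;                             // row will be taken from the first scanned cell
    }
    throw new IllegalArgumentException("row cannot be null");
  }

  public static void main(String[] args) {
    System.out.println(new String(resolveRow("r1".getBytes(), false)));   // r1
    System.out.println(resolveRow(null, true));                           // null (filled in later)
    try {
      resolveRow(null, false);
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());                  // rejected: row cannot be null
    }
  }
}
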
@@ -895,12 +899,9 @@ public final class ProtobufUtil {
         }
         Cell cell = cellScanner.current();
         if (get == null) {
-          get = new Get(Bytes.copy(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
+          get = new Get(CellUtil.cloneRow(cell));
         }
-        get.addColumn(
-          Bytes.copy(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()),
-          Bytes.copy(cell.getQualifierArray(), cell.getQualifierOffset(),
-            cell.getQualifierLength()));
+        get.addColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell));
       }
     } else {
       get = new Get(row);

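CellUtil.cloneRow/cloneFamily/cloneQualifier copy exactly the slice they need, and for buffer-backed cells they can copy straight out of the backing ByteBuffer rather than going through getRowArray()/getFamilyArray()/getQualifierArray() first. A rough equivalent of what such a clone helper does (illustrative sketch, not the CellUtil implementation):

import java.nio.ByteBuffer;
import java.util.Arrays;

// Sketch: cloning one component of a cell. Array-backed cells copy a range of the
// existing array; buffer-backed cells bulk-copy the range out of the ByteBuffer.
public final class CloneSliceSketch {

  static byte[] cloneSlice(byte[] array, int offset, int length) {
    return Arrays.copyOfRange(array, offset, offset + length);
  }

  static byte[] cloneSlice(ByteBuffer buffer, int position, int length) {
    byte[] out = new byte[length];
    ByteBuffer dup = buffer.duplicate();   // don't disturb the shared buffer's position
    dup.position(position);
    dup.get(out, 0, length);
    return out;
  }

  public static void main(String[] args) {
    byte[] backing = "rowkeyFAMqualifier".getBytes();
    System.out.println(new String(cloneSlice(backing, 0, 6)));                   // rowkey
    System.out.println(new String(cloneSlice(ByteBuffer.wrap(backing), 6, 3)));  // FAM
  }
}
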
@@ -1834,6 +1834,20 @@ public final class CellUtil {
     return createFirstOnRow(row, 0, (short)row.length);
   }

+  /**
+   * @return Cell that is smaller than all other possible Cells for the given Cell's row and passed
+   *         family.
+   */
+  public static Cell createFirstOnRowFamily(Cell cell, byte[] fArray, int foff, int flen) {
+    if (cell instanceof ByteBufferCell) {
+      return new FirstOnRowColByteBufferCell(((ByteBufferCell) cell).getRowByteBuffer(),
+          ((ByteBufferCell) cell).getRowPosition(), cell.getRowLength(), ByteBuffer.wrap(fArray),
+          foff, (byte) flen, HConstants.EMPTY_BYTE_BUFFER, 0, 0);
+    }
+    return new FirstOnRowColCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
+        fArray, foff, (byte) flen, HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
   /**
    * Create a Cell that is smaller than all other possible Cells for the given Cell's row.
    * The family length is considered to be 0

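The new createFirstOnRowFamily builds a synthetic key that sorts before every real cell sharing the given row and family (empty qualifier and, in HBase's cell ordering, a timestamp/type that sorts first), and it reuses the source cell's row storage instead of cloning it. The effect of such a "first on" bound, shown with a simplified comparator and TreeSet (an illustration, not HBase's CellComparator):

import java.util.Comparator;
import java.util.TreeSet;

// Sketch: a synthetic "first on row+family" key (empty qualifier) sorts before every
// real cell of that row and family, so tailSet(bound) starts exactly at that family.
// Simplified 3-part keys stand in for HBase cells.
public final class FirstOnRowFamilySketch {

  public static void main(String[] args) {
    Comparator<String[]> cmp = (a, b) -> {
      int d = a[0].compareTo(b[0]);            // row
      if (d != 0) return d;
      d = a[1].compareTo(b[1]);                // family
      if (d != 0) return d;
      return a[2].compareTo(b[2]);             // qualifier
    };
    TreeSet<String[]> cells = new TreeSet<>(cmp);
    cells.add(new String[] {"r1", "f1", "qa"});
    cells.add(new String[] {"r1", "f2", "qa"});
    cells.add(new String[] {"r2", "f1", "qa"});

    String[] firstOnRowFamily = {"r1", "f2", ""};   // synthetic lower bound
    for (String[] c : cells.tailSet(firstOnRowFamily)) {
      System.out.println(String.join("/", c));      // r1/f2/qa then r2/f1/qa
    }
  }
}
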
@@ -22,6 +22,7 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;

@@ -32,6 +33,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;

@@ -47,6 +49,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.BloomFilterWriter;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.io.Writable;

@@ -346,8 +349,7 @@ public class HFileWriterImpl implements HFile.Writer {
   public static Cell getMidpoint(final CellComparator comparator, final Cell left,
       final Cell right) {
     // TODO: Redo so only a single pass over the arrays rather than one to
-    // compare and then a
-    // second composing midpoint.
+    // compare and then a second composing midpoint.
     if (right == null) {
       throw new IllegalArgumentException("right cell can not be null");
     }

@@ -356,8 +358,7 @@ public class HFileWriterImpl implements HFile.Writer {
     }
     // If Cells from meta table, don't mess around. meta table Cells have schema
     // (table,startrow,hash) so can't be treated as plain byte arrays. Just skip
-    // out without
-    // trying to do this optimization.
+    // out without trying to do this optimization.
     if (comparator instanceof MetaCellComparator) {
       return right;
     }

@@ -366,36 +367,44 @@ public class HFileWriterImpl implements HFile.Writer {
       throw new IllegalArgumentException("Left row sorts after right row; left="
           + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
     }
+    byte[] midRow;
+    boolean bufferBacked = left instanceof ByteBufferCell && right instanceof ByteBufferCell;
     if (diff < 0) {
       // Left row is < right row.
-      byte[] midRow = getMinimumMidpointArray(left.getRowArray(), left.getRowOffset(),
-          left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
+      if (bufferBacked) {
+        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getRowByteBuffer(),
+            ((ByteBufferCell) left).getRowPosition(), left.getRowLength(),
+            ((ByteBufferCell) right).getRowByteBuffer(),
+            ((ByteBufferCell) right).getRowPosition(), right.getRowLength());
+      } else {
+        midRow = getMinimumMidpointArray(left.getRowArray(), left.getRowOffset(),
+            left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
+      }
       // If midRow is null, just return 'right'. Can't do optimization.
-      if (midRow == null)
-        return right;
-      return CellUtil.createCell(midRow);
+      if (midRow == null) return right;
+      return CellUtil.createFirstOnRow(midRow);
     }
     // Rows are same. Compare on families.
-    int lFamOffset = left.getFamilyOffset();
-    int rFamOffset = right.getFamilyOffset();
-    int lFamLength = left.getFamilyLength();
-    int rFamLength = right.getFamilyLength();
     diff = CellComparator.compareFamilies(left, right);
     if (diff > 0) {
       throw new IllegalArgumentException("Left family sorts after right family; left="
           + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
     }
     if (diff < 0) {
-      byte[] midRow = getMinimumMidpointArray(left.getFamilyArray(), lFamOffset,
-          lFamLength, right.getFamilyArray(), rFamOffset,
-          rFamLength);
+      if (bufferBacked) {
+        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getFamilyByteBuffer(),
+            ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(),
+            ((ByteBufferCell) right).getFamilyByteBuffer(),
+            ((ByteBufferCell) right).getFamilyPosition(), right.getFamilyLength());
+      } else {
+        midRow = getMinimumMidpointArray(left.getFamilyArray(), left.getFamilyOffset(),
+            left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(),
+            right.getFamilyLength());
+      }
       // If midRow is null, just return 'right'. Can't do optimization.
-      if (midRow == null)
-        return right;
+      if (midRow == null) return right;
       // Return new Cell where we use right row and then a mid sort family.
-      return CellUtil.createCell(right.getRowArray(), right.getRowOffset(), right.getRowLength(),
-          midRow, 0, midRow.length, HConstants.EMPTY_BYTE_ARRAY, 0,
-          HConstants.EMPTY_BYTE_ARRAY.length);
+      return CellUtil.createFirstOnRowFamily(right, midRow, 0, midRow.length);
     }
     // Families are same. Compare on qualifiers.
     diff = CellComparator.compareQualifiers(left, right);

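getMidpoint now decides once, up front, whether both cells are ByteBuffer-backed and then stays on that representation for every row, family, and qualifier comparison, so off-heap cells never fall back to getRowArray()/getFamilyArray(). Reduced to a tiny sketch of reading bytes from either backing store (the real code avoids per-byte instanceof checks by picking the path once and calling a ByteBuffer-specific helper; the types below are stand-ins, not the HBase ByteBufferCell API):

import java.nio.ByteBuffer;

// Sketch: the same byte-wise comparison can run over a byte[] or a ByteBuffer; what the
// patch changes is choosing the representation once instead of forcing everything
// through getXXXArray().
public final class DispatchSketch {

  static byte byteAt(Object backing, int index) {
    return (backing instanceof ByteBuffer)
        ? ((ByteBuffer) backing).get(index)
        : ((byte[]) backing)[index];
  }

  static int commonPrefix(Object left, Object right, int length) {
    int i = 0;
    while (i < length && byteAt(left, i) == byteAt(right, i)) {
      i++;
    }
    return i;
  }

  public static void main(String[] args) {
    System.out.println(commonPrefix("abcx".getBytes(), ByteBuffer.wrap("abcz".getBytes()), 4)); // 3
  }
}
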
@@ -404,17 +413,20 @@ public class HFileWriterImpl implements HFile.Writer {
           + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
     }
     if (diff < 0) {
-      byte[] midRow = getMinimumMidpointArray(left.getQualifierArray(), left.getQualifierOffset(),
-          left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
-          right.getQualifierLength());
+      if (bufferBacked) {
+        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getQualifierByteBuffer(),
+            ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(),
+            ((ByteBufferCell) right).getQualifierByteBuffer(),
+            ((ByteBufferCell) right).getQualifierPosition(), right.getQualifierLength());
+      } else {
+        midRow = getMinimumMidpointArray(left.getQualifierArray(), left.getQualifierOffset(),
+            left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
+            right.getQualifierLength());
+      }
       // If midRow is null, just return 'right'. Can't do optimization.
-      if (midRow == null)
-        return right;
-      // Return new Cell where we use right row and family and then a mid sort
-      // qualifier.
-      return CellUtil.createCell(right.getRowArray(), right.getRowOffset(), right.getRowLength(),
-          right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength(), midRow, 0,
-          midRow.length);
+      if (midRow == null) return right;
+      // Return new Cell where we use right row and family and then a mid sort qualifier.
+      return CellUtil.createFirstOnRowCol(right, midRow, 0, midRow.length);
     }
     // No opportunity for optimization. Just return right key.
     return right;

@@ -459,6 +471,35 @@ public class HFileWriterImpl implements HFile.Writer {
     return minimumMidpointArray;
   }

+  private static byte[] getMinimumMidpointArray(ByteBuffer left, int leftOffset, int leftLength,
+      ByteBuffer right, int rightOffset, int rightLength) {
+    // rows are different
+    int minLength = leftLength < rightLength ? leftLength : rightLength;
+    int diffIdx = 0;
+    while (diffIdx < minLength && ByteBufferUtils.toByte(left,
+        leftOffset + diffIdx) == ByteBufferUtils.toByte(right, rightOffset + diffIdx)) {
+      diffIdx++;
+    }
+    byte[] minMidpoint = null;
+    if (diffIdx >= minLength) {
+      // leftKey's row is prefix of rightKey's.
+      minMidpoint = new byte[diffIdx + 1];
+      ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
+    } else {
+      int diffByte = ByteBufferUtils.toByte(left, leftOffset + diffIdx);
+      if ((0xff & diffByte) < 0xff
+          && (diffByte + 1) < (ByteBufferUtils.toByte(right, rightOffset + diffIdx) & 0xff)) {
+        minMidpoint = new byte[diffIdx + 1];
+        ByteBufferUtils.copyFromBufferToArray(minMidpoint, left, leftOffset, 0, diffIdx);
+        minMidpoint[diffIdx] = (byte) (diffByte + 1);
+      } else {
+        minMidpoint = new byte[diffIdx + 1];
+        ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
+      }
+    }
+    return minMidpoint;
+  }
+
   /** Gives inline block writers an opportunity to contribute blocks. */
   private void writeInlineBlocks(boolean closing) throws IOException {
     for (InlineBlockWriter ibw : inlineBlockWriters) {

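The new ByteBuffer overload mirrors the existing array-based getMinimumMidpointArray just above it: find the common prefix of the two keys, then emit the shortest byte[] that still separates them, falling back to right's prefix when no shorter separator exists. The same logic restated on plain arrays, with a few worked cases (an illustration, not the HBase helper itself):

import java.util.Arrays;

// Re-statement of the midpoint logic on plain byte arrays, for illustration.
// Goal: a short key m with left < m <= right, so HFile index entries stay small.
public final class MidpointSketch {

  static byte[] minimumMidpoint(byte[] left, byte[] right) {
    int minLength = Math.min(left.length, right.length);
    int diffIdx = 0;
    while (diffIdx < minLength && left[diffIdx] == right[diffIdx]) {
      diffIdx++;
    }
    byte[] mid = new byte[diffIdx + 1];
    if (diffIdx >= minLength) {
      // left is a prefix of right: take right's prefix, one byte past the common part.
      System.arraycopy(right, 0, mid, 0, diffIdx + 1);
    } else {
      int diffByte = left[diffIdx] & 0xff;
      if (diffByte < 0xff && diffByte + 1 < (right[diffIdx] & 0xff)) {
        // Room between the differing bytes: common prefix + (left's byte + 1).
        System.arraycopy(left, 0, mid, 0, diffIdx);
        mid[diffIdx] = (byte) (diffByte + 1);
      } else {
        // No shorter separator: fall back to right's prefix.
        System.arraycopy(right, 0, mid, 0, diffIdx + 1);
      }
    }
    return mid;
  }

  public static void main(String[] args) {
    System.out.println(new String(minimumMidpoint("abc".getBytes(), "abz".getBytes()))); // abd
    System.out.println(new String(minimumMidpoint("abc".getBytes(), "abd".getBytes()))); // abd
    System.out.println(new String(minimumMidpoint("ab".getBytes(),  "abc".getBytes()))); // abc
  }
}
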
@@ -7621,7 +7621,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       // throw DoNotRetryIOException instead of IllegalArgumentException
       throw new DoNotRetryIOException("Field is not a long, it's " + len + " bytes wide");
     }
-    return Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), len);
+    return CellUtil.getValueAsLong(cell);
   }

   /**

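CellUtil.getValueAsLong decodes the 8-byte counter in place from whatever backs the cell, where the old code needed the value reachable as an on-heap array for Bytes.toLong. The decoding step, sketched (illustrative helper, not the CellUtil implementation):

import java.nio.ByteBuffer;

// Sketch: reading a big-endian long straight out of the cell's backing storage at the
// value offset, with no intermediate byte[] clone of the value.
public final class ValueAsLongSketch {

  static long valueAsLong(ByteBuffer backing, int valuePosition) {
    return backing.getLong(valuePosition);   // ByteBuffer is big-endian by default, like Bytes.toLong
  }

  public static void main(String[] args) {
    ByteBuffer backing = ByteBuffer.allocate(12);
    backing.putInt(0xCAFEBABE);      // pretend this is key/metadata before the value
    backing.putLong(42L);            // the 8-byte counter value at offset 4
    System.out.println(valueAsLong(backing, 4));   // 42
  }
}
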
@@ -24,8 +24,8 @@ import java.util.SortedSet;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.ClassSize;

@@ -59,9 +59,7 @@ public class MutableSegment extends Segment {

       // Get the Cells for the row/family/qualifier regardless of timestamp.
       // For this case we want to clean up any other puts
-      Cell firstCell = KeyValueUtil.createFirstOnRow(cell.getRowArray(), cell.getRowOffset(),
-          cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+      Cell firstCell = CellUtil.createFirstOnRowColTS(cell, HConstants.LATEST_TIMESTAMP);
       SortedSet<Cell> ss = this.tailSet(firstCell);
       Iterator<Cell> it = ss.iterator();
       // versions visible to oldest scanner

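The synthetic seek key here reuses the incoming cell's row, family, and qualifier and pins the timestamp to HConstants.LATEST_TIMESTAMP; because versions of a column sort newest-first, that key sorts at or before every stored version, so tailSet(firstCell) walks all of them without cloning any component arrays. The ordering effect, shown with a simplified newest-first comparator (a sketch, not HBase's comparator):

import java.util.Comparator;
import java.util.TreeSet;

// Sketch: versions of one column ordered newest-first. A bound carrying the maximum
// timestamp sorts ahead of every stored version, so tailSet(bound) yields all of them.
public final class LatestTimestampBoundSketch {

  static final long LATEST_TIMESTAMP = Long.MAX_VALUE;   // stand-in for HConstants.LATEST_TIMESTAMP

  public static void main(String[] args) {
    // key = {qualifier id, timestamp}; same qualifier compares by descending timestamp.
    Comparator<long[]> newestFirst = (a, b) -> {
      int d = Long.compare(a[0], b[0]);          // qualifier id, ascending
      if (d != 0) return d;
      return Long.compare(b[1], a[1]);           // timestamp, descending
    };
    TreeSet<long[]> versions = new TreeSet<>(newestFirst);
    versions.add(new long[] {7, 1000});
    versions.add(new long[] {7, 2000});
    versions.add(new long[] {7, 3000});

    long[] firstOnCol = {7, LATEST_TIMESTAMP};   // synthetic lower bound
    for (long[] v : versions.tailSet(firstOnCol)) {
      System.out.println("ts=" + v[1]);          // ts=3000, ts=2000, ts=1000
    }
  }
}
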
@@ -261,9 +261,7 @@ public class AccessController extends BaseMasterAndRegionObserver
       for (Map.Entry<byte[], List<Cell>> f : familyMap.entrySet()) {
         List<Cell> cells = f.getValue();
         for (Cell cell: cells) {
-          if (Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(),
-              cell.getFamilyLength(), AccessControlLists.ACL_LIST_FAMILY, 0,
-              AccessControlLists.ACL_LIST_FAMILY.length)) {
+          if (CellUtil.matchingFamily(cell, AccessControlLists.ACL_LIST_FAMILY)) {
             entries.add(CellUtil.cloneRow(cell));
           }
         }