HBASE-17162 Avoid unconditional call to getXXXArray() in write path.
commit 030054bcce
parent 3f7f1c1353
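The pattern applied throughout this patch: for Cell implementations backed by off-heap ByteBuffers (ByteBufferCell), the getRowArray()/getFamilyArray()/getQualifierArray()/getValueArray() accessors typically have to copy the bytes into a fresh array, so calling them unconditionally in the write path copies data on every mutation. The CellUtil helpers dispatch on the concrete Cell type and compare or extract in place. A minimal before/after sketch of the shape of the change (illustration only, not part of the patch; class and method names below are mine, and it assumes the HBase client library of this era on the classpath):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class RowMatchSketch {
  // Before: always materializes the row as a byte[], which copies for ByteBuffer-backed cells.
  static boolean rowMatchesOld(Cell cell, byte[] expectedRow) {
    return Bytes.compareTo(expectedRow, 0, expectedRow.length,
        cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()) == 0;
  }

  // After: CellUtil checks the Cell implementation and compares without forcing a copy.
  static boolean rowMatchesNew(Cell cell, byte[] expectedRow) {
    return CellUtil.matchingRow(cell, expectedRow);
  }
}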
@@ -165,8 +165,7 @@ public class Delete extends Mutation implements Comparable<Row> {
       throw new IOException("The recently added KeyValue is not of type "
           + "delete. Rowkey: " + Bytes.toStringBinary(this.row));
     }
-    if (Bytes.compareTo(this.row, 0, row.length, kv.getRowArray(),
-        kv.getRowOffset(), kv.getRowLength()) != 0) {
+    if (!CellUtil.matchingRow(kv, this.row)) {
       throw new WrongRowIOException("The row in " + kv.toString() +
         " doesn't match the original one " + Bytes.toStringBinary(this.row));
     }
@@ -98,9 +98,7 @@ public class Increment extends Mutation implements Comparable<Row> {
     byte [] family = CellUtil.cloneFamily(cell);
     List<Cell> list = getCellList(family);
     //Checking that the row of the kv is the same as the put
-    int res = Bytes.compareTo(this.row, 0, row.length,
-        cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-    if (res != 0) {
+    if (!CellUtil.matchingRow(cell, this.row)) {
       throw new WrongRowIOException("The row in " + cell +
         " doesn't match the original one " + Bytes.toStringBinary(this.row));
     }
@@ -739,8 +739,8 @@ public final class ProtobufUtil {
   throws IOException {
     MutationType type = proto.getMutateType();
     assert type == MutationType.APPEND : type.name();
-    byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
-    Append append = null;
+    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
+    Append append = row != null ? new Append(row) : null;
     int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0;
     if (cellCount > 0) {
       // The proto has metadata only and the data is separate to be found in the cellScanner.
@@ -760,7 +760,9 @@ public final class ProtobufUtil {
         append.add(cell);
       }
     } else {
-      append = new Append(row);
+      if (append == null) {
+        throw new IllegalArgumentException("row cannot be null");
+      }
       for (ColumnValue column: proto.getColumnValueList()) {
         byte[] family = column.getFamily().toByteArray();
         for (QualifierValue qv: column.getQualifierValueList()) {
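Note on the toAppend() change above: the proto's row field may legitimately be absent when the cell data travels separately in the cellScanner, so the Append is now constructed up front only when a row is present (row != null ? new Append(row) : null). In the metadata-only branch, a genuinely missing row now fails fast with an explicit IllegalArgumentException instead of failing later inside new Append(null). The same reshaping is applied to toIncrement() in the hunks below.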
@@ -819,7 +821,7 @@ public final class ProtobufUtil {
     MutationType type = proto.getMutateType();
     assert type == MutationType.INCREMENT : type.name();
     byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
-    Increment increment = null;
+    Increment increment = row != null ? new Increment(row) : null;
     int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0;
     if (cellCount > 0) {
       // The proto has metadata only and the data is separate to be found in the cellScanner.
@@ -839,7 +841,9 @@ public final class ProtobufUtil {
         increment.add(cell);
       }
     } else {
-      increment = new Increment(row);
+      if (increment == null) {
+        throw new IllegalArgumentException("row cannot be null");
+      }
       for (ColumnValue column: proto.getColumnValueList()) {
         byte[] family = column.getFamily().toByteArray();
         for (QualifierValue qv: column.getQualifierValueList()) {
@@ -895,12 +899,9 @@ public final class ProtobufUtil {
         }
         Cell cell = cellScanner.current();
         if (get == null) {
-          get = new Get(Bytes.copy(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
+          get = new Get(CellUtil.cloneRow(cell));
         }
-        get.addColumn(
-            Bytes.copy(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()),
-            Bytes.copy(cell.getQualifierArray(), cell.getQualifierOffset(),
-              cell.getQualifierLength()));
+        get.addColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell));
       }
     } else {
       get = new Get(row);
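In toGet() above, the explicit Bytes.copy over the getXXXArray()/getXXXOffset()/getXXXLength() triples is replaced by the CellUtil clone helpers. Both forms hand back a fresh byte[] containing just the requested component; the helpers additionally copy straight out of the backing ByteBuffer for off-heap cells instead of going through an intermediate array. A tiny equivalence sketch (illustration only, names are mine):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class CloneSketch {
  // Before: copy the family out of whatever getFamilyArray() returns.
  static byte[] familyOld(Cell cell) {
    return Bytes.copy(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
  }

  // After: one call that is aware of ByteBuffer-backed cells.
  static byte[] familyNew(Cell cell) {
    return CellUtil.cloneFamily(cell);
  }
}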
@@ -1834,6 +1834,20 @@ public final class CellUtil {
     return createFirstOnRow(row, 0, (short)row.length);
   }
 
+  /**
+   * @return Cell that is smaller than all other possible Cells for the given Cell's row and passed
+   *         family.
+   */
+  public static Cell createFirstOnRowFamily(Cell cell, byte[] fArray, int foff, int flen) {
+    if (cell instanceof ByteBufferCell) {
+      return new FirstOnRowColByteBufferCell(((ByteBufferCell) cell).getRowByteBuffer(),
+          ((ByteBufferCell) cell).getRowPosition(), cell.getRowLength(), ByteBuffer.wrap(fArray),
+          foff, (byte) flen, HConstants.EMPTY_BYTE_BUFFER, 0, 0);
+    }
+    return new FirstOnRowColCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
+        fArray, foff, (byte) flen, HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
   /**
    * Create a Cell that is smaller than all other possible Cells for the given Cell's row.
    * The family length is considered to be 0
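The new createFirstOnRowFamily helper builds a synthetic "fake key" cell that sorts before every real cell carrying the given cell's row and the supplied family, picking a ByteBuffer-backed implementation when the source cell is off-heap so the row bytes are referenced rather than copied. It is consumed further down by HFileWriterImpl.getMidpoint(), roughly like this (illustrative fragment mirroring the getMidpoint hunk below, where midRow is the shortened family separator computed there):

Cell fakeKey = CellUtil.createFirstOnRowFamily(right, midRow, 0, midRow.length);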
@@ -22,6 +22,7 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -32,6 +33,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -47,6 +49,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.BloomFilterWriter;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.io.Writable;
@@ -346,8 +349,7 @@ public class HFileWriterImpl implements HFile.Writer {
   public static Cell getMidpoint(final CellComparator comparator, final Cell left,
       final Cell right) {
     // TODO: Redo so only a single pass over the arrays rather than one to
-    // compare and then a
-    // second composing midpoint.
+    // compare and then a second composing midpoint.
     if (right == null) {
       throw new IllegalArgumentException("right cell can not be null");
     }
@@ -356,8 +358,7 @@ public class HFileWriterImpl implements HFile.Writer {
     }
     // If Cells from meta table, don't mess around. meta table Cells have schema
     // (table,startrow,hash) so can't be treated as plain byte arrays. Just skip
-    // out without
-    // trying to do this optimization.
+    // out without trying to do this optimization.
     if (comparator instanceof MetaCellComparator) {
       return right;
     }
@@ -366,36 +367,44 @@ public class HFileWriterImpl implements HFile.Writer {
       throw new IllegalArgumentException("Left row sorts after right row; left="
           + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
     }
+    byte[] midRow;
+    boolean bufferBacked = left instanceof ByteBufferCell && right instanceof ByteBufferCell;
     if (diff < 0) {
       // Left row is < right row.
-      byte[] midRow = getMinimumMidpointArray(left.getRowArray(), left.getRowOffset(),
-          left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
+      if (bufferBacked) {
+        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getRowByteBuffer(),
+            ((ByteBufferCell) left).getRowPosition(), left.getRowLength(),
+            ((ByteBufferCell) right).getRowByteBuffer(),
+            ((ByteBufferCell) right).getRowPosition(), right.getRowLength());
+      } else {
+        midRow = getMinimumMidpointArray(left.getRowArray(), left.getRowOffset(),
+            left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength());
+      }
       // If midRow is null, just return 'right'. Can't do optimization.
-      if (midRow == null)
-        return right;
-      return CellUtil.createCell(midRow);
+      if (midRow == null) return right;
+      return CellUtil.createFirstOnRow(midRow);
     }
     // Rows are same. Compare on families.
-    int lFamOffset = left.getFamilyOffset();
-    int rFamOffset = right.getFamilyOffset();
-    int lFamLength = left.getFamilyLength();
-    int rFamLength = right.getFamilyLength();
     diff = CellComparator.compareFamilies(left, right);
     if (diff > 0) {
       throw new IllegalArgumentException("Left family sorts after right family; left="
           + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
     }
     if (diff < 0) {
-      byte[] midRow = getMinimumMidpointArray(left.getFamilyArray(), lFamOffset,
-          lFamLength, right.getFamilyArray(), rFamOffset,
-          rFamLength);
+      if (bufferBacked) {
+        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getFamilyByteBuffer(),
+            ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(),
+            ((ByteBufferCell) right).getFamilyByteBuffer(),
+            ((ByteBufferCell) right).getFamilyPosition(), right.getFamilyLength());
+      } else {
+        midRow = getMinimumMidpointArray(left.getFamilyArray(), left.getFamilyOffset(),
+            left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(),
+            right.getFamilyLength());
+      }
       // If midRow is null, just return 'right'. Can't do optimization.
-      if (midRow == null)
-        return right;
+      if (midRow == null) return right;
       // Return new Cell where we use right row and then a mid sort family.
-      return CellUtil.createCell(right.getRowArray(), right.getRowOffset(), right.getRowLength(),
-          midRow, 0, midRow.length, HConstants.EMPTY_BYTE_ARRAY, 0,
-          HConstants.EMPTY_BYTE_ARRAY.length);
+      return CellUtil.createFirstOnRowFamily(right, midRow, 0, midRow.length);
     }
     // Families are same. Compare on qualifiers.
     diff = CellComparator.compareQualifiers(left, right);
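Two things change in the hunk above. First, midRow and the bufferBacked flag are hoisted so that each of the row, family and qualifier branches can choose between the new ByteBuffer-based getMinimumMidpointArray overload and the existing array-based one, instead of always forcing the arrays out of the cells. Second, the returned boundary cells are now built with the CellUtil.createFirstOnRow* helpers, which wrap the existing bytes in lightweight fake-key cells rather than assembling a full KeyValue via createCell; the midpoint search itself is unchanged.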
@@ -404,17 +413,20 @@ public class HFileWriterImpl implements HFile.Writer {
           + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right));
     }
     if (diff < 0) {
-      byte[] midRow = getMinimumMidpointArray(left.getQualifierArray(), left.getQualifierOffset(),
-          left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
-          right.getQualifierLength());
+      if (bufferBacked) {
+        midRow = getMinimumMidpointArray(((ByteBufferCell) left).getQualifierByteBuffer(),
+            ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(),
+            ((ByteBufferCell) right).getQualifierByteBuffer(),
+            ((ByteBufferCell) right).getQualifierPosition(), right.getQualifierLength());
+      } else {
+        midRow = getMinimumMidpointArray(left.getQualifierArray(), left.getQualifierOffset(),
+            left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
+            right.getQualifierLength());
+      }
       // If midRow is null, just return 'right'. Can't do optimization.
-      if (midRow == null)
-        return right;
-      // Return new Cell where we use right row and family and then a mid sort
-      // qualifier.
-      return CellUtil.createCell(right.getRowArray(), right.getRowOffset(), right.getRowLength(),
-          right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength(), midRow, 0,
-          midRow.length);
+      if (midRow == null) return right;
+      // Return new Cell where we use right row and family and then a mid sort qualifier.
+      return CellUtil.createFirstOnRowCol(right, midRow, 0, midRow.length);
     }
     // No opportunity for optimization. Just return right key.
     return right;
@@ -459,6 +471,35 @@ public class HFileWriterImpl implements HFile.Writer {
     return minimumMidpointArray;
   }
 
+  private static byte[] getMinimumMidpointArray(ByteBuffer left, int leftOffset, int leftLength,
+      ByteBuffer right, int rightOffset, int rightLength) {
+    // rows are different
+    int minLength = leftLength < rightLength ? leftLength : rightLength;
+    int diffIdx = 0;
+    while (diffIdx < minLength && ByteBufferUtils.toByte(left,
+        leftOffset + diffIdx) == ByteBufferUtils.toByte(right, rightOffset + diffIdx)) {
+      diffIdx++;
+    }
+    byte[] minMidpoint = null;
+    if (diffIdx >= minLength) {
+      // leftKey's row is prefix of rightKey's.
+      minMidpoint = new byte[diffIdx + 1];
+      ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
+    } else {
+      int diffByte = ByteBufferUtils.toByte(left, leftOffset + diffIdx);
+      if ((0xff & diffByte) < 0xff
+          && (diffByte + 1) < (ByteBufferUtils.toByte(right, rightOffset + diffIdx) & 0xff)) {
+        minMidpoint = new byte[diffIdx + 1];
+        ByteBufferUtils.copyFromBufferToArray(minMidpoint, left, leftOffset, 0, diffIdx);
+        minMidpoint[diffIdx] = (byte) (diffByte + 1);
+      } else {
+        minMidpoint = new byte[diffIdx + 1];
+        ByteBufferUtils.copyFromBufferToArray(minMidpoint, right, rightOffset, 0, diffIdx + 1);
+      }
+    }
+    return minMidpoint;
+  }
+
   /** Gives inline block writers an opportunity to contribute blocks. */
   private void writeInlineBlocks(boolean closing) throws IOException {
     for (InlineBlockWriter ibw : inlineBlockWriters) {
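The new ByteBuffer overload above mirrors the existing byte[] version of getMinimumMidpointArray: walk the common prefix, then emit the shortest byte[] that still sorts strictly after left and no later than right, so the block index can store a short separator instead of the full right key. A standalone sketch of the same idea on plain arrays (illustration only, not HBase code; it assumes left sorts before right in unsigned lexicographic order and that the two differ):

import java.util.Arrays;

public class ShortestSeparatorSketch {
  // Returns a short M with left < M <= right (unsigned lexicographic order).
  static byte[] shortestSeparator(byte[] left, byte[] right) {
    int min = Math.min(left.length, right.length);
    int i = 0;
    while (i < min && left[i] == right[i]) {
      i++;                                   // length of the common prefix
    }
    if (i >= min) {
      // left is a prefix of right: keep right's prefix one byte past the divergence point.
      return Arrays.copyOfRange(right, 0, i + 1);
    }
    int diffByte = left[i] & 0xff;
    if (diffByte < 0xff && diffByte + 1 < (right[i] & 0xff)) {
      byte[] out = Arrays.copyOfRange(left, 0, i + 1);
      out[i] = (byte) (diffByte + 1);        // e.g. "abcd" vs "abzz" -> "abd"
      return out;
    }
    return Arrays.copyOfRange(right, 0, i + 1);
  }

  public static void main(String[] args) {
    // Prints "abd": shorter than "abzz", yet still between the two keys.
    System.out.println(new String(shortestSeparator("abcd".getBytes(), "abzz".getBytes())));
  }
}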
@@ -7621,7 +7621,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
       // throw DoNotRetryIOException instead of IllegalArgumentException
       throw new DoNotRetryIOException("Field is not a long, it's " + len + " bytes wide");
     }
-    return Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), len);
+    return CellUtil.getValueAsLong(cell);
   }
 
   /**
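Same idea for the long-decoding helper in HRegion: CellUtil.getValueAsLong(cell) reads the long from whichever backing store the cell has (byte[] or ByteBuffer), whereas the old code forced getValueArray() first. The width check visible just above still rejects values that are not exactly a long before decoding.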
@@ -24,8 +24,8 @@ import java.util.SortedSet;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -59,9 +59,7 @@ public class MutableSegment extends Segment {
 
     // Get the Cells for the row/family/qualifier regardless of timestamp.
     // For this case we want to clean up any other puts
-    Cell firstCell = KeyValueUtil.createFirstOnRow(cell.getRowArray(), cell.getRowOffset(),
-        cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-        cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+    Cell firstCell = CellUtil.createFirstOnRowColTS(cell, HConstants.LATEST_TIMESTAMP);
     SortedSet<Cell> ss = this.tailSet(firstCell);
     Iterator<Cell> it = ss.iterator();
     // versions visible to oldest scanner
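In MutableSegment.upsert(), the "first possible cell" used to seed the tailSet scan is now produced by CellUtil.createFirstOnRowColTS(cell, HConstants.LATEST_TIMESTAMP), which builds a lightweight fake cell over the incoming cell's row, family and qualifier (ByteBuffer-aware), replacing the KeyValueUtil.createFirstOnRow form that needed nine getXXXArray()/Offset()/Length() calls and copied everything into a new KeyValue.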
@@ -261,9 +261,7 @@ public class AccessController extends BaseMasterAndRegionObserver
       for (Map.Entry<byte[], List<Cell>> f : familyMap.entrySet()) {
         List<Cell> cells = f.getValue();
         for (Cell cell: cells) {
-          if (Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(),
-              cell.getFamilyLength(), AccessControlLists.ACL_LIST_FAMILY, 0,
-              AccessControlLists.ACL_LIST_FAMILY.length)) {
+          if (CellUtil.matchingFamily(cell, AccessControlLists.ACL_LIST_FAMILY)) {
             entries.add(CellUtil.cloneRow(cell));
           }
         }
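Finally, in AccessController the raw Bytes.equals over the family array/offset/length triple becomes CellUtil.matchingFamily(cell, AccessControlLists.ACL_LIST_FAMILY), the same copy-avoiding comparison used in the Delete and Increment hunks at the top of this patch.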