HBASE-23107 Avoid temp byte array creation when doing cacheDataOnWrite (#678)

Signed-off-by: huzheng <openinx@gmail.com>
Signed-off-by: stack <stack@apache.org>
parent cc76318f76
commit b0b7e5f5b8
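
In short: the block writer previously materialized each block for the cache as a temporary byte[] (via toByteArray()) before wrapping it; after this change it asks a ByteBuffAllocator for a ByteBuff and streams the accumulated bytes straight into it. A minimal sketch of the two paths, assuming the HBase 2.x APIs touched in this diff (ByteBuffAllocator, ByteBuff, and HBase's own ByteArrayOutputStream, including the toByteBuff() method this commit adds); it is an illustration, not the literal HFileBlock code:

    import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
    import org.apache.hadoop.hbase.io.ByteBuffAllocator;
    import org.apache.hadoop.hbase.nio.ByteBuff;

    public class CacheOnWriteSketch {
      public static void main(String[] args) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write(42); // stand-in for an accumulated block

        // Old path: toByteArray() allocates a fresh temp array and copies into it.
        byte[] tempCopy = baos.toByteArray();

        // New path: allocate once (possibly from a pool) and write directly.
        ByteBuff buff = ByteBuffAllocator.HEAP.allocate(baos.size());
        baos.toByteBuff(buff);
        buff.rewind();

        System.out.println(tempCopy.length == buff.limit()); // true
      }
    }
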
ByteArrayOutputStream.java:

@@ -22,6 +22,7 @@ import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 
+import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -112,6 +113,10 @@ public class ByteArrayOutputStream extends OutputStream implements ByteBufferWriter {
     return Arrays.copyOf(buf, pos);
   }
 
+  public void toByteBuff(ByteBuff buff) {
+    buff.put(buf, 0, pos);
+  }
+
   /**
    * @return the underlying array where the data gets accumulated
    */
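
A usage sketch for the new toByteBuff(ByteBuff) above, reusing the imports from the overview sketch. The contract is inferred from the callers in this diff: the destination needs at least size() bytes of remaining capacity, and the put advances the destination's position by pos, so callers rewind before reading:

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(new byte[] { 1, 2, 3 }, 0, 3);
    ByteBuff dest = ByteBuffAllocator.HEAP.allocate(baos.size());
    baos.toByteBuff(dest); // copies buf[0, pos) into dest; position is now 3
    dest.rewind();         // reposition to 0 before handing off for reads
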

HFileBlock.java:

@@ -843,6 +843,8 @@ public class HFileBlock implements Cacheable {
     /** Meta data that holds information about the hfileblock**/
     private HFileContext fileContext;
 
+    private final ByteBuffAllocator allocator;
+
     @Override
     public void beforeShipped() {
       if (getEncodingState() != null) {
@@ -857,12 +859,19 @@ public class HFileBlock implements Cacheable {
     /**
      * @param dataBlockEncoder data block encoding algorithm to use
      */
+    @VisibleForTesting
     public Writer(HFileDataBlockEncoder dataBlockEncoder, HFileContext fileContext) {
+      this(dataBlockEncoder, fileContext, ByteBuffAllocator.HEAP);
+    }
+
+    public Writer(HFileDataBlockEncoder dataBlockEncoder, HFileContext fileContext,
+        ByteBuffAllocator allocator) {
       if (fileContext.getBytesPerChecksum() < HConstants.HFILEBLOCK_HEADER_SIZE) {
         throw new RuntimeException("Unsupported value of bytesPerChecksum. " +
           " Minimum is " + HConstants.HFILEBLOCK_HEADER_SIZE + " but the configured value is " +
           fileContext.getBytesPerChecksum());
       }
+      this.allocator = allocator;
       this.dataBlockEncoder = dataBlockEncoder != null?
           dataBlockEncoder: NoOpDataBlockEncoder.INSTANCE;
       this.dataBlockEncodingCtx = this.dataBlockEncoder.
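
The delegating one-arg constructor keeps existing callers (such as the tests) on heap allocation, while the new overload lets HFileWriterImpl pass the block cache's allocator through. A hedged sketch of both call sites, assuming same-package access as in TestHFileBlock; real production code passes cacheConf.getByteBuffAllocator(), but ByteBuffAllocator.HEAP is used here so the sketch stands alone:

    HFileContext ctx = new HFileContextBuilder().build();

    // Old/test path: implicitly ByteBuffAllocator.HEAP.
    HFileBlock.Writer heapWriter =
        new HFileBlock.Writer(NoOpDataBlockEncoder.INSTANCE, ctx);

    // New path: share the allocator used by the block cache, so blocks built
    // for caching can come from (and be released back to) its pool.
    HFileBlock.Writer pooledWriter =
        new HFileBlock.Writer(NoOpDataBlockEncoder.INSTANCE, ctx, ByteBuffAllocator.HEAP);
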
@@ -1013,6 +1022,18 @@ public class HFileBlock implements Cacheable {
       Bytes.putInt(dest, offset, onDiskDataSize);
     }
 
+    private void putHeader(ByteBuff buff, int onDiskSize,
+        int uncompressedSize, int onDiskDataSize) {
+      buff.rewind();
+      blockType.write(buff);
+      buff.putInt(onDiskSize - HConstants.HFILEBLOCK_HEADER_SIZE);
+      buff.putInt(uncompressedSize - HConstants.HFILEBLOCK_HEADER_SIZE);
+      buff.putLong(prevOffset);
+      buff.put(fileContext.getChecksumType().getCode());
+      buff.putInt(fileContext.getBytesPerChecksum());
+      buff.putInt(onDiskDataSize);
+    }
+
     private void putHeader(ByteArrayOutputStream dest, int onDiskSize,
         int uncompressedSize, int onDiskDataSize) {
       putHeader(dest.getBuffer(),0, onDiskSize, uncompressedSize, onDiskDataSize);
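
For reference, the fields written by the new putHeader(ByteBuff, ...) overload sum to HConstants.HFILEBLOCK_HEADER_SIZE. The byte widths below are inferred from the put calls (an 8-byte block-type magic, two ints, a long, a checksum-type byte, and two more ints):

    // block type magic   8   blockType.write(buff)
    // onDiskSize         4   putInt (stored minus header size)
    // uncompressedSize   4   putInt (stored minus header size)
    // prevOffset         8   putLong
    // checksum type      1   put
    // bytesPerChecksum   4   putInt
    // onDiskDataSize     4   putInt
    int headerSize = 8 + 4 + 4 + 8 + 1 + 4 + 4; // == 33, HFILEBLOCK_HEADER_SIZE
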
@@ -1171,19 +1192,19 @@ public class HFileBlock implements Cacheable {
      * cache. Can be called in the "writing" state or the "block ready" state.
      * Returns only the header and data, does not include checksum data.
      *
-     * @return Returns a copy of uncompressed block bytes for caching on write
+     * @return Returns an uncompressed block ByteBuff for caching on write
      */
-    @VisibleForTesting
-    ByteBuffer cloneUncompressedBufferWithHeader() {
+    ByteBuff cloneUncompressedBufferWithHeader() {
       expectState(State.BLOCK_READY);
-      byte[] uncompressedBlockBytesWithHeader = baosInMemory.toByteArray();
+      ByteBuff bytebuff = allocator.allocate(baosInMemory.size());
+      baosInMemory.toByteBuff(bytebuff);
       int numBytes = (int) ChecksumUtil.numBytes(
         onDiskBlockBytesWithHeader.size(),
         fileContext.getBytesPerChecksum());
-      putHeader(uncompressedBlockBytesWithHeader, 0,
-        onDiskBlockBytesWithHeader.size() + numBytes,
+      putHeader(bytebuff, onDiskBlockBytesWithHeader.size() + numBytes,
         baosInMemory.size(), onDiskBlockBytesWithHeader.size());
-      return ByteBuffer.wrap(uncompressedBlockBytesWithHeader);
+      bytebuff.rewind();
+      return bytebuff;
     }
 
     /**
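
Because allocator.allocate() may hand back pooled (possibly off-heap) memory rather than a throwaway array, the returned ByteBuff now carries an ownership obligation: whoever receives it must release it when done. A sketch of that contract; writer here is a hypothetical handle to the Writer above, not an HBase field name:

    ByteBuff cached = writer.cloneUncompressedBufferWithHeader();
    try {
      // Read header + data starting at position 0 (hence the rewind() above).
    } finally {
      cached.release(); // return pooled memory to the allocator
    }
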
@@ -1192,9 +1213,12 @@ public class HFileBlock implements Cacheable {
      * include checksum data.
      * @return Returns a copy of block bytes for caching on write
      */
-    private ByteBuffer cloneOnDiskBufferWithHeader() {
+    private ByteBuff cloneOnDiskBufferWithHeader() {
       expectState(State.BLOCK_READY);
-      return ByteBuffer.wrap(onDiskBlockBytesWithHeader.toByteArray());
+      ByteBuff bytebuff = allocator.allocate(onDiskBlockBytesWithHeader.size());
+      onDiskBlockBytesWithHeader.toByteBuff(bytebuff);
+      bytebuff.rewind();
+      return bytebuff;
     }
 
     private void expectState(State expectedState) {
@@ -1247,24 +1271,24 @@ public class HFileBlock implements Cacheable {
           .build();
       // Build the HFileBlock.
       HFileBlockBuilder builder = new HFileBlockBuilder();
-      ByteBuffer buffer;
+      ByteBuff buff;
       if (cacheConf.shouldCacheCompressed(blockType.getCategory())) {
-        buffer = cloneOnDiskBufferWithHeader();
+        buff = cloneOnDiskBufferWithHeader();
       } else {
-        buffer = cloneUncompressedBufferWithHeader();
+        buff = cloneUncompressedBufferWithHeader();
       }
       return builder.withBlockType(blockType)
           .withOnDiskSizeWithoutHeader(getOnDiskSizeWithoutHeader())
           .withUncompressedSizeWithoutHeader(getUncompressedSizeWithoutHeader())
           .withPrevBlockOffset(prevOffset)
-          .withByteBuff(ByteBuff.wrap(buffer))
+          .withByteBuff(buff)
           .withFillHeader(FILL_HEADER)
           .withOffset(startOffset)
           .withNextBlockOnDiskSize(UNSET)
           .withOnDiskDataSizeWithHeader(onDiskBlockBytesWithHeader.size() + onDiskChecksum.length)
           .withHFileContext(newContext)
           .withByteBuffAllocator(cacheConf.getByteBuffAllocator())
-          .withShared(!buffer.hasArray())
+          .withShared(!buff.hasArray())
           .build();
     }
   }
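
The switch from withShared(!buffer.hasArray()) to withShared(!buff.hasArray()) keeps the same rule: an array-backed (heap) buffer is a private copy, while an array-less buffer is shared pool memory that must be reference-counted. The distinction in plain java.nio terms (import java.nio.ByteBuffer):

    ByteBuffer heap = ByteBuffer.allocate(16);
    ByteBuffer direct = ByteBuffer.allocateDirect(16);
    System.out.println(heap.hasArray());   // true  -> exclusive copy, not shared
    System.out.println(direct.hasArray()); // false -> shared/pooled, needs refcounting
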

HFileWriterImpl.java:

@@ -294,9 +294,8 @@ public class HFileWriterImpl implements HFile.Writer {
     if (blockWriter != null) {
       throw new IllegalStateException("finishInit called twice");
     }
-    blockWriter = new HFileBlock.Writer(blockEncoder, hFileContext);
-
+    blockWriter = new HFileBlock.Writer(blockEncoder, hFileContext,
+        cacheConf.getByteBuffAllocator());
     // Data block index writer
     boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite();
     dataBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter(blockWriter,
@@ -546,8 +545,13 @@ public class HFileWriterImpl implements HFile.Writer {
   private void doCacheOnWrite(long offset) {
     cacheConf.getBlockCache().ifPresent(cache -> {
       HFileBlock cacheFormatBlock = blockWriter.getBlockForCaching(cacheConf);
+      try {
         cache.cacheBlock(new BlockCacheKey(name, offset, true, cacheFormatBlock.getBlockType()),
           cacheFormatBlock);
+      } finally {
+        // refCnt will auto increase when block add to Cache, see RAMCache#putIfAbsent
+        cacheFormatBlock.release();
+      }
     });
   }
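
The new try/finally implements a hand-off of ownership: cacheBlock() makes the cache retain the block (the RAMCache#putIfAbsent retain mentioned in the comment), after which the writer drops its own reference even if caching throws. The counting, sketched with an illustrative stand-in class (RefCounted here is hypothetical, not an HBase API):

    import java.util.concurrent.atomic.AtomicInteger;

    // Illustrative stand-in for HBase's reference counting.
    class RefCounted {
      final AtomicInteger refCnt = new AtomicInteger(1); // creator owns one ref
      void retain()  { refCnt.incrementAndGet(); }
      void release() { refCnt.decrementAndGet(); }
    }

    public class HandOffSketch {
      public static void main(String[] args) {
        RefCounted block = new RefCounted(); // refCnt == 1, writer owns it
        try {
          block.retain();                    // the cache retains: refCnt == 2
        } finally {
          block.release();                   // writer lets go: refCnt == 1
        }
        System.out.println(block.refCnt.get()); // 1 -> the cache now owns the block
      }
    }

Without the finally, an exception while caching would leak the writer's reference and the pooled buffer behind it.
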

TestHFileBlock.java:

@@ -452,26 +452,21 @@ public class TestHFileBlock {
       HFileBlock.Writer hbw = new HFileBlock.Writer(dataBlockEncoder, meta);
       long totalSize = 0;
       final List<Integer> encodedSizes = new ArrayList<>();
-      final List<ByteBuffer> encodedBlocks = new ArrayList<>();
+      final List<ByteBuff> encodedBlocks = new ArrayList<>();
       for (int blockId = 0; blockId < numBlocks; ++blockId) {
         hbw.startWriting(BlockType.DATA);
         writeTestKeyValues(hbw, blockId, includesMemstoreTS, includesTag);
         hbw.writeHeaderAndData(os);
         int headerLen = HConstants.HFILEBLOCK_HEADER_SIZE;
-        byte[] encodedResultWithHeader = hbw.cloneUncompressedBufferWithHeader().array();
-        final int encodedSize = encodedResultWithHeader.length - headerLen;
+        ByteBuff encodedResultWithHeader = hbw.cloneUncompressedBufferWithHeader();
+        final int encodedSize = encodedResultWithHeader.limit() - headerLen;
         if (encoding != DataBlockEncoding.NONE) {
           // We need to account for the two-byte encoding algorithm ID that
           // comes after the 24-byte block header but before encoded KVs.
           headerLen += DataBlockEncoding.ID_SIZE;
         }
-        byte[] encodedDataSection =
-            new byte[encodedResultWithHeader.length - headerLen];
-        System.arraycopy(encodedResultWithHeader, headerLen,
-            encodedDataSection, 0, encodedDataSection.length);
-        final ByteBuffer encodedBuf =
-            ByteBuffer.wrap(encodedDataSection);
         encodedSizes.add(encodedSize);
+        ByteBuff encodedBuf = encodedResultWithHeader.position(headerLen).slice();
         encodedBlocks.add(encodedBuf);
         totalSize += hbw.getOnDiskSizeWithHeader();
       }
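
The replacement one-liner relies on slice() creating a view that starts at the current position, which is what lets the test drop the arraycopy of the encoded data section. The same behavior with plain java.nio (HBase's ByteBuff.position(int) returns ByteBuff, which is why the diff can chain the two calls):

    ByteBuffer whole = ByteBuffer.wrap(new byte[] { 0, 1, 2, 3, 4 });
    whole.position(2);                     // skip a 2-byte "header"
    ByteBuffer tail = whole.slice();       // view of bytes {2, 3, 4}, no copy
    System.out.println(tail.remaining());  // 3
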
@@ -521,12 +516,11 @@ public class TestHFileBlock {
           actualBuffer = actualBuffer.slice();
         }
 
-        ByteBuffer expectedBuffer = encodedBlocks.get(blockId);
-        expectedBuffer.rewind();
+        ByteBuff expectedBuff = encodedBlocks.get(blockId);
+        expectedBuff.rewind();
 
         // test if content matches, produce nice message
-        assertBuffersEqual(new SingleByteBuff(expectedBuffer), actualBuffer, algo, encoding,
-            pread);
+        assertBuffersEqual(expectedBuff, actualBuffer, algo, encoding, pread);
 
         // test serialized blocks
         for (boolean reuseBuffer : new boolean[] { false, true }) {
@@ -882,8 +876,10 @@ public class TestHFileBlock {
           hbw.writeHeaderAndData(os);
           totalSize += hbw.getOnDiskSizeWithHeader();
 
-          if (cacheOnWrite)
-            expectedContents.add(hbw.cloneUncompressedBufferWithHeader());
+          if (cacheOnWrite) {
+            ByteBuff buff = hbw.cloneUncompressedBufferWithHeader();
+            expectedContents.add(buff.asSubByteBuffer(buff.capacity()));
+          }
 
           if (detailedLogging) {
             LOG.info("Written block #" + i + " of type " + bt
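
asSubByteBuffer(buff.capacity()) converts the ByteBuff back into a plain ByteBuffer covering all its bytes, so the test's existing List<ByteBuffer> expectedContents can stay unchanged. A small conversion sketch, assuming a single-buffer ByteBuff and the imports from the overview sketch:

    ByteBuff buff = ByteBuffAllocator.HEAP.allocate(4);
    buff.put((byte) 1);
    buff.rewind();
    ByteBuffer asNio = buff.asSubByteBuffer(buff.capacity()); // java.nio view of all 4 bytes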