HBASE-7239. Introduces chunked reading for large cellblocks
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1469654 13f79535-47bb-0310-9956-ffa450edef68
parent b7391e5449
commit 35038df81d
@@ -1026,7 +1026,7 @@ public class HBaseClient {
       if (responseHeader.hasCellBlockMeta()) {
         int size = responseHeader.getCellBlockMeta().getLength();
         byte [] cellBlock = new byte[size];
-        IOUtils.readFully(this.in, cellBlock, 0, cellBlock.length);
+        IPCUtil.readChunked(this.in, cellBlock, 0, size);
         cellBlockScanner = ipcUtil.createCellScanner(this.codec, this.compressor, cellBlock);
       }
       // it's possible that this call may have been cleaned up due to a RPC
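
For context, a minimal sketch (not part of the commit): the hunk above swaps a single IOUtils.readFully of the entire cellblock for IPCUtil.readChunked, which the IPCUtil hunks below implement as a loop of reads capped at 8K. Assuming a plain DataInputStream stands in for the connection's input stream, both call shapes fill the buffer with the same bytes; only the size of each underlying read changes. CallSiteSketch and its local loop are illustrative names, not HBase code.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;

public class CallSiteSketch {
  public static void main(String[] args) throws IOException {
    byte[] wire = new byte[64 * 1024];   // stand-in for cellblock bytes on the wire
    new Random(42).nextBytes(wire);
    int size = wire.length;              // what getCellBlockMeta().getLength() reports at the call site

    // Old shape: one read for the whole block.
    byte[] oneShot = new byte[size];
    new DataInputStream(new ByteArrayInputStream(wire)).readFully(oneShot, 0, oneShot.length);

    // New shape: the same bytes, pulled in reads of at most 8K.
    byte[] chunked = new byte[size];
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(wire));
    int maxRead = 8192;
    for (int offset = 0; offset < size; offset += maxRead) {
      in.readFully(chunked, offset, Math.min(size - offset, maxRead));
    }

    System.out.println("both read paths agree: " + Arrays.equals(oneShot, chunked));
  }
}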

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.ipc;
 
 import java.io.ByteArrayInputStream;
+import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.InputStream;

@@ -230,6 +231,23 @@ class IPCUtil {
     return bytes;
   }
 
+  /**
+   * Read in chunks of 8K (HBASE-7239)
+   * @param in
+   * @param dest
+   * @param offset
+   * @param len
+   * @throws IOException
+   */
+  static void readChunked(final DataInput in, byte[] dest, int offset, int len)
+      throws IOException {
+    int maxRead = 8192;
+
+    for (; offset < len; offset += maxRead) {
+      in.readFully(dest, offset, Math.min(len - offset, maxRead));
+    }
+  }
+
   /**
    * @param header
    * @param body
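
A usage note on the new helper (an observation from the code above, not a claim from the commit message): the loop treats len as an exclusive end index into dest rather than as a byte count from offset, which is why the HBaseClient call site passes offset 0 together with the full cellblock size. The standalone sketch below exercises that pattern against an in-memory stream; ReadChunkedDemo and its local copy of the loop are illustrative names, not HBase API.

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;

public class ReadChunkedDemo {
  // Same pattern as IPCUtil.readChunked above: fill dest[offset..len) from the
  // stream in reads of at most 8K each.
  static void readChunked(final DataInput in, byte[] dest, int offset, int len)
      throws IOException {
    int maxRead = 8192;
    for (; offset < len; offset += maxRead) {
      in.readFully(dest, offset, Math.min(len - offset, maxRead));
    }
  }

  public static void main(String[] args) throws IOException {
    // Stand-in for a large cellblock arriving off the wire; the odd length
    // forces a final partial chunk.
    byte[] wire = new byte[100 * 1024 + 123];
    for (int i = 0; i < wire.length; i++) {
      wire[i] = (byte) i;
    }
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(wire));

    byte[] cellBlock = new byte[wire.length];
    readChunked(in, cellBlock, 0, cellBlock.length);

    System.out.println("chunked read matches source: " + Arrays.equals(wire, cellBlock));
  }
}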