HADOOP-14081. S3A: Consider avoiding array copy in S3ABlockOutputStream (ByteArrayBlock). Contributed by Rajesh Balamohan

Steve Loughran 2017-02-20 16:21:00 +00:00
parent b1c1f05b1d
commit 8a05ea4aba
1 changed file with 22 additions and 4 deletions

@@ -298,6 +298,25 @@ final class S3ADataBlocks {
     }
   }
 
+  static class S3AByteArrayOutputStream extends ByteArrayOutputStream {
+
+    S3AByteArrayOutputStream(int size) {
+      super(size);
+    }
+
+    /**
+     * InputStream backed by the internal byte array
+     *
+     * @return
+     */
+    ByteArrayInputStream getInputStream() {
+      ByteArrayInputStream bin = new ByteArrayInputStream(this.buf, 0, count);
+      this.reset();
+      this.buf = null;
+      return bin;
+    }
+  }
+
   /**
    * Stream to memory via a {@code ByteArrayOutputStream}.
    *
@@ -310,14 +329,14 @@ final class S3ADataBlocks {
    */
 
   static class ByteArrayBlock extends DataBlock {
-    private ByteArrayOutputStream buffer;
+    private S3AByteArrayOutputStream buffer;
     private final int limit;
     // cache data size so that it is consistent after the buffer is reset.
     private Integer dataSize;
 
     ByteArrayBlock(int limit) {
       this.limit = limit;
-      buffer = new ByteArrayOutputStream();
+      buffer = new S3AByteArrayOutputStream(limit);
     }
 
     /**
@@ -333,8 +352,7 @@ final class S3ADataBlocks {
     InputStream startUpload() throws IOException {
       super.startUpload();
       dataSize = buffer.size();
-      ByteArrayInputStream bufferData = new ByteArrayInputStream(
-          buffer.toByteArray());
+      ByteArrayInputStream bufferData = buffer.getInputStream();
       buffer = null;
       return bufferData;
     }
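
For context, a minimal standalone sketch of the idea behind the change. The class and variable names below (NoCopyByteArrayOutputStream, "upload") are illustrative stand-ins, not the Hadoop code: ByteArrayOutputStream.toByteArray() allocates a fresh array and copies the buffered bytes into it, whereas a subclass can wrap its protected buf/count fields in a ByteArrayInputStream and skip that copy.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Illustrative stand-in for the patch's S3AByteArrayOutputStream.
class NoCopyByteArrayOutputStream extends ByteArrayOutputStream {

  NoCopyByteArrayOutputStream(int size) {
    super(size);
  }

  /** Expose the bytes written so far without copying them. */
  ByteArrayInputStream getInputStream() {
    // Wrap the existing backing array directly; toByteArray() would copy it.
    ByteArrayInputStream in = new ByteArrayInputStream(this.buf, 0, this.count);
    this.reset();    // zero the count, as the committed change does
    this.buf = null; // drop the reference so only the reader holds the data
    return in;
  }

  public static void main(String[] args) throws IOException {
    NoCopyByteArrayOutputStream out = new NoCopyByteArrayOutputStream(128);
    out.write("block data pending upload".getBytes(StandardCharsets.UTF_8));

    // Old path: new ByteArrayInputStream(out.toByteArray()) -> extra array copy.
    // New path: the reader shares the array the data was written into.
    ByteArrayInputStream upload = out.getInputStream();
    System.out.println(upload.available()); // 25
  }
}

The trade-off, mirrored in the patch, is that the output stream must not be written to again after getInputStream() returns, since buf is nulled out; that is safe here because ByteArrayBlock discards the buffer (buffer = null) as soon as startUpload() has produced the input stream.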