Fix for zero-sized content throwing off toChannelBuffer(). Also short-circuit writeTo(..) accordingly to avoid unnecessary work.

Fixes #5543
This commit is contained in:
Holger Hoffstätte 2014-03-26 10:24:35 +01:00
parent 374b633a4b
commit ab3e22d17c
3 changed files with 44 additions and 24 deletions

View File

@@ -84,30 +84,40 @@ public final class PagedBytesReference implements BytesReference {
@Override
public void writeTo(OutputStream os) throws IOException {
    // Writes the referenced bytes to the stream, page by page.
    // NOTE(review): this span contained both the pre- and post-fix bodies of
    // the method (diff residue), redeclaring `ref`/`written` and duplicating
    // the copy loop — collapsed here to the single corrected version.

    // nothing to do — short-circuit before any page math so a zero-length
    // (possibly sliced) reference never touches the backing array
    if (length == 0) {
        return;
    }

    BytesRef ref = new BytesRef();
    int written = 0;

    // are we a slice? a non-zero offset means the first page is entered
    // mid-page, so copy the remainder of that page fragment first
    if (offset != 0) {
        // remaining size of page fragment at offset
        int fragmentSize = Math.min(length, PAGE_SIZE - (offset % PAGE_SIZE));
        bytearray.get(offset, fragmentSize, ref);
        os.write(ref.bytes, ref.offset, fragmentSize);
        written += fragmentSize;
    }

    // handle remainder of pages + trailing fragment
    while (written < length) {
        int remaining = length - written;
        int bulkSize = (remaining > PAGE_SIZE) ? PAGE_SIZE : remaining;
        bytearray.get(offset + written, bulkSize, ref);
        os.write(ref.bytes, ref.offset, bulkSize);
        written += bulkSize;
    }
}
@Override
public void writeTo(GatheringByteChannel channel) throws IOException {
// nothing to do
if (length == 0) {
return;
}
ByteBuffer[] buffers;
ByteBuffer currentBuffer = null;
BytesRef ref = new BytesRef();
@@ -201,6 +211,11 @@ public final class PagedBytesReference implements BytesReference {
@Override
public ChannelBuffer toChannelBuffer() {
// nothing to do
if (length == 0) {
return ChannelBuffers.EMPTY_BUFFER;
}
ChannelBuffer[] buffers;
ChannelBuffer currentBuffer = null;
BytesRef ref = new BytesRef();

View File

@@ -19,9 +19,8 @@
package org.elasticsearch.common.io.stream;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.PagedBytesReference;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ByteArray;
@@ -154,9 +153,7 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream {
@Override
public BytesReference bytes() {
    // NOTE(review): this span contained both the removed copy-based body and
    // the added reference-based body (diff residue), leaving unreachable code
    // after the first return — collapsed to the post-commit version.
    //
    // Return a paged view over the backing ByteArray instead of materializing
    // a copy: avoids the BytesRef allocation + bulk copy on every call.
    return new PagedBytesReference(bigarrays, bytes, count);
}
private void ensureCapacity(int offset) {

View File

@@ -283,6 +283,14 @@ public class PagedBytesReferenceTest extends ElasticsearchTestCase {
assertArrayEquals(pbr.toBytes(), bufferBytes);
}
public void testEmptyToChannelBuffer() {
    // A zero-length reference must still convert into a valid, empty buffer
    // rather than throwing (regression test for #5543).
    BytesReference emptyRef = getRandomizedPagedBytesReference(0);
    ChannelBuffer buffer = emptyRef.toChannelBuffer();
    assertNotNull(buffer);
    assertEquals(0, emptyRef.length());
    assertEquals(0, buffer.capacity());
}
public void testSliceToChannelBuffer() {
int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2,8));
BytesReference pbr = getRandomizedPagedBytesReference(length);