Allow InputStreamStreamInput array size validation where applicable (#26692)

Today we can't validate the array length in `InputStreamStreamInput` since we can't rely on `InputStream.available`. Yet, in some situations we know the size of the stream and can apply additional validation.
parent 23093adcb9
commit 9f97f9072a
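
For orientation, here is a minimal, hypothetical sketch of the idea in plain Java (none of this code is part of the commit): when the total number of bytes in a stream is known up front, a declared array length can be checked against that bound before any allocation, instead of trusting a possibly corrupted length prefix.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;

// Hypothetical illustration only: a reader that knows the total size of the
// underlying stream and checks a declared array length against it before allocating.
class BoundedReader {
    private final DataInputStream in;
    private final long sizeLimit; // known total size of the underlying stream

    BoundedReader(byte[] bytes) {
        this.in = new DataInputStream(new ByteArrayInputStream(bytes));
        this.sizeLimit = bytes.length;
    }

    byte[] readByteArray() throws IOException {
        int length = in.readInt(); // length prefix written by the producer
        if (length > sizeLimit) {
            // A corrupted or malicious prefix cannot force a huge allocation:
            // the stream simply does not contain that many bytes.
            throw new EOFException("tried to read: " + length
                    + " bytes but this stream is limited to: " + sizeLimit);
        }
        byte[] result = new byte[length];
        in.readFully(result);
        return result;
    }
}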
InputStreamStreamInput.java

@@ -28,9 +28,28 @@ import java.io.InputStream;
 public class InputStreamStreamInput extends StreamInput {
 
     private final InputStream is;
+    private final long sizeLimit;
 
+    /**
+     * Creates a new InputStreamStreamInput with unlimited size
+     * @param is the input stream to wrap
+     */
     public InputStreamStreamInput(InputStream is) {
+        this(is, Long.MAX_VALUE);
+    }
+
+    /**
+     * Creates a new InputStreamStreamInput with a size limit
+     * @param is the input stream to wrap
+     * @param sizeLimit a hard limit of the number of bytes in the given input stream. This is used for internal input validation
+     */
+    public InputStreamStreamInput(InputStream is, long sizeLimit) {
         this.is = is;
+        if (sizeLimit < 0) {
+            throw new IllegalArgumentException("size limit must be positive");
+        }
+        this.sizeLimit = sizeLimit;
+
     }
 
     @Override

@@ -98,6 +117,8 @@ public class InputStreamStreamInput extends StreamInput {
 
     @Override
     protected void ensureCanReadBytes(int length) throws EOFException {
-        // TODO what can we do here?
+        if (length > sizeLimit) {
+            throw new EOFException("tried to read: " + length + " bytes but this stream is limited to: " + sizeLimit);
+        }
     }
 }
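
As a usage sketch (not part of this commit; it assumes, as the new test further down does, that `StreamInput#readByteArray` validates the decoded length via `ensureCanReadBytes` before allocating), the limit turns an oversized length prefix into an early `EOFException`:

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.StreamInput;

import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;

public class SizeLimitSketch {
    public static void main(String[] args) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeByteArray(new byte[8]);                       // length prefix + 8 bytes
        byte[] serialized = BytesReference.toBytes(out.bytes());

        // Limit smaller than the declared array length: the read fails fast.
        StreamInput limited = new InputStreamStreamInput(new ByteArrayInputStream(serialized), 4);
        try {
            limited.readByteArray();
        } catch (EOFException expected) {
            // "tried to read: 8 bytes but this stream is limited to: 4"
        }

        // Limit equal to the real stream size: the read succeeds.
        StreamInput bounded = new InputStreamStreamInput(new ByteArrayInputStream(serialized), serialized.length);
        byte[] roundTripped = bounded.readByteArray();         // 8 bytes, as written
    }
}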
StreamInput.java

@@ -928,7 +928,7 @@ public abstract class StreamInput extends InputStream {
     }
 
     public static StreamInput wrap(byte[] bytes, int offset, int length) {
-        return new InputStreamStreamInput(new ByteArrayInputStream(bytes, offset, length));
+        return new InputStreamStreamInput(new ByteArrayInputStream(bytes, offset, length), length);
     }
 
     /**
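
One detail of the `wrap` change worth noting: the limit is the length of the wrapped slice, not of the backing array, so a bad prefix inside a sub-slice is checked against the slice's own bound. A hypothetical illustration (class name and values invented for this sketch):

import org.elasticsearch.common.io.stream.StreamInput;

import java.io.EOFException;
import java.io.IOException;

public class WrapSliceSketch {
    public static void main(String[] args) throws IOException {
        byte[] backing = new byte[64];
        // A bogus length prefix at the start of a 4-byte slice: vint 50 fits in one byte.
        backing[10] = 50;

        StreamInput slice = StreamInput.wrap(backing, 10, 4);
        try {
            slice.readByteArray();   // declares 50 bytes, but the slice is limited to 4
        } catch (EOFException expected) {
            // rejected before a 50-byte array is allocated
        }
    }
}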
TranslogReader.java

@@ -79,7 +79,8 @@ public class TranslogReader extends BaseTranslogReader implements Closeable {
                               final FileChannel channel, final Path path, final Checkpoint checkpoint, final String translogUUID) throws IOException {
 
         try {
-            InputStreamStreamInput headerStream = new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel)); // don't close
+            InputStreamStreamInput headerStream = new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel),
+                channel.size()); // don't close
             // Lucene's CodecUtil writes a magic number of 0x3FD76C17 with the
             // header, in binary this looks like:
             //
StreamTests.java

@@ -26,6 +26,7 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.ByteArrayInputStream;
+import java.io.EOFException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -192,6 +193,22 @@ public class StreamTests extends ESTestCase {
         assertEquals(streamInput.available(), length - bytesToRead);
     }
 
+    public void testReadArraySize() throws IOException {
+        BytesStreamOutput stream = new BytesStreamOutput();
+        byte[] array = new byte[randomIntBetween(1, 10)];
+        for (int i = 0; i < array.length; i++) {
+            array[i] = randomByte();
+        }
+        stream.writeByteArray(array);
+        InputStreamStreamInput streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), array
+            .length-1);
+        expectThrows(EOFException.class, streamInput::readByteArray);
+        streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), BytesReference.toBytes(stream
+            .bytes()).length);
+
+        assertArrayEquals(array, streamInput.readByteArray());
+    }
+
     public void testWritableArrays() throws IOException {
 
         final String[] strings = generateRandomStringArray(10, 10, false, true);
PercolateQueryBuilder.java

@@ -705,7 +705,8 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
                 if (binaryDocValues.advanceExact(docId)) {
                     BytesRef qbSource = binaryDocValues.binaryValue();
                     try (InputStream in = new ByteArrayInputStream(qbSource.bytes, qbSource.offset, qbSource.length)) {
-                        try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry)) {
+                        try (StreamInput input = new NamedWriteableAwareStreamInput(
+                                new InputStreamStreamInput(in, qbSource.length), registry)) {
                             input.setVersion(indexVersion);
                             // Query builder's content is stored via BinaryFieldMapper, which has a custom encoding
                             // to encode multiple binary values into a single binary doc values field.