Merge pull request #10157 from spinscale/1503-cleanup-remove-bytesarray-unsafe

Cleanup: Remove unsafe field in BytesStreamInput
Simon Willnauer 2015-03-20 13:14:02 -07:00
commit 1d357cb48a
11 changed files with 18 additions and 25 deletions
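The API change at a glance, as a minimal sketch distilled from the call sites below (variable names are illustrative, not from the patch): the trailing boolean disappears from every BytesStreamInput constructor.

    // before: every caller had to pass an unsafe flag alongside the bytes
    StreamInput oldIn = new BytesStreamInput(data, false);

    // after: the flag is gone; the stream always returns views over the array
    StreamInput newIn = new BytesStreamInput(data);

The single call site that passed unsafe = true (FsChannelSnapshot) now copies its bytes explicitly before wrapping them; see that hunk below.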

View File

@@ -605,7 +605,7 @@ public class ClusterState implements ToXContent {
* @param localNode used to set the local node in the cluster state.
*/
public static ClusterState fromBytes(byte[] data, DiscoveryNode localNode) throws IOException {
- return readFrom(new BytesStreamInput(data, false), localNode);
+ return readFrom(new BytesStreamInput(data), localNode);
}
public static void writeTo(ClusterState state, StreamOutput out) throws IOException {

View File

@@ -97,7 +97,7 @@ public class BytesArray implements BytesReference {
@Override
public StreamInput streamInput() {
- return new BytesStreamInput(bytes, offset, length, false);
+ return new BytesStreamInput(bytes, offset, length);
}
@Override

View File

@@ -37,8 +37,6 @@ public class BytesStreamInput extends StreamInput {
protected int end;
- private final boolean unsafe;
public BytesStreamInput(BytesReference bytes) {
if (!bytes.hasArray()) {
bytes = bytes.toBytesArray();
@@ -46,25 +44,20 @@ public class BytesStreamInput extends StreamInput {
this.buf = bytes.array();
this.pos = bytes.arrayOffset();
this.end = pos + bytes.length();
- this.unsafe = false;
}
- public BytesStreamInput(byte buf[], boolean unsafe) {
-     this(buf, 0, buf.length, unsafe);
+ public BytesStreamInput(byte buf[]) {
+     this(buf, 0, buf.length);
}
- public BytesStreamInput(byte buf[], int offset, int length, boolean unsafe) {
+ public BytesStreamInput(byte buf[], int offset, int length) {
this.buf = buf;
this.pos = offset;
this.end = offset + length;
- this.unsafe = unsafe;
}
@Override
public BytesReference readBytesReference(int length) throws IOException {
- if (unsafe) {
-     return super.readBytesReference(length);
- }
BytesArray bytes = new BytesArray(buf, pos, length);
pos += length;
return bytes;
@@ -72,9 +65,6 @@ public class BytesStreamInput extends StreamInput {
@Override
public BytesRef readBytesRef(int length) throws IOException {
- if (unsafe) {
-     return super.readBytesRef(length);
- }
BytesRef bytes = new BytesRef(buf, pos, length);
pos += length;
return bytes;

View File

@@ -53,7 +53,7 @@ public class PropertiesSettingsLoader implements SettingsLoader {
@Override
public Map<String, String> load(byte[] source) throws IOException {
Properties props = new Properties();
- BytesStreamInput stream = new BytesStreamInput(source, false);
+ BytesStreamInput stream = new BytesStreamInput(source);
try {
props.load(stream);
Map<String, String> result = newHashMap();

View File

@@ -65,7 +65,7 @@ public class XContentHelper {
public static XContentParser createParser(byte[] data, int offset, int length) throws IOException {
Compressor compressor = CompressorFactory.compressor(data, offset, length);
if (compressor != null) {
- CompressedStreamInput compressedInput = compressor.streamInput(new BytesStreamInput(data, offset, length, false));
+ CompressedStreamInput compressedInput = compressor.streamInput(new BytesStreamInput(data, offset, length));
XContentType contentType = XContentFactory.xContentType(compressedInput);
compressedInput.resetToBufferStart();
return XContentFactory.xContent(contentType).createParser(compressedInput);
@@ -111,7 +111,7 @@ public class XContentHelper {
XContentType contentType;
Compressor compressor = CompressorFactory.compressor(data, offset, length);
if (compressor != null) {
- CompressedStreamInput compressedStreamInput = compressor.streamInput(new BytesStreamInput(data, offset, length, false));
+ CompressedStreamInput compressedStreamInput = compressor.streamInput(new BytesStreamInput(data, offset, length));
contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);

View File

@@ -19,7 +19,9 @@
package org.elasticsearch.index.translog.fs;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
+ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.Channels;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.index.translog.TranslogStreams;
@@ -130,7 +132,8 @@ public class FsChannelSnapshot implements Translog.Snapshot {
}
cacheBuffer.flip();
position += opSize;
- return TranslogStreams.readTranslogOperation(new BytesStreamInput(cacheBuffer.array(), 0, opSize, true));
+ BytesArray bytesArray = new BytesArray(cacheBuffer.array(), 0, opSize);
+ return TranslogStreams.readTranslogOperation(new BytesStreamInput(bytesArray.copyBytesArray()));
} catch (IOException e) {
throw new ElasticsearchException("unexpected exception reading from translog snapshot of " + this.channelReference.file(), e);
}
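A note on the hunk above: cacheBuffer is reused for every operation in this snapshot, and with the unsafe flag gone readBytesReference(...) always hands out a view over the stream's backing array instead of copying. The explicit copyBytesArray() therefore preserves what unsafe = true used to guarantee. A minimal sketch of the distinction, reusing the names from the hunk:

    // a view shares cacheBuffer's backing array; the next read would clobber it
    BytesArray view = new BytesArray(cacheBuffer.array(), 0, opSize);
    // copyBytesArray() materializes a private copy that is safe to keep
    BytesStreamInput in = new BytesStreamInput(view.copyBytesArray());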

View File

@@ -328,7 +328,7 @@ public class FsTranslog extends AbstractIndexShardComponent implements Translog
FsTranslogFile translog = translogForLocation(location);
if (translog != null) {
byte[] data = translog.read(location);
- try (BytesStreamInput in = new BytesStreamInput(data, false)) {
+ try (BytesStreamInput in = new BytesStreamInput(data)) {
// Return the Operation using the current version of the
// stream based on which translog is being read
return translog.getStream().read(in);

View File

@@ -225,7 +225,7 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
Transports.assertTransportThread();
try {
transportServiceAdapter.received(data.length);
- StreamInput stream = new BytesStreamInput(data, false);
+ StreamInput stream = new BytesStreamInput(data);
stream.setVersion(version);
long requestId = stream.readLong();

View File

@@ -82,7 +82,7 @@ public class ClusterSerializationTests extends ElasticsearchAllocationTestCase {
BytesStreamOutput outStream = new BytesStreamOutput();
RoutingTable.Builder.writeTo(source, outStream);
- BytesStreamInput inStream = new BytesStreamInput(outStream.bytes().toBytes(), false);
+ BytesStreamInput inStream = new BytesStreamInput(outStream.bytes().toBytes());
RoutingTable target = RoutingTable.Builder.readFrom(inStream);
assertThat(target.prettyPrint(), equalTo(source.prettyPrint()));

View File

@@ -281,7 +281,7 @@ public class BytesStreamsTests extends ElasticsearchTestCase {
out.writeGenericValue(doubleArray);
out.writeString("hello");
out.writeString("goodbye");
- BytesStreamInput in = new BytesStreamInput(out.bytes().toBytes(), false);
+ BytesStreamInput in = new BytesStreamInput(out.bytes().toBytes());
assertThat(in.readBoolean(), equalTo(false));
assertThat(in.readByte(), equalTo((byte)1));
assertThat(in.readShort(), equalTo((short)-1));

View File

@@ -62,7 +62,7 @@ public class XContentFactoryTests extends ElasticsearchTestCase {
assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type));
BytesArray bytesArray = builder.bytes().toBytesArray();
- assertThat(XContentFactory.xContentType(new BytesStreamInput(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), false)), equalTo(type));
+ assertThat(XContentFactory.xContentType(new BytesStreamInput(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length())), equalTo(type));
// CBOR is binary, cannot use String
if (type != XContentType.CBOR) {