Clean up BytesReference (#19196)

BytesReference should be a really simple interface, yet it has a gazillion
ways to achieve the same thing. Methods like `#hasArray`, `#toBytesArray`, `#copyBytesArray`
`#toBytesRef` `#bytes` are all really duplicates. This change simplifies the interface
dramatically and makes implementations of it much simpler. All array access has been removed
and is streamlined through a single `#toBytesRef` method. Utility methods to materialize a
compact byte array have been added too for convenience.
This commit is contained in:
Simon Willnauer 2016-07-01 16:09:31 +02:00 committed by GitHub
parent 42addb5692
commit 5c8164a561
174 changed files with 886 additions and 1369 deletions

View File

@ -364,7 +364,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
throw new ElasticsearchParseException("failed to parse source for create index", e); throw new ElasticsearchParseException("failed to parse source for create index", e);
} }
} else { } else {
settings(new String(source.toBytes(), StandardCharsets.UTF_8)); settings(source.utf8ToString());
} }
return this; return this;
} }

View File

@ -130,7 +130,7 @@ public final class TermVectorsFields extends Fields {
* @param termVectors Stores the actual term vectors as a {@link BytesRef}. * @param termVectors Stores the actual term vectors as a {@link BytesRef}.
*/ */
public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException { public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
StreamInput header = StreamInput.wrap(headerRef.toBytesArray()); StreamInput header = headerRef.streamInput();
fieldMap = new ObjectLongHashMap<>(); fieldMap = new ObjectLongHashMap<>();
// here we read the header to fill the field offset map // here we read the header to fill the field offset map
String headerString = header.readString(); String headerString = header.readString();
@ -201,7 +201,7 @@ public final class TermVectorsFields extends Fields {
private int docCount; private int docCount;
public TermVector(BytesReference termVectors, long readOffset) throws IOException { public TermVector(BytesReference termVectors, long readOffset) throws IOException {
this.perFieldTermVectorInput = StreamInput.wrap(termVectors.toBytesArray()); this.perFieldTermVectorInput = termVectors.streamInput();
this.readOffset = readOffset; this.readOffset = readOffset;
reset(); reset();
} }

View File

@ -27,6 +27,7 @@ import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -157,7 +158,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
this.id = other.id(); this.id = other.id();
this.type = other.type(); this.type = other.type();
if (this.doc != null) { if (this.doc != null) {
this.doc = other.doc().copyBytesArray(); this.doc = new BytesArray(other.doc().toBytesRef(), true);
} }
this.flagsEnum = other.getFlags().clone(); this.flagsEnum = other.getFlags().clone();
this.preference = other.preference(); this.preference = other.preference();

View File

@ -143,8 +143,8 @@ public class TermVectorsResponse extends ActionResponse implements ToXContent {
public Fields getFields() throws IOException { public Fields getFields() throws IOException {
if (hasTermVectors() && isExists()) { if (hasTermVectors() && isExists()) {
if (!sourceCopied) { // make the bytes safe if (!sourceCopied) { // make the bytes safe
headerRef = headerRef.copyBytesArray(); headerRef = new BytesArray(headerRef.toBytesRef(), true);
termVectors = termVectors.copyBytesArray(); termVectors = new BytesArray(termVectors.toBytesRef(), true);
} }
TermVectorsFields termVectorsFields = new TermVectorsFields(headerRef, termVectors); TermVectorsFields termVectorsFields = new TermVectorsFields(headerRef, termVectors);
hasScores = termVectorsFields.hasScores; hasScores = termVectorsFields.hasScores;

View File

@ -42,6 +42,7 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -702,7 +703,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
public static byte[] toBytes(ClusterState state) throws IOException { public static byte[] toBytes(ClusterState state) throws IOException {
BytesStreamOutput os = new BytesStreamOutput(); BytesStreamOutput os = new BytesStreamOutput();
state.writeTo(os); state.writeTo(os);
return os.bytes().toBytes(); return BytesReference.toBytes(os.bytes());
} }
/** /**
@ -711,6 +712,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
*/ */
public static ClusterState fromBytes(byte[] data, DiscoveryNode localNode) throws IOException { public static ClusterState fromBytes(byte[] data, DiscoveryNode localNode) throws IOException {
return readFrom(StreamInput.wrap(data), localNode); return readFrom(StreamInput.wrap(data), localNode);
} }
/** /**

View File

@ -27,13 +27,12 @@ import java.io.OutputStream;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
public final class BytesArray implements BytesReference { public final class BytesArray extends BytesReference {
public static final BytesArray EMPTY = new BytesArray(BytesRef.EMPTY_BYTES, 0, 0); public static final BytesArray EMPTY = new BytesArray(BytesRef.EMPTY_BYTES, 0, 0);
private final byte[] bytes;
private byte[] bytes; private final int offset;
private int offset; private final int length;
private int length;
public BytesArray(String bytes) { public BytesArray(String bytes) {
BytesRef bytesRef = new BytesRef(bytes); BytesRef bytesRef = new BytesRef(bytes);
@ -89,74 +88,21 @@ public final class BytesArray implements BytesReference {
return new BytesArray(bytes, offset + from, length); return new BytesArray(bytes, offset + from, length);
} }
@Override
public StreamInput streamInput() {
return StreamInput.wrap(bytes, offset, length);
}
@Override
public void writeTo(OutputStream os) throws IOException {
os.write(bytes, offset, length);
}
@Override
public byte[] toBytes() {
if (offset == 0 && bytes.length == length) {
return bytes;
}
return Arrays.copyOfRange(bytes, offset, offset + length);
}
@Override
public BytesArray toBytesArray() {
return this;
}
@Override
public BytesArray copyBytesArray() {
return new BytesArray(Arrays.copyOfRange(bytes, offset, offset + length));
}
@Override
public boolean hasArray() {
return true;
}
@Override
public byte[] array() { public byte[] array() {
return bytes; return bytes;
} }
@Override public int offset() {
public int arrayOffset() {
return offset; return offset;
} }
@Override
public String toUtf8() {
if (length == 0) {
return "";
}
return new String(bytes, offset, length, StandardCharsets.UTF_8);
}
@Override @Override
public BytesRef toBytesRef() { public BytesRef toBytesRef() {
return new BytesRef(bytes, offset, length); return new BytesRef(bytes, offset, length);
} }
@Override @Override
public BytesRef copyBytesRef() { public long ramBytesUsed() {
return new BytesRef(Arrays.copyOfRange(bytes, offset, offset + length)); return bytes.length;
}
@Override
public int hashCode() {
return Helper.bytesHashCode(this);
}
@Override
public boolean equals(Object obj) {
return Helper.bytesEqual(this, (BytesReference) obj);
} }
} }

View File

@ -18,147 +18,74 @@
*/ */
package org.elasticsearch.common.bytes; package org.elasticsearch.common.bytes;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.function.ToIntBiFunction;
/** /**
* A reference to bytes. * A reference to bytes.
*/ */
public interface BytesReference { public abstract class BytesReference implements Accountable, Comparable<BytesReference> {
class Helper { private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it
public static boolean bytesEqual(BytesReference a, BytesReference b) {
if (a == b) {
return true;
}
if (a.length() != b.length()) {
return false;
}
return bytesEquals(a, b);
}
// pkg-private for testing
static boolean bytesEquals(BytesReference a, BytesReference b) {
assert a.length() == b.length();
for (int i = 0, end = a.length(); i < end; ++i) {
if (a.get(i) != b.get(i)) {
return false;
}
}
return true;
}
public static int bytesHashCode(BytesReference a) {
if (a.hasArray()) {
return hashCode(a.array(), a.arrayOffset(), a.length());
} else {
return slowHashCode(a);
}
}
// pkg-private for testing
static int hashCode(byte[] array, int offset, int length) {
int result = 1;
for (int i = offset, end = offset + length; i < end; ++i) {
result = 31 * result + array[i];
}
return result;
}
// pkg-private for testing
static int slowHashCode(BytesReference a) {
int result = 1;
for (int i = 0, end = a.length(); i < end; ++i) {
result = 31 * result + a.get(i);
}
return result;
}
}
/** /**
* Returns the byte at the specified index. Need to be between 0 and length. * Returns the byte at the specified index. Need to be between 0 and length.
*/ */
byte get(int index); public abstract byte get(int index);
/** /**
* The length. * The length.
*/ */
int length(); public abstract int length();
/** /**
* Slice the bytes from the <tt>from</tt> index up to <tt>length</tt>. * Slice the bytes from the <tt>from</tt> index up to <tt>length</tt>.
*/ */
BytesReference slice(int from, int length); public abstract BytesReference slice(int from, int length);
/** /**
* A stream input of the bytes. * A stream input of the bytes.
*/ */
StreamInput streamInput(); public StreamInput streamInput() {
BytesRef ref = toBytesRef();
return StreamInput.wrap(ref.bytes, ref.offset, ref.length);
}
/** /**
* Writes the bytes directly to the output stream. * Writes the bytes directly to the output stream.
*/ */
void writeTo(OutputStream os) throws IOException; public void writeTo(OutputStream os) throws IOException {
final BytesRefIterator iterator = iterator();
BytesRef ref;
while ((ref = iterator.next()) != null) {
os.write(ref.bytes, ref.offset, ref.length);
}
}
/** /**
* Returns the bytes as a single byte array. * Interprets the referenced bytes as UTF8 bytes, returning the resulting string
*/ */
byte[] toBytes(); public String utf8ToString() {
return toBytesRef().utf8ToString();
/** }
* Returns the bytes as a byte array, possibly sharing the underlying byte buffer.
*/
BytesArray toBytesArray();
/**
* Returns the bytes copied over as a byte array.
*/
BytesArray copyBytesArray();
/**
* Is there an underlying byte array for this bytes reference.
*/
boolean hasArray();
/**
* The underlying byte array (if exists).
*/
byte[] array();
/**
* The offset into the underlying byte array.
*/
int arrayOffset();
/**
* Converts to a string based on utf8.
*/
String toUtf8();
/** /**
* Converts to Lucene BytesRef. * Converts to Lucene BytesRef.
*/ */
BytesRef toBytesRef(); public abstract BytesRef toBytesRef();
/**
* Converts to a copied Lucene BytesRef.
*/
BytesRef copyBytesRef();
/** /**
* Returns a BytesRefIterator for this BytesReference. This method allows * Returns a BytesRefIterator for this BytesReference. This method allows
* access to the internal pages of this reference without copying them. Use with care! * access to the internal pages of this reference without copying them. Use with care!
* @see BytesRefIterator * @see BytesRefIterator
*/ */
default BytesRefIterator iterator() { public BytesRefIterator iterator() {
return new BytesRefIterator() { return new BytesRefIterator() {
BytesRef ref = length() == 0 ? null : toBytesRef(); BytesRef ref = length() == 0 ? null : toBytesRef();
@Override @Override
@ -170,4 +97,115 @@ public interface BytesReference {
}; };
} }
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other instanceof BytesReference) {
final BytesReference otherRef = (BytesReference) other;
if (length() != otherRef.length()) {
return false;
}
return compareIterators(this, otherRef, (a, b) ->
a.bytesEquals(b) ? 0 : 1 // this is a call to BytesRef#bytesEquals - this method is the hot one in the comparison
) == 0;
}
return false;
}
@Override
public int hashCode() {
if (hash == null) {
final BytesRefIterator iterator = iterator();
BytesRef ref;
int result = 1;
try {
while ((ref = iterator.next()) != null) {
for (int i = 0; i < ref.length; i++) {
result = 31 * result + ref.bytes[ref.offset + i];
}
}
} catch (IOException ex) {
throw new AssertionError("wont happen", ex);
}
return hash = result;
} else {
return hash.intValue();
}
}
/**
* Returns a compact array from the given BytesReference. The returned array won't be copied unless necessary. If you need
* to modify the returned array use <tt>BytesRef.deepCopyOf(reference.toBytesRef()</tt> instead
*/
public static byte[] toBytes(BytesReference reference) {
final BytesRef bytesRef = reference.toBytesRef();
if (bytesRef.offset == 0 && bytesRef.length == bytesRef.bytes.length) {
return bytesRef.bytes;
}
return BytesRef.deepCopyOf(bytesRef).bytes;
}
@Override
public int compareTo(final BytesReference other) {
return compareIterators(this, other, (a, b) -> a.compareTo(b));
}
/**
* Compares the two references using the given int function.
*/
private static final int compareIterators(final BytesReference a, final BytesReference b, final ToIntBiFunction<BytesRef, BytesRef> f) {
try {
// we use the iterators since it's a 0-copy comparison where possible!
final long lengthToCompare = Math.min(a.length(), b.length());
final BytesRefIterator aIter = a.iterator();
final BytesRefIterator bIter = b.iterator();
BytesRef aRef = aIter.next();
BytesRef bRef = bIter.next();
if (aRef != null && bRef != null) { // do we have any data?
aRef = aRef.clone(); // we clone since we modify the offsets and length in the iteration below
bRef = bRef.clone();
if (aRef.length == a.length() && bRef.length == b.length()) { // is it only one array slice we are comparing?
return f.applyAsInt(aRef, bRef);
} else {
for (int i = 0; i < lengthToCompare;) {
if (aRef.length == 0) {
aRef = aIter.next().clone(); // must be non null otherwise we have a bug
}
if (bRef.length == 0) {
bRef = bIter.next().clone(); // must be non null otherwise we have a bug
}
final int aLength = aRef.length;
final int bLength = bRef.length;
final int length = Math.min(aLength, bLength); // shrink to the same length and use the fast compare in lucene
aRef.length = bRef.length = length;
// now we move to the fast comparison - this is the hot part of the loop
int diff = f.applyAsInt(aRef, bRef);
aRef.length = aLength;
bRef.length = bLength;
if (diff != 0) {
return diff;
}
advance(aRef, length);
advance(bRef, length);
i += length;
}
}
}
// One is a prefix of the other, or, they are equal:
return a.length() - b.length();
} catch (IOException ex) {
throw new AssertionError("can not happen", ex);
}
}
private static final void advance(final BytesRef ref, final int length) {
assert ref.length >= length : " ref.length: " + ref.length + " length: " + length;
assert ref.offset+length < ref.bytes.length || (ref.offset+length == ref.bytes.length && ref.length-length == 0)
: "offset: " + ref.offset + " ref.bytes.length: " + ref.bytes.length + " length: " + length + " ref.length: " + ref.length;
ref.length -= length;
ref.offset += length;
}
} }

View File

@ -21,7 +21,6 @@ package org.elasticsearch.common.bytes;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.ByteArray;
@ -35,7 +34,7 @@ import java.util.Arrays;
* A page based bytes reference, internally holding the bytes in a paged * A page based bytes reference, internally holding the bytes in a paged
* data structure. * data structure.
*/ */
public class PagedBytesReference implements BytesReference { public class PagedBytesReference extends BytesReference {
private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE; private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE;
@ -80,119 +79,6 @@ public class PagedBytesReference implements BytesReference {
return new PagedBytesReferenceStreamInput(bytearray, offset, length); return new PagedBytesReferenceStreamInput(bytearray, offset, length);
} }
@Override
public void writeTo(OutputStream os) throws IOException {
// nothing to do
if (length == 0) {
return;
}
BytesRef ref = new BytesRef();
int written = 0;
// are we a slice?
if (offset != 0) {
// remaining size of page fragment at offset
int fragmentSize = Math.min(length, PAGE_SIZE - (offset % PAGE_SIZE));
bytearray.get(offset, fragmentSize, ref);
os.write(ref.bytes, ref.offset, fragmentSize);
written += fragmentSize;
}
// handle remainder of pages + trailing fragment
while (written < length) {
int remaining = length - written;
int bulkSize = (remaining > PAGE_SIZE) ? PAGE_SIZE : remaining;
bytearray.get(offset + written, bulkSize, ref);
os.write(ref.bytes, ref.offset, bulkSize);
written += bulkSize;
}
}
@Override
public byte[] toBytes() {
if (length == 0) {
return BytesRef.EMPTY_BYTES;
}
BytesRef ref = new BytesRef();
bytearray.get(offset, length, ref);
// undo the single-page optimization by ByteArray.get(), otherwise
// a materialized stream will contain trailing garbage/zeros
byte[] result = ref.bytes;
if (result.length != length || ref.offset != 0) {
result = Arrays.copyOfRange(result, ref.offset, ref.offset + length);
}
return result;
}
@Override
public BytesArray toBytesArray() {
BytesRef ref = new BytesRef();
bytearray.get(offset, length, ref);
return new BytesArray(ref);
}
@Override
public BytesArray copyBytesArray() {
BytesRef ref = new BytesRef();
boolean copied = bytearray.get(offset, length, ref);
if (copied) {
// BigArray has materialized for us, no need to do it again
return new BytesArray(ref.bytes, ref.offset, ref.length);
} else {
// here we need to copy the bytes even when shared
byte[] copy = Arrays.copyOfRange(ref.bytes, ref.offset, ref.offset + ref.length);
return new BytesArray(copy);
}
}
@Override
public boolean hasArray() {
return (offset + length <= PAGE_SIZE);
}
@Override
public byte[] array() {
if (hasArray()) {
if (length == 0) {
return BytesRef.EMPTY_BYTES;
}
BytesRef ref = new BytesRef();
bytearray.get(offset, length, ref);
return ref.bytes;
}
throw new IllegalStateException("array not available");
}
@Override
public int arrayOffset() {
if (hasArray()) {
BytesRef ref = new BytesRef();
bytearray.get(offset, length, ref);
return ref.offset;
}
throw new IllegalStateException("array not available");
}
@Override
public String toUtf8() {
if (length() == 0) {
return "";
}
byte[] bytes = toBytes();
final CharsRefBuilder ref = new CharsRefBuilder();
ref.copyUTF8Bytes(bytes, offset, length);
return ref.toString();
}
@Override @Override
public BytesRef toBytesRef() { public BytesRef toBytesRef() {
BytesRef bref = new BytesRef(); BytesRef bref = new BytesRef();
@ -201,61 +87,6 @@ public class PagedBytesReference implements BytesReference {
return bref; return bref;
} }
@Override
public BytesRef copyBytesRef() {
byte[] bytes = toBytes();
return new BytesRef(bytes, offset, length);
}
@Override
public int hashCode() {
if (hash == 0) {
// TODO: delegate to BigArrays via:
// hash = bigarrays.hashCode(bytearray);
// and for slices:
// hash = bigarrays.hashCode(bytearray, offset, length);
int tmphash = 1;
for (int i = 0; i < length; i++) {
tmphash = 31 * tmphash + bytearray.get(offset + i);
}
hash = tmphash;
}
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof PagedBytesReference)) {
return BytesReference.Helper.bytesEqual(this, (BytesReference) obj);
}
PagedBytesReference other = (PagedBytesReference) obj;
if (length != other.length) {
return false;
}
// TODO: delegate to BigArrays via:
// return bigarrays.equals(bytearray, other.bytearray);
// and for slices:
// return bigarrays.equals(bytearray, start, other.bytearray, otherstart, len);
ByteArray otherArray = other.bytearray;
int otherOffset = other.offset;
for (int i = 0; i < length; i++) {
if (bytearray.get(offset + i) != otherArray.get(otherOffset + i)) {
return false;
}
}
return true;
}
private static class PagedBytesReferenceStreamInput extends StreamInput { private static class PagedBytesReferenceStreamInput extends StreamInput {
private final ByteArray bytearray; private final ByteArray bytearray;
@ -390,4 +221,9 @@ public class PagedBytesReference implements BytesReference {
} }
}; };
} }
@Override
public long ramBytesUsed() {
return bytearray.ramBytesUsed();
}
} }

View File

@ -88,7 +88,7 @@ public final class CompressedXContent {
xcontent.toXContent(builder, params); xcontent.toXContent(builder, params);
builder.endObject(); builder.endObject();
} }
this.bytes = bStream.bytes().toBytes(); this.bytes = BytesReference.toBytes(bStream.bytes());
this.crc32 = (int) crc32.getValue(); this.crc32 = (int) crc32.getValue();
assertConsistent(); assertConsistent();
} }
@ -101,14 +101,14 @@ public final class CompressedXContent {
Compressor compressor = CompressorFactory.compressor(data); Compressor compressor = CompressorFactory.compressor(data);
if (compressor != null) { if (compressor != null) {
// already compressed... // already compressed...
this.bytes = data.toBytes(); this.bytes = BytesReference.toBytes(data);
this.crc32 = crc32(new BytesArray(uncompressed())); this.crc32 = crc32(new BytesArray(uncompressed()));
} else { } else {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
try (OutputStream compressedOutput = CompressorFactory.COMPRESSOR.streamOutput(out)) { try (OutputStream compressedOutput = CompressorFactory.COMPRESSOR.streamOutput(out)) {
data.writeTo(compressedOutput); data.writeTo(compressedOutput);
} }
this.bytes = out.bytes().toBytes(); this.bytes = BytesReference.toBytes(out.bytes());
this.crc32 = crc32(data); this.crc32 = crc32(data);
} }
assertConsistent(); assertConsistent();
@ -140,7 +140,7 @@ public final class CompressedXContent {
/** Return the uncompressed bytes. */ /** Return the uncompressed bytes. */
public byte[] uncompressed() { public byte[] uncompressed() {
try { try {
return CompressorFactory.uncompress(new BytesArray(bytes)).toBytes(); return BytesReference.toBytes(CompressorFactory.uncompress(new BytesArray(bytes)));
} catch (IOException e) { } catch (IOException e) {
throw new IllegalStateException("Cannot decompress compressed string", e); throw new IllegalStateException("Cannot decompress compressed string", e);
} }

View File

@ -805,13 +805,6 @@ public abstract class StreamInput extends InputStream {
return builder; return builder;
} }
public static StreamInput wrap(BytesReference reference) {
if (reference.hasArray() == false) {
reference = reference.toBytesArray();
}
return wrap(reference.array(), reference.arrayOffset(), reference.length());
}
public static StreamInput wrap(byte[] bytes) { public static StreamInput wrap(byte[] bytes) {
return wrap(bytes, 0, bytes.length); return wrap(bytes, 0, bytes.length);
} }

View File

@ -82,13 +82,7 @@ public final class Text implements Comparable<Text> {
* Returns a {@link String} view of the data. * Returns a {@link String} view of the data.
*/ */
public String string() { public String string() {
if (text == null) { return text == null ? bytes.utf8ToString() : text;
if (!bytes.hasArray()) {
bytes = bytes.toBytesArray();
}
text = new String(bytes.array(), bytes.arrayOffset(), bytes.length(), StandardCharsets.UTF_8);
}
return text;
} }
@Override @Override
@ -114,6 +108,6 @@ public final class Text implements Comparable<Text> {
@Override @Override
public int compareTo(Text text) { public int compareTo(Text text) {
return UTF8SortedAsUnicodeComparator.utf8SortedAsUnicodeSortOrder.compare(bytes(), text.bytes()); return bytes().compareTo(text.bytes());
} }
} }

View File

@ -1,77 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.text;
import org.elasticsearch.common.bytes.BytesReference;
import java.util.Comparator;
// LUCENE 4 UPGRADE: Is this the right way of comparing bytesreferences inside Text instances?
// Copied from Lucene's BytesRef comparator
public class UTF8SortedAsUnicodeComparator implements Comparator<BytesReference> {
public final static Comparator<BytesReference> utf8SortedAsUnicodeSortOrder = new UTF8SortedAsUnicodeComparator();
// Only singleton
private UTF8SortedAsUnicodeComparator() {
}
@Override
public int compare(BytesReference a, BytesReference b) {
if (a.hasArray() && b.hasArray()) {
final byte[] aBytes = a.array();
int aUpto = a.arrayOffset();
final byte[] bBytes = b.array();
int bUpto = b.arrayOffset();
final int aStop = aUpto + Math.min(a.length(), b.length());
while (aUpto < aStop) {
int aByte = aBytes[aUpto++] & 0xff;
int bByte = bBytes[bUpto++] & 0xff;
int diff = aByte - bByte;
if (diff != 0) {
return diff;
}
}
// One is a prefix of the other, or, they are equal:
return a.length() - b.length();
} else {
final byte[] aBytes = a.toBytes();
int aUpto = 0;
final byte[] bBytes = b.toBytes();
int bUpto = 0;
final int aStop = aUpto + Math.min(a.length(), b.length());
while (aUpto < aStop) {
int aByte = aBytes[aUpto++] & 0xff;
int bByte = bBytes[bUpto++] & 0xff;
int diff = aByte - bByte;
if (diff != 0) {
return diff;
}
}
// One is a prefix of the other, or, they are equal:
return a.length() - b.length();
}
}
}

View File

@ -24,7 +24,6 @@ import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.math.RoundingMode; import java.math.RoundingMode;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Calendar; import java.util.Calendar;
import java.util.Collections; import java.util.Collections;
@ -35,7 +34,6 @@ import java.util.Map;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.BytesStream; import org.elasticsearch.common.io.BytesStream;
@ -323,10 +321,8 @@ public final class XContentBuilder implements BytesStream, Releasable {
*/ */
public XContentBuilder field(String name, BytesReference value) throws IOException { public XContentBuilder field(String name, BytesReference value) throws IOException {
field(name); field(name);
if (!value.hasArray()) { final BytesRef ref = value.toBytesRef();
value = value.toBytesArray(); generator.writeBinary(ref.bytes, ref.offset, ref.length);
}
generator.writeBinary(value.array(), value.arrayOffset(), value.length());
return this; return this;
} }
@ -342,17 +338,13 @@ public final class XContentBuilder implements BytesStream, Releasable {
public XContentBuilder field(String name, Text value) throws IOException { public XContentBuilder field(String name, Text value) throws IOException {
field(name); field(name);
if (value.hasBytes() && value.bytes().hasArray()) {
generator.writeUTF8String(value.bytes().array(), value.bytes().arrayOffset(), value.bytes().length());
return this;
}
if (value.hasString()) { if (value.hasString()) {
generator.writeString(value.string()); generator.writeString(value.string());
return this; } else {
}
// TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a request to jackson to support InputStream as well? // TODO: TextBytesOptimization we can use a buffer here to convert it? maybe add a request to jackson to support InputStream as well?
BytesArray bytesArray = value.bytes().toBytesArray(); final BytesRef ref = value.bytes().toBytesRef();
generator.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length()); generator.writeUTF8String(ref.bytes, ref.offset, ref.length);
}
return this; return this;
} }
@ -701,27 +693,20 @@ public final class XContentBuilder implements BytesStream, Releasable {
if (value == null) { if (value == null) {
return nullValue(); return nullValue();
} }
if (!value.hasArray()) { BytesRef ref = value.toBytesRef();
value = value.toBytesArray(); generator.writeBinary(ref.bytes, ref.offset, ref.length);
}
generator.writeBinary(value.array(), value.arrayOffset(), value.length());
return this; return this;
} }
public XContentBuilder value(Text value) throws IOException { public XContentBuilder value(Text value) throws IOException {
if (value == null) { if (value == null) {
return nullValue(); return nullValue();
} } else if (value.hasString()) {
if (value.hasBytes() && value.bytes().hasArray()) {
generator.writeUTF8String(value.bytes().array(), value.bytes().arrayOffset(), value.bytes().length());
return this;
}
if (value.hasString()) {
generator.writeString(value.string()); generator.writeString(value.string());
return this; } else {
BytesRef bytesRef = value.bytes().toBytesRef();
generator.writeUTF8String(bytesRef.bytes, bytesRef.offset, bytesRef.length);
} }
BytesArray bytesArray = value.bytes().toBytesArray();
generator.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
return this; return this;
} }
@ -800,8 +785,7 @@ public final class XContentBuilder implements BytesStream, Releasable {
*/ */
public String string() throws IOException { public String string() throws IOException {
close(); close();
BytesArray bytesArray = bytes().toBytesArray(); return bytes().utf8ToString();
return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
} }
@ -885,13 +869,11 @@ public final class XContentBuilder implements BytesStream, Releasable {
}); });
map.put(Text.class, (g, v) -> { map.put(Text.class, (g, v) -> {
Text text = (Text) v; Text text = (Text) v;
if (text.hasBytes() && text.bytes().hasArray()) { if (text.hasString()) {
g.writeUTF8String(text.bytes().array(), text.bytes().arrayOffset(), text.bytes().length());
} else if (text.hasString()) {
g.writeString(text.string()); g.writeString(text.string());
} else { } else {
BytesArray bytesArray = text.bytes().toBytesArray(); BytesRef ref = text.bytes().toBytesRef();
g.writeUTF8String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length()); g.writeUTF8String(ref.bytes, ref.offset, ref.length);
} }
}); });
MAP = Collections.unmodifiableMap(map); MAP = Collections.unmodifiableMap(map);
@ -934,11 +916,8 @@ public final class XContentBuilder implements BytesStream, Releasable {
} }
private void writeBytesReference(BytesReference value) throws IOException { private void writeBytesReference(BytesReference value) throws IOException {
BytesReference bytes = value; BytesRef ref = value.toBytesRef();
if (!bytes.hasArray()) { generator.writeBinary(ref.bytes, ref.offset, ref.length);
bytes = bytes.toBytesArray();
}
generator.writeBinary(bytes.array(), bytes.arrayOffset(), bytes.length());
} }
private void writeIterable(Iterable<?> value) throws IOException { private void writeIterable(Iterable<?> value) throws IOException {

View File

@ -21,7 +21,6 @@ package org.elasticsearch.common.xcontent;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
@ -31,7 +30,6 @@ import org.elasticsearch.common.xcontent.ToXContent.Params;
import java.io.BufferedInputStream; import java.io.BufferedInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
@ -93,13 +91,9 @@ public class XContentHelper {
} }
public static String convertToJson(BytesReference bytes, boolean reformatJson, boolean prettyPrint) throws IOException { public static String convertToJson(BytesReference bytes, boolean reformatJson, boolean prettyPrint) throws IOException {
if (bytes.hasArray()) {
return convertToJson(bytes.array(), bytes.arrayOffset(), bytes.length(), reformatJson, prettyPrint);
}
XContentType xContentType = XContentFactory.xContentType(bytes); XContentType xContentType = XContentFactory.xContentType(bytes);
if (xContentType == XContentType.JSON && !reformatJson) { if (xContentType == XContentType.JSON && !reformatJson) {
BytesArray bytesArray = bytes.toBytesArray(); return bytes.utf8ToString();
return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
} }
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(bytes.streamInput())) { try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(bytes.streamInput())) {
parser.nextToken(); parser.nextToken();
@ -112,26 +106,6 @@ public class XContentHelper {
} }
} }
public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson) throws IOException {
return convertToJson(data, offset, length, reformatJson, false);
}
public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson, boolean prettyPrint) throws IOException {
XContentType xContentType = XContentFactory.xContentType(data, offset, length);
if (xContentType == XContentType.JSON && !reformatJson) {
return new String(data, offset, length, StandardCharsets.UTF_8);
}
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(data, offset, length)) {
parser.nextToken();
XContentBuilder builder = XContentFactory.jsonBuilder();
if (prettyPrint) {
builder.prettyPrint();
}
builder.copyCurrentStructure(parser);
return builder.string();
}
}
/** /**
* Writes serialized toXContent to pretty-printed JSON string. * Writes serialized toXContent to pretty-printed JSON string.
* *

View File

@ -96,9 +96,6 @@ public class CborXContent implements XContent {
@Override @Override
public XContentParser createParser(BytesReference bytes) throws IOException { public XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
return createParser(bytes.streamInput()); return createParser(bytes.streamInput());
} }

View File

@ -118,9 +118,6 @@ public class JsonXContent implements XContent {
@Override @Override
public XContentParser createParser(BytesReference bytes) throws IOException { public XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
return createParser(bytes.streamInput()); return createParser(bytes.streamInput());
} }

View File

@ -28,6 +28,7 @@ import com.fasterxml.jackson.core.util.DefaultIndenter;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -70,10 +71,6 @@ public class JsonXContentGenerator implements XContentGenerator {
private static final DefaultPrettyPrinter.Indenter INDENTER = new DefaultIndenter(" ", LF.getValue()); private static final DefaultPrettyPrinter.Indenter INDENTER = new DefaultIndenter(" ", LF.getValue());
private boolean prettyPrint = false; private boolean prettyPrint = false;
public JsonXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, String... filters) {
this(jsonGenerator, os, filters, true);
}
public JsonXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, String[] filters, boolean inclusive) { public JsonXContentGenerator(JsonGenerator jsonGenerator, OutputStream os, String[] filters, boolean inclusive) {
if (jsonGenerator instanceof GeneratorBase) { if (jsonGenerator instanceof GeneratorBase) {
this.base = (GeneratorBase) jsonGenerator; this.base = (GeneratorBase) jsonGenerator;
@ -355,18 +352,9 @@ public class JsonXContentGenerator implements XContentGenerator {
} }
protected void copyRawValue(BytesReference content, XContent xContent) throws IOException { protected void copyRawValue(BytesReference content, XContent xContent) throws IOException {
XContentParser parser = null; try (StreamInput input = content.streamInput();
try { XContentParser parser = xContent.createParser(input)) {
if (content.hasArray()) {
parser = xContent.createParser(content.array(), content.arrayOffset(), content.length());
} else {
parser = xContent.createParser(content.streamInput());
}
copyCurrentStructure(parser); copyCurrentStructure(parser);
} finally {
if (parser != null) {
parser.close();
}
} }
} }

View File

@ -97,9 +97,6 @@ public class SmileXContent implements XContent {
@Override @Override
public XContentParser createParser(BytesReference bytes) throws IOException { public XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
return createParser(bytes.streamInput()); return createParser(bytes.streamInput());
} }

View File

@ -92,9 +92,6 @@ public class YamlXContent implements XContent {
@Override @Override
public XContentParser createParser(BytesReference bytes) throws IOException { public XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
return createParser(bytes.streamInput()); return createParser(bytes.streamInput());
} }

View File

@ -31,6 +31,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -318,7 +319,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
Diff diff = clusterState.diff(clusterChangedEvent.previousState()); Diff diff = clusterState.diff(clusterChangedEvent.previousState());
BytesStreamOutput os = new BytesStreamOutput(); BytesStreamOutput os = new BytesStreamOutput();
diff.writeTo(os); diff.writeTo(os);
clusterStateDiffBytes = os.bytes().toBytes(); clusterStateDiffBytes = BytesReference.toBytes(os.bytes());
} }
try { try {
newNodeSpecificClusterState = discovery.lastProcessedClusterState.readDiffFrom(StreamInput.wrap(clusterStateDiffBytes)).apply(discovery.lastProcessedClusterState); newNodeSpecificClusterState = discovery.lastProcessedClusterState.readDiffFrom(StreamInput.wrap(clusterStateDiffBytes)).apply(discovery.lastProcessedClusterState);

View File

@ -116,7 +116,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
if (sourceAsBytes != null) { if (sourceAsBytes != null) {
return sourceAsBytes; return sourceAsBytes;
} }
this.sourceAsBytes = sourceRef().toBytes(); this.sourceAsBytes = BytesReference.toBytes(sourceRef());
return this.sourceAsBytes; return this.sourceAsBytes;
} }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import java.util.Objects; import java.util.Objects;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
@ -62,7 +63,7 @@ public class SourceToParse {
this.id = Objects.requireNonNull(id); this.id = Objects.requireNonNull(id);
// we always convert back to byte array, since we store it and Field only supports bytes.. // we always convert back to byte array, since we store it and Field only supports bytes..
// so, we might as well do it here, and improve the performance of working with direct byte arrays // so, we might as well do it here, and improve the performance of working with direct byte arrays
this.source = source.toBytesArray(); this.source = new BytesArray(source.toBytesRef());
} }
public Origin origin() { public Origin origin() {

View File

@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
@ -256,10 +257,8 @@ public class SourceFieldMapper extends MetadataFieldMapper {
source = bStream.bytes(); source = bStream.bytes();
} }
if (!source.hasArray()) { BytesRef ref = source.toBytesRef();
source = source.toBytesArray(); fields.add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length));
}
fields.add(new StoredField(fieldType().name(), source.array(), source.arrayOffset(), source.length()));
} }
@Override @Override

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
@ -85,7 +86,7 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder<WrapperQueryBuilde
if (source == null || source.length() == 0) { if (source == null || source.length() == 0) {
throw new IllegalArgumentException("query source text cannot be null or empty"); throw new IllegalArgumentException("query source text cannot be null or empty");
} }
this.source = source.array(); this.source = BytesRef.deepCopyOf(source.toBytesRef()).bytes;
} }
/** /**

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.shard; package org.elasticsearch.index.shard;
import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -116,7 +117,7 @@ public class CommitPoints implements Iterable<CommitPoint> {
builder.endObject(); builder.endObject();
builder.endObject(); builder.endObject();
return builder.bytes().toBytes(); return BytesReference.toBytes(builder.bytes());
} }
public static CommitPoint fromXContent(byte[] data) throws Exception { public static CommitPoint fromXContent(byte[] data) throws Exception {

View File

@ -1312,7 +1312,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
} }
out.flush(); out.flush();
if (corrupt != null) { if (corrupt != null) {
logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); logger.warn("check index [failure]\n{}", os.bytes().utf8ToString());
throw corrupt; throw corrupt;
} }
} else { } else {
@ -1327,7 +1327,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
// ignore if closed.... // ignore if closed....
return; return;
} }
logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); logger.warn("check index [failure]\n{}", os.bytes().utf8ToString());
if ("fix".equals(checkIndexOnStartup)) { if ("fix".equals(checkIndexOnStartup)) {
if (logger.isDebugEnabled()) { if (logger.isDebugEnabled()) {
logger.debug("fixing index, writing new segments file ..."); logger.debug("fixing index, writing new segments file ...");
@ -1345,7 +1345,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
} }
if (logger.isDebugEnabled()) { if (logger.isDebugEnabled()) {
logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); logger.debug("check index [success]\n{}", os.bytes().utf8ToString());
} }
recoveryState.getVerifyIndex().checkIndexTime(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - timeNS))); recoveryState.getVerifyIndex().checkIndexTime(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - timeNS)));

View File

@ -41,6 +41,7 @@ import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Lock; import org.apache.lucene.store.Lock;
import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
@ -1339,7 +1340,8 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
out.writeThrowable(exception); out.writeThrowable(exception);
BytesReference bytes = out.bytes(); BytesReference bytes = out.bytes();
output.writeVInt(bytes.length()); output.writeVInt(bytes.length());
output.writeBytes(bytes.array(), bytes.arrayOffset(), bytes.length()); BytesRef ref = bytes.toBytesRef();
output.writeBytes(ref.bytes, ref.offset, ref.length);
CodecUtil.writeFooter(output); CodecUtil.writeFooter(output);
} catch (IOException ex) { } catch (IOException ex) {
logger.warn("Can't mark store as corrupted", ex); logger.warn("Can't mark store as corrupted", ex);

View File

@ -1137,7 +1137,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService>
BytesReference statsRef = cacheShardLevelResult(shard, searcher.getDirectoryReader(), cacheKey, out -> { BytesReference statsRef = cacheShardLevelResult(shard, searcher.getDirectoryReader(), cacheKey, out -> {
out.writeOptionalWriteable(fieldType.stats(searcher.reader())); out.writeOptionalWriteable(fieldType.stats(searcher.reader()));
}); });
try (StreamInput in = StreamInput.wrap(statsRef)) { try (StreamInput in = statsRef.streamInput()) {
return in.readOptionalWriteable(FieldStats::readFrom); return in.readOptionalWriteable(FieldStats::readFrom);
} }
} }

View File

@ -24,6 +24,8 @@ import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
@ -378,10 +380,11 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget
} else { } else {
indexOutput = getOpenIndexOutput(name); indexOutput = getOpenIndexOutput(name);
} }
if (content.hasArray() == false) { BytesRefIterator iterator = content.iterator();
content = content.toBytesArray(); BytesRef scratch;
while((scratch = iterator.next()) != null) { // we iterate over all pages - this is a 0-copy for all core impls
indexOutput.writeBytes(scratch.bytes, scratch.offset, scratch.length);
} }
indexOutput.writeBytes(content.array(), content.arrayOffset(), content.length());
indexState.addRecoveredBytesToFile(name, content.length()); indexState.addRecoveredBytesToFile(name, content.length());
if (indexOutput.getFilePointer() >= fileMetaData.length() || lastChunk) { if (indexOutput.getFilePointer() >= fileMetaData.length() || lastChunk) {
try { try {

View File

@ -53,7 +53,7 @@ public class InternalTemplateService implements TemplateService {
ExecutableScript executableScript = scriptService.executable(compiledScript, model); ExecutableScript executableScript = scriptService.executable(compiledScript, model);
Object result = executableScript.run(); Object result = executableScript.run();
if (result instanceof BytesReference) { if (result instanceof BytesReference) {
return ((BytesReference) result).toUtf8(); return ((BytesReference) result).utf8ToString();
} }
return String.valueOf(result); return String.valueOf(result);
} }

View File

@ -50,7 +50,7 @@ public class RestPutRepositoryAction extends BaseRestHandler {
@Override @Override
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) { public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
PutRepositoryRequest putRepositoryRequest = putRepositoryRequest(request.param("repository")); PutRepositoryRequest putRepositoryRequest = putRepositoryRequest(request.param("repository"));
putRepositoryRequest.source(request.content().toUtf8()); putRepositoryRequest.source(request.content().utf8ToString());
putRepositoryRequest.verify(request.paramAsBoolean("verify", true)); putRepositoryRequest.verify(request.paramAsBoolean("verify", true));
putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout())); putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout()));
putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout())); putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout()));

View File

@ -49,7 +49,7 @@ public class RestCreateSnapshotAction extends BaseRestHandler {
@Override @Override
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) { public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot")); CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot"));
createSnapshotRequest.source(request.content().toUtf8()); createSnapshotRequest.source(request.content().utf8ToString());
createSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createSnapshotRequest.masterNodeTimeout())); createSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createSnapshotRequest.masterNodeTimeout()));
createSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false)); createSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
client.admin().cluster().createSnapshot(createSnapshotRequest, new RestToXContentListener<CreateSnapshotResponse>(channel)); client.admin().cluster().createSnapshot(createSnapshotRequest, new RestToXContentListener<CreateSnapshotResponse>(channel));

View File

@ -49,7 +49,7 @@ public class RestRestoreSnapshotAction extends BaseRestHandler {
RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot")); RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot"));
restoreSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", restoreSnapshotRequest.masterNodeTimeout())); restoreSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", restoreSnapshotRequest.masterNodeTimeout()));
restoreSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false)); restoreSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
restoreSnapshotRequest.source(request.content().toUtf8()); restoreSnapshotRequest.source(request.content().utf8ToString());
client.admin().cluster().restoreSnapshot(restoreSnapshotRequest, new RestToXContentListener<RestoreSnapshotResponse>(channel)); client.admin().cluster().restoreSnapshot(restoreSnapshotRequest, new RestToXContentListener<RestoreSnapshotResponse>(channel));
} }
} }

View File

@ -87,7 +87,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
XContentType type = RestActions.guessBodyContentType(request); XContentType type = RestActions.guessBodyContentType(request);
if (type == null) { if (type == null) {
if (texts == null || texts.length == 0) { if (texts == null || texts.length == 0) {
texts = new String[]{ RestActions.getRestContent(request).toUtf8() }; texts = new String[]{ RestActions.getRestContent(request).utf8ToString() };
analyzeRequest.text(texts); analyzeRequest.text(texts);
} }
} else { } else {

View File

@ -71,7 +71,7 @@ public class RestPutMappingAction extends BaseRestHandler {
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) { public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index")));
putMappingRequest.type(request.param("type")); putMappingRequest.type(request.param("type"));
putMappingRequest.source(request.content().toUtf8()); putMappingRequest.source(request.content().utf8ToString());
putMappingRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false)); putMappingRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false));
putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout())); putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout()));
putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout())); putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout()));

View File

@ -68,7 +68,7 @@ public class RestUpdateSettingsAction extends BaseRestHandler {
updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions())); updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions()));
Settings.Builder updateSettings = Settings.builder(); Settings.Builder updateSettings = Settings.builder();
String bodySettingsStr = request.content().toUtf8(); String bodySettingsStr = request.content().utf8ToString();
if (Strings.hasText(bodySettingsStr)) { if (Strings.hasText(bodySettingsStr)) {
Settings buildSettings = Settings.builder().loadFromSource(bodySettingsStr).build(); Settings buildSettings = Settings.builder().loadFromSource(bodySettingsStr).build();
for (Map.Entry<String, String> entry : buildSettings.getAsMap().entrySet()) { for (Map.Entry<String, String> entry : buildSettings.getAsMap().entrySet()) {

View File

@ -61,7 +61,7 @@ public class RestClearScrollAction extends BaseRestHandler {
if (RestActions.hasBodyContent(request)) { if (RestActions.hasBodyContent(request)) {
XContentType type = RestActions.guessBodyContentType(request); XContentType type = RestActions.guessBodyContentType(request);
if (type == null) { if (type == null) {
scrollIds = RestActions.getRestContent(request).toUtf8(); scrollIds = RestActions.getRestContent(request).utf8ToString();
clearRequest.setScrollIds(Arrays.asList(splitScrollIds(scrollIds))); clearRequest.setScrollIds(Arrays.asList(splitScrollIds(scrollIds)));
} else { } else {
// NOTE: if rest request with xcontent body has request parameters, these parameters does not override xcontent value // NOTE: if rest request with xcontent body has request parameters, these parameters does not override xcontent value

View File

@ -73,7 +73,7 @@ public class RestSearchScrollAction extends BaseRestHandler {
XContentType type = XContentFactory.xContentType(RestActions.getRestContent(request)); XContentType type = XContentFactory.xContentType(RestActions.getRestContent(request));
if (type == null) { if (type == null) {
if (scrollId == null) { if (scrollId == null) {
scrollId = RestActions.getRestContent(request).toUtf8(); scrollId = RestActions.getRestContent(request).utf8ToString();
searchScrollRequest.scrollId(scrollId); searchScrollRequest.scrollId(scrollId);
} }
} else { } else {

View File

@ -376,7 +376,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
"] lang [" + scriptLang + "]", e); "] lang [" + scriptLang + "]", e);
} }
} else { } else {
throw new IllegalArgumentException("Unable to find script in : " + scriptBytes.toUtf8()); throw new IllegalArgumentException("Unable to find script in : " + scriptBytes.utf8ToString());
} }
} catch (IOException e) { } catch (IOException e) {
throw new IllegalArgumentException("failed to parse template script", e); throw new IllegalArgumentException("failed to parse template script", e);

View File

@ -173,7 +173,7 @@ public class Template extends Script {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) { if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
contentType = parser.contentType(); contentType = parser.contentType();
XContentBuilder builder = XContentFactory.contentBuilder(contentType); XContentBuilder builder = XContentFactory.contentBuilder(contentType);
return builder.copyCurrentStructure(parser).bytes().toUtf8(); return builder.copyCurrentStructure(parser).bytes().utf8ToString();
} else { } else {
return parser.text(); return parser.text();
} }

View File

@ -235,7 +235,7 @@ public class InternalSearchHit implements SearchHit {
if (sourceAsBytes != null) { if (sourceAsBytes != null) {
return sourceAsBytes; return sourceAsBytes;
} }
this.sourceAsBytes = sourceRef().toBytes(); this.sourceAsBytes = BytesReference.toBytes(sourceRef());
return this.sourceAsBytes; return this.sourceAsBytes;
} }

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
@ -212,7 +213,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
this.innerWriteTo(out, true); this.innerWriteTo(out, true);
// copy it over, most requests are small, we might as well copy to make sure we are not sliced... // copy it over, most requests are small, we might as well copy to make sure we are not sliced...
// we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page // we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page
return out.bytes().copyBytesArray(); return new BytesArray(out.bytes().toBytesRef(), true);// do a deep copy
} }
@Override @Override

View File

@ -92,15 +92,15 @@ public class CompletionSuggestion extends Suggest.Suggestion<CompletionSuggestio
} }
public String getPayloadAsString() { public String getPayloadAsString() {
return payload.toUtf8(); return payload.utf8ToString();
} }
public long getPayloadAsLong() { public long getPayloadAsLong() {
return Long.parseLong(payload.toUtf8()); return Long.parseLong(payload.utf8ToString());
} }
public double getPayloadAsDouble() { public double getPayloadAsDouble() {
return Double.parseDouble(payload.toUtf8()); return Double.parseDouble(payload.utf8ToString());
} }
public Map<String, Object> getPayloadAsMap() { public Map<String, Object> getPayloadAsMap() {
@ -119,7 +119,7 @@ public class CompletionSuggestion extends Suggest.Suggestion<CompletionSuggestio
XContentType contentType = XContentFactory.xContentType(payload); XContentType contentType = XContentFactory.xContentType(payload);
if (contentType == null) { if (contentType == null) {
// must be a string or number // must be a string or number
builder.field("payload", payload.toUtf8()); builder.field("payload", payload.utf8ToString());
} else { } else {
builder.rawField("payload", payload); builder.rawField("payload", payload);
} }

View File

@ -24,6 +24,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -226,7 +227,7 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
throw new NodeNotConnectedException(node, "Node not connected"); throw new NodeNotConnectedException(node, "Node not connected");
} }
final byte[] data = stream.bytes().toBytes(); final byte[] data = BytesReference.toBytes(stream.bytes());
transportServiceAdapter.sent(data.length); transportServiceAdapter.sent(data.length);
transportServiceAdapter.onRequestSent(node, requestId, action, request, options); transportServiceAdapter.onRequestSent(node, requestId, action, request, options);
targetTransport.workers().execute(() -> { targetTransport.workers().execute(() -> {

View File

@ -20,6 +20,7 @@
package org.elasticsearch.transport.local; package org.elasticsearch.transport.local;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.RemoteTransportException;
@ -84,7 +85,7 @@ public class LocalTransportChannel implements TransportChannel {
status = TransportStatus.setResponse(status); status = TransportStatus.setResponse(status);
stream.writeByte(status); // 0 for request, 1 for response. stream.writeByte(status); // 0 for request, 1 for response.
response.writeTo(stream); response.writeTo(stream);
sendResponseData(stream.bytes().toBytes()); sendResponseData(BytesReference.toBytes(stream.bytes()));
sourceTransportServiceAdapter.onResponseSent(requestId, action, response, options); sourceTransportServiceAdapter.onResponseSent(requestId, action, response, options);
} }
} }
@ -96,7 +97,7 @@ public class LocalTransportChannel implements TransportChannel {
RemoteTransportException tx = new RemoteTransportException(targetTransport.nodeName(), RemoteTransportException tx = new RemoteTransportException(targetTransport.nodeName(),
targetTransport.boundAddress().boundAddresses()[0], action, error); targetTransport.boundAddress().boundAddresses()[0], action, error);
stream.writeThrowable(tx); stream.writeThrowable(tx);
sendResponseData(stream.bytes().toBytes()); sendResponseData(BytesReference.toBytes(stream.bytes()));
sourceTransportServiceAdapter.onResponseSent(requestId, action, error); sourceTransportServiceAdapter.onResponseSent(requestId, action, error);
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.transport.netty; package org.elasticsearch.transport.netty;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffer;
@ -28,111 +27,65 @@ import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
/** final class ChannelBufferBytesReference extends BytesReference {
*/
final class ChannelBufferBytesReference implements BytesReference {
private final ChannelBuffer buffer; private final ChannelBuffer buffer;
private final int size; private final int length;
private final int offset;
ChannelBufferBytesReference(ChannelBuffer buffer, int size) { ChannelBufferBytesReference(ChannelBuffer buffer, int length) {
this.buffer = buffer; this.buffer = buffer;
this.size = size; this.length = length;
assert size <= buffer.readableBytes() : "size[" + size +"] > " + buffer.readableBytes(); this.offset = buffer.readerIndex();
assert length <= buffer.readableBytes() : "length[" + length +"] > " + buffer.readableBytes();
} }
@Override @Override
public byte get(int index) { public byte get(int index) {
return buffer.getByte(buffer.readerIndex() + index); return buffer.getByte(offset + index);
} }
@Override @Override
public int length() { public int length() {
return size; return length;
} }
@Override @Override
public BytesReference slice(int from, int length) { public BytesReference slice(int from, int length) {
return new ChannelBufferBytesReference(buffer.slice(buffer.readerIndex() + from, length), length); return new ChannelBufferBytesReference(buffer.slice(offset + from, length), length);
} }
@Override @Override
public StreamInput streamInput() { public StreamInput streamInput() {
return new ChannelBufferStreamInput(buffer.duplicate(), size); return new ChannelBufferStreamInput(buffer.duplicate(), length);
} }
@Override @Override
public void writeTo(OutputStream os) throws IOException { public void writeTo(OutputStream os) throws IOException {
buffer.getBytes(buffer.readerIndex(), os, size); buffer.getBytes(offset, os, length);
} }
public byte[] toBytes() { ChannelBuffer toChannelBuffer() {
return copyBytesArray().toBytes();
}
@Override
public BytesArray toBytesArray() {
if (buffer.hasArray()) {
return new BytesArray(buffer.array(), buffer.arrayOffset() + buffer.readerIndex(), size);
}
return copyBytesArray();
}
@Override
public BytesArray copyBytesArray() {
byte[] copy = new byte[buffer.readableBytes()];
buffer.getBytes(buffer.readerIndex(), copy);
return new BytesArray(copy);
}
public ChannelBuffer toChannelBuffer() {
return buffer.duplicate(); return buffer.duplicate();
} }
@Override @Override
public boolean hasArray() { public String utf8ToString() {
return buffer.hasArray(); return buffer.toString(offset, length, StandardCharsets.UTF_8);
}
@Override
public byte[] array() {
return buffer.array();
}
@Override
public int arrayOffset() {
return buffer.arrayOffset() + buffer.readerIndex();
}
@Override
public String toUtf8() {
return buffer.toString(StandardCharsets.UTF_8);
} }
@Override @Override
public BytesRef toBytesRef() { public BytesRef toBytesRef() {
if (buffer.hasArray()) { if (buffer.hasArray()) {
return new BytesRef(buffer.array(), buffer.arrayOffset() + buffer.readerIndex(), size); return new BytesRef(buffer.array(), buffer.arrayOffset() + offset, length);
} }
byte[] copy = new byte[buffer.readableBytes()]; final byte[] copy = new byte[length];
buffer.getBytes(buffer.readerIndex(), copy); buffer.getBytes(offset, copy);
return new BytesRef(copy); return new BytesRef(copy);
} }
@Override @Override
public BytesRef copyBytesRef() { public long ramBytesUsed() {
byte[] copy = new byte[size]; return buffer.capacity();
buffer.getBytes(buffer.readerIndex(), copy);
return new BytesRef(copy);
}
@Override
public int hashCode() {
return Helper.bytesHashCode(this);
}
@Override
public boolean equals(Object obj) {
return Helper.bytesEqual(this, (BytesReference) obj);
} }
} }

View File

@ -295,7 +295,7 @@ public class ESExceptionTests extends ESTestCase {
ParsingException ex = new ParsingException(1, 2, "foobar", null); ParsingException ex = new ParsingException(1, 2, "foobar", null);
out.writeThrowable(ex); out.writeThrowable(ex);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
ParsingException e = in.readThrowable(); ParsingException e = in.readThrowable();
assertEquals(ex.getIndex(), e.getIndex()); assertEquals(ex.getIndex(), e.getIndex());
assertEquals(ex.getMessage(), e.getMessage()); assertEquals(ex.getMessage(), e.getMessage());
@ -309,7 +309,7 @@ public class ESExceptionTests extends ESTestCase {
Throwable ex = new Throwable("eggplant", ParsingException); Throwable ex = new Throwable("eggplant", ParsingException);
out.writeThrowable(ex); out.writeThrowable(ex);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
Throwable throwable = in.readThrowable(); Throwable throwable = in.readThrowable();
assertEquals("throwable: eggplant", throwable.getMessage()); assertEquals("throwable: eggplant", throwable.getMessage());
assertTrue(throwable instanceof ElasticsearchException); assertTrue(throwable instanceof ElasticsearchException);
@ -346,7 +346,7 @@ public class ESExceptionTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
ElasticsearchException ex = new ElasticsearchException("topLevel", t); ElasticsearchException ex = new ElasticsearchException("topLevel", t);
out.writeThrowable(ex); out.writeThrowable(ex);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
ElasticsearchException e = in.readThrowable(); ElasticsearchException e = in.readThrowable();
assertEquals(e.getMessage(), ex.getMessage()); assertEquals(e.getMessage(), ex.getMessage());
assertTrue("Expected: " + e.getCause().getMessage() + " to contain: " + assertTrue("Expected: " + e.getCause().getMessage() + " to contain: " +

View File

@ -223,7 +223,7 @@ public class ExceptionSerializationTests extends ESTestCase {
ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), exception); ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), exception);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
out.writeThrowable(exception); out.writeThrowable(exception);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
return in.readThrowable(); return in.readThrowable();
} }

View File

@ -44,7 +44,7 @@ public class OriginalIndicesTests extends ESTestCase {
out.setVersion(randomVersion(random())); out.setVersion(randomVersion(random()));
OriginalIndices.writeOriginalIndices(originalIndices, out); OriginalIndices.writeOriginalIndices(originalIndices, out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
in.setVersion(out.getVersion()); in.setVersion(out.getVersion());
OriginalIndices originalIndices2 = OriginalIndices.readOriginalIndices(in); OriginalIndices originalIndices2 = OriginalIndices.readOriginalIndices(in);

View File

@ -205,7 +205,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
"assignedNode", allocationDelay, remainingDelay, null, false, nodeExplanations); "assignedNode", allocationDelay, remainingDelay, null, false, nodeExplanations);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
cae.writeTo(out); cae.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
ClusterAllocationExplanation cae2 = new ClusterAllocationExplanation(in); ClusterAllocationExplanation cae2 = new ClusterAllocationExplanation(in);
assertEquals(shard, cae2.getShard()); assertEquals(shard, cae2.getShard());
assertTrue(cae2.isPrimary()); assertTrue(cae2.isPrimary());

View File

@ -84,7 +84,7 @@ public class ClusterHealthResponsesTests extends ESTestCase {
if (randomBoolean()) { if (randomBoolean()) {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
clusterHealth.writeTo(out); clusterHealth.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
clusterHealth = ClusterHealthResponse.readResponseFrom(in); clusterHealth = ClusterHealthResponse.readResponseFrom(in);
} }
return clusterHealth; return clusterHealth;

View File

@ -166,7 +166,7 @@ public class ClusterRerouteRequestTests extends ESTestCase {
private ClusterRerouteRequest roundTripThroughBytes(ClusterRerouteRequest original) throws IOException { private ClusterRerouteRequest roundTripThroughBytes(ClusterRerouteRequest original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) { try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output); original.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
ClusterRerouteRequest copy = new ClusterRerouteRequest(); ClusterRerouteRequest copy = new ClusterRerouteRequest();
copy.readFrom(in); copy.readFrom(in);
return copy; return copy;

View File

@ -66,7 +66,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase {
BytesReference bytes = out.bytes(); BytesReference bytes = out.bytes();
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
new NetworkModule(null, Settings.EMPTY, true, namedWriteableRegistry); new NetworkModule(null, Settings.EMPTY, true, namedWriteableRegistry);
StreamInput wrap = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes.toBytes()), StreamInput wrap = new NamedWriteableAwareStreamInput(bytes.streamInput(),
namedWriteableRegistry); namedWriteableRegistry);
ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest(); ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest();
deserializedReq.readFrom(wrap); deserializedReq.readFrom(wrap);

View File

@ -45,7 +45,7 @@ public class ClusterStateRequestTests extends ESTestCase {
output.setVersion(testVersion); output.setVersion(testVersion);
clusterStateRequest.writeTo(output); clusterStateRequest.writeTo(output);
StreamInput streamInput = StreamInput.wrap(output.bytes()); StreamInput streamInput = output.bytes().streamInput();
streamInput.setVersion(testVersion); streamInput.setVersion(testVersion);
ClusterStateRequest deserializedCSRequest = new ClusterStateRequest(); ClusterStateRequest deserializedCSRequest = new ClusterStateRequest();
deserializedCSRequest.readFrom(streamInput); deserializedCSRequest.readFrom(streamInput);

View File

@ -37,7 +37,7 @@ public class GetStoredScriptRequestTests extends ESTestCase {
out.setVersion(randomVersion(random())); out.setVersion(randomVersion(random()));
request.writeTo(out); request.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
in.setVersion(out.getVersion()); in.setVersion(out.getVersion());
GetStoredScriptRequest request2 = new GetStoredScriptRequest(); GetStoredScriptRequest request2 = new GetStoredScriptRequest();
request2.readFrom(in); request2.readFrom(in);

View File

@ -84,7 +84,7 @@ public class SyncedFlushUnitTests extends ESTestCase {
assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK)); assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK));
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
testPlan.result.writeTo(out); testPlan.result.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
SyncedFlushResponse readResponse = new SyncedFlushResponse(); SyncedFlushResponse readResponse = new SyncedFlushResponse();
readResponse.readFrom(in); readResponse.readFrom(in);
assertThat(readResponse.totalShards(), equalTo(testPlan.totalCounts.total)); assertThat(readResponse.totalShards(), equalTo(testPlan.totalCounts.total));

View File

@ -55,9 +55,9 @@ public class BulkRequestTests extends ESTestCase {
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
assertThat(bulkRequest.numberOfActions(), equalTo(3)); assertThat(bulkRequest.numberOfActions(), equalTo(3));
assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes())); assertThat(((IndexRequest) bulkRequest.requests().get(0)).source(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }")));
assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class)); assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));
assertThat(((IndexRequest) bulkRequest.requests().get(2)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }").toBytes())); assertThat(((IndexRequest) bulkRequest.requests().get(2)).source(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }")));
} }
public void testSimpleBulk2() throws Exception { public void testSimpleBulk2() throws Exception {
@ -81,7 +81,7 @@ public class BulkRequestTests extends ESTestCase {
assertThat(bulkRequest.numberOfActions(), equalTo(4)); assertThat(bulkRequest.numberOfActions(), equalTo(4));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1")); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2)); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().toUtf8(), equalTo("{\"field\":\"value\"}")); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
@ -93,7 +93,7 @@ public class BulkRequestTests extends ESTestCase {
assertThat(scriptParams, notNullValue()); assertThat(scriptParams, notNullValue());
assertThat(scriptParams.size(), equalTo(1)); assertThat(scriptParams.size(), equalTo(1));
assertThat(((Integer) scriptParams.get("param1")), equalTo(1)); assertThat(((Integer) scriptParams.get("param1")), equalTo(1));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().toUtf8(), equalTo("{\"counter\":1}")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}"));
} }
public void testBulkAllowExplicitIndex() throws Exception { public void testBulkAllowExplicitIndex() throws Exception {

View File

@ -70,7 +70,7 @@ public class MultiGetShardRequestTests extends ESTestCase {
out.setVersion(randomVersion(random())); out.setVersion(randomVersion(random()));
multiGetShardRequest.writeTo(out); multiGetShardRequest.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
in.setVersion(out.getVersion()); in.setVersion(out.getVersion());
MultiGetShardRequest multiGetShardRequest2 = new MultiGetShardRequest(); MultiGetShardRequest multiGetShardRequest2 = new MultiGetShardRequest();
multiGetShardRequest2.readFrom(in); multiGetShardRequest2.readFrom(in);

View File

@ -45,7 +45,7 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
simulateDocumentBaseResult.writeTo(out); simulateDocumentBaseResult.writeTo(out);
StreamInput streamInput = StreamInput.wrap(out.bytes()); StreamInput streamInput = out.bytes().streamInput();
SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput);
if (isFailure) { if (isFailure) {

View File

@ -73,7 +73,7 @@ public class SimulatePipelineResponseTests extends ESTestCase {
SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results); SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
response.writeTo(out); response.writeTo(out);
StreamInput streamInput = StreamInput.wrap(out.bytes()); StreamInput streamInput = out.bytes().streamInput();
SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(); SimulatePipelineResponse otherResponse = new SimulatePipelineResponse();
otherResponse.readFrom(streamInput); otherResponse.readFrom(streamInput);

View File

@ -48,7 +48,7 @@ public class SimulateProcessorResultTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
simulateProcessorResult.writeTo(out); simulateProcessorResult.writeTo(out);
StreamInput streamInput = StreamInput.wrap(out.bytes()); StreamInput streamInput = out.bytes().streamInput();
SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput); SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput);
assertThat(otherSimulateProcessorResult.getProcessorTag(), equalTo(simulateProcessorResult.getProcessorTag())); assertThat(otherSimulateProcessorResult.getProcessorTag(), equalTo(simulateProcessorResult.getProcessorTag()));
if (isFailure) { if (isFailure) {

View File

@ -35,7 +35,7 @@ public class WritePipelineResponseTests extends ESTestCase {
response = new WritePipelineResponse(isAcknowledged); response = new WritePipelineResponse(isAcknowledged);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
response.writeTo(out); response.writeTo(out);
StreamInput streamInput = StreamInput.wrap(out.bytes()); StreamInput streamInput = out.bytes().streamInput();
WritePipelineResponse otherResponse = new WritePipelineResponse(); WritePipelineResponse otherResponse = new WritePipelineResponse();
otherResponse.readFrom(streamInput); otherResponse.readFrom(streamInput);
@ -46,7 +46,7 @@ public class WritePipelineResponseTests extends ESTestCase {
WritePipelineResponse response = new WritePipelineResponse(); WritePipelineResponse response = new WritePipelineResponse();
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
response.writeTo(out); response.writeTo(out);
StreamInput streamInput = StreamInput.wrap(out.bytes()); StreamInput streamInput = out.bytes().streamInput();
WritePipelineResponse otherResponse = new WritePipelineResponse(); WritePipelineResponse otherResponse = new WritePipelineResponse();
otherResponse.readFrom(streamInput); otherResponse.readFrom(streamInput);

View File

@ -112,7 +112,7 @@ public class WriteableIngestDocumentTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
writeableIngestDocument.writeTo(out); writeableIngestDocument.writeTo(out);
StreamInput streamInput = StreamInput.wrap(out.bytes()); StreamInput streamInput = out.bytes().streamInput();
WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(streamInput); WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(streamInput);
assertIngestDocument(otherWriteableIngestDocument.getIngestDocument(), writeableIngestDocument.getIngestDocument()); assertIngestDocument(otherWriteableIngestDocument.getIngestDocument(), writeableIngestDocument.getIngestDocument());
} }

View File

@ -64,7 +64,7 @@ public class MainActionTests extends ESTestCase {
BytesStreamOutput streamOutput = new BytesStreamOutput(); BytesStreamOutput streamOutput = new BytesStreamOutput();
mainResponse.writeTo(streamOutput); mainResponse.writeTo(streamOutput);
final MainResponse serialized = new MainResponse(); final MainResponse serialized = new MainResponse();
serialized.readFrom(new ByteBufferStreamInput(ByteBuffer.wrap(streamOutput.bytes().toBytes()))); serialized.readFrom(streamOutput.bytes().streamInput());
assertThat(serialized.getNodeName(), equalTo(nodeName)); assertThat(serialized.getNodeName(), equalTo(nodeName));
assertThat(serialized.getClusterName(), equalTo(clusterName)); assertThat(serialized.getClusterName(), equalTo(clusterName));

View File

@ -38,7 +38,7 @@ public class IndicesOptionsTests extends ESTestCase {
output.setVersion(outputVersion); output.setVersion(outputVersion);
indicesOptions.writeIndicesOptions(output); indicesOptions.writeIndicesOptions(output);
StreamInput streamInput = StreamInput.wrap(output.bytes()); StreamInput streamInput = output.bytes().streamInput();
streamInput.setVersion(randomVersion(random())); streamInput.setVersion(randomVersion(random()));
IndicesOptions indicesOptions2 = IndicesOptions.readIndicesOptions(streamInput); IndicesOptions indicesOptions2 = IndicesOptions.readIndicesOptions(streamInput);

View File

@ -140,7 +140,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
xBuilder.startObject(); xBuilder.startObject();
response.toXContent(xBuilder, null); response.toXContent(xBuilder, null);
xBuilder.endObject(); xBuilder.endObject();
String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");;
String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\""
+ i + i
+ "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; + 
"\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}";
@ -196,7 +196,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
xBuilder.startObject(); xBuilder.startObject();
response.toXContent(xBuilder, null); response.toXContent(xBuilder, null);
xBuilder.endObject(); xBuilder.endObject();
String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");;
String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\""
+ i + i
+ "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; + 
"\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}";
@ -255,7 +255,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
xBuilder.startObject(); xBuilder.startObject();
response.toXContent(xBuilder, ToXContent.EMPTY_PARAMS); response.toXContent(xBuilder, ToXContent.EMPTY_PARAMS);
xBuilder.endObject(); xBuilder.endObject();
String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");;
String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\""
+ i + i
+ "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; + 
"\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}";

View File

@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.DiscoveryNodes;
@ -121,7 +122,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
Diff<ClusterState> diffBeforeSerialization = clusterState.diff(previousClusterState); Diff<ClusterState> diffBeforeSerialization = clusterState.diff(previousClusterState);
BytesStreamOutput os = new BytesStreamOutput(); BytesStreamOutput os = new BytesStreamOutput();
diffBeforeSerialization.writeTo(os); diffBeforeSerialization.writeTo(os);
byte[] diffBytes = os.bytes().toBytes(); byte[] diffBytes = BytesReference.toBytes(os.bytes());
Diff<ClusterState> diff; Diff<ClusterState> diff;
try (StreamInput input = StreamInput.wrap(diffBytes)) { try (StreamInput input = StreamInput.wrap(diffBytes)) {
diff = previousClusterStateFromDiffs.readDiffFrom(input); diff = previousClusterStateFromDiffs.readDiffFrom(input);

View File

@ -55,7 +55,7 @@ public class ClusterBlockTests extends ESTestCase {
out.setVersion(version); out.setVersion(version);
clusterBlock.writeTo(out); clusterBlock.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
in.setVersion(version); in.setVersion(version);
ClusterBlock result = ClusterBlock.readClusterBlock(in); ClusterBlock result = ClusterBlock.readClusterBlock(in);

View File

@ -169,7 +169,7 @@ public class ClusterStateHealthTests extends ESTestCase {
if (randomBoolean()) { if (randomBoolean()) {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
clusterStateHealth.writeTo(out); clusterStateHealth.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
clusterStateHealth = new ClusterStateHealth(in); clusterStateHealth = new ClusterStateHealth(in);
} }
return clusterStateHealth; return clusterStateHealth;

View File

@ -60,8 +60,7 @@ public class IndexGraveyardTests extends ESTestCase {
final IndexGraveyard graveyard = createRandom(); final IndexGraveyard graveyard = createRandom();
final BytesStreamOutput out = new BytesStreamOutput(); final BytesStreamOutput out = new BytesStreamOutput();
graveyard.writeTo(out); graveyard.writeTo(out);
final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); assertThat(IndexGraveyard.fromStream(out.bytes().streamInput()), equalTo(graveyard));
assertThat(IndexGraveyard.fromStream(in), equalTo(graveyard));
} }
public void testXContent() throws IOException { public void testXContent() throws IOException {

View File

@ -69,7 +69,7 @@ public class IndexMetaDataTests extends ESTestCase {
final BytesStreamOutput out = new BytesStreamOutput(); final BytesStreamOutput out = new BytesStreamOutput();
metaData.writeTo(out); metaData.writeTo(out);
IndexMetaData deserialized = IndexMetaData.PROTO.readFrom(StreamInput.wrap(out.bytes())); IndexMetaData deserialized = IndexMetaData.PROTO.readFrom(out.bytes().streamInput());
assertEquals(metaData, deserialized); assertEquals(metaData, deserialized);
assertEquals(metaData.hashCode(), deserialized.hashCode()); assertEquals(metaData.hashCode(), deserialized.hashCode());

View File

@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
@ -34,7 +33,6 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@ -185,8 +183,7 @@ public class MetaDataTests extends ESTestCase {
final MetaData originalMeta = MetaData.builder().indexGraveyard(graveyard).build(); final MetaData originalMeta = MetaData.builder().indexGraveyard(graveyard).build();
final BytesStreamOutput out = new BytesStreamOutput(); final BytesStreamOutput out = new BytesStreamOutput();
originalMeta.writeTo(out); originalMeta.writeTo(out);
final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); final MetaData fromStreamMeta = MetaData.PROTO.readFrom(out.bytes().streamInput());
final MetaData fromStreamMeta = MetaData.PROTO.readFrom(in);
assertThat(fromStreamMeta.indexGraveyard(), equalTo(fromStreamMeta.indexGraveyard())); assertThat(fromStreamMeta.indexGraveyard(), equalTo(fromStreamMeta.indexGraveyard()));
} }
} }

View File

@ -206,7 +206,7 @@ public class ShardRoutingTests extends ESTestCase {
if (randomBoolean()) { if (randomBoolean()) {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
routing.writeTo(out); routing.writeTo(out);
routing = new ShardRouting(StreamInput.wrap(out.bytes())); routing = new ShardRouting(out.bytes().streamInput());
} }
if (routing.initializing() || routing.relocating()) { if (routing.initializing() || routing.relocating()) {
assertEquals(routing.toString(), byteSize, routing.getExpectedShardSize()); assertEquals(routing.toString(), byteSize, routing.getExpectedShardSize());

View File

@ -82,7 +82,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
meta.writeTo(out); meta.writeTo(out);
out.close(); out.close();
UnassignedInfo read = new UnassignedInfo(StreamInput.wrap(out.bytes())); UnassignedInfo read = new UnassignedInfo(out.bytes().streamInput());
assertThat(read.getReason(), equalTo(meta.getReason())); assertThat(read.getReason(), equalTo(meta.getReason()));
assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis())); assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis()));
assertThat(read.getMessage(), equalTo(meta.getMessage())); assertThat(read.getMessage(), equalTo(meta.getMessage()));

View File

@ -430,7 +430,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
); );
BytesStreamOutput bytes = new BytesStreamOutput(); BytesStreamOutput bytes = new BytesStreamOutput();
AllocationCommands.writeTo(commands, bytes); AllocationCommands.writeTo(commands, bytes);
StreamInput in = StreamInput.wrap(bytes.bytes()); StreamInput in = bytes.bytes().streamInput();
// Since the commands are named writeable we need to register them and wrap the input stream // Since the commands are named writeable we need to register them and wrap the input stream
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();

View File

@ -80,7 +80,7 @@ public class ClusterSerializationTests extends ESAllocationTestCase {
BytesStreamOutput outStream = new BytesStreamOutput(); BytesStreamOutput outStream = new BytesStreamOutput();
source.writeTo(outStream); source.writeTo(outStream);
StreamInput inStream = StreamInput.wrap(outStream.bytes().toBytes()); StreamInput inStream = outStream.bytes().streamInput();
RoutingTable target = RoutingTable.Builder.readFrom(inStream); RoutingTable target = RoutingTable.Builder.readFrom(inStream);
assertThat(target.prettyPrint(), equalTo(source.prettyPrint())); assertThat(target.prettyPrint(), equalTo(source.prettyPrint()));

View File

@ -310,7 +310,7 @@ public class DiffableTests extends ESTestCase {
logger.debug("--> serializing diff"); logger.debug("--> serializing diff");
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
diffMap.writeTo(out); diffMap.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
logger.debug("--> reading diff back"); logger.debug("--> reading diff back");
diffMap = readDiff(in); diffMap = readDiff(in);
} }

View File

@ -19,14 +19,11 @@
package org.elasticsearch.common; package org.elasticsearch.common;
import org.elasticsearch.common.bytes.ByteBufferBytesReference;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.Channels;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffer;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
@ -85,7 +82,7 @@ public class ChannelsTests extends ESTestCase {
BytesReference source = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead); BytesReference source = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead);
BytesReference read = new BytesArray(readBytes, offset + offsetToRead, lengthToRead); BytesReference read = new BytesArray(readBytes, offset + offsetToRead, lengthToRead);
assertThat("read bytes didn't match written bytes", source.toBytes(), Matchers.equalTo(read.toBytes())); assertThat("read bytes didn't match written bytes", BytesReference.toBytes(source), Matchers.equalTo(BytesReference.toBytes(read)));
} }
public void testBufferReadPastEOFWithException() throws Exception { public void testBufferReadPastEOFWithException() throws Exception {
@ -157,7 +154,9 @@ public class ChannelsTests extends ESTestCase {
copy.flip(); copy.flip();
BytesReference sourceRef = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead); BytesReference sourceRef = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead);
BytesReference copyRef = new ByteBufferBytesReference(copy); byte[] tmp = new byte[copy.remaining()];
copy.duplicate().get(tmp);
BytesReference copyRef = new BytesArray(tmp);
assertTrue("read bytes didn't match written bytes", sourceRef.equals(copyRef)); assertTrue("read bytes didn't match written bytes", sourceRef.equals(copyRef));
} }

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.ByteArray;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import java.io.EOFException; import java.io.EOFException;
import java.io.IOException; import java.io.IOException;
@ -66,12 +65,9 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
int sliceLength = Math.max(0, length - sliceOffset - 1); int sliceLength = Math.max(0, length - sliceOffset - 1);
BytesReference slice = pbr.slice(sliceOffset, sliceLength); BytesReference slice = pbr.slice(sliceOffset, sliceLength);
assertEquals(sliceLength, slice.length()); assertEquals(sliceLength, slice.length());
BytesRef singlePageOrNull = getSinglePageOrNull(slice);
if (slice.hasArray()) { if (singlePageOrNull != null) {
assertEquals(sliceOffset, slice.arrayOffset()); assertEquals(sliceOffset, singlePageOrNull.offset);
} else {
expectThrows(IllegalStateException.class, () ->
slice.arrayOffset());
} }
} }
@ -109,7 +105,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
// bulk-read all // bulk-read all
si.readFully(targetBuf); si.readFully(targetBuf);
assertArrayEquals(pbr.toBytes(), targetBuf); assertArrayEquals(BytesReference.toBytes(pbr), targetBuf);
// continuing to read should now fail with EOFException // continuing to read should now fail with EOFException
try { try {
@ -141,7 +137,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
// now do NOT reset the stream - keep the stream's offset! // now do NOT reset the stream - keep the stream's offset!
// buffer to compare remaining bytes against bulk read // buffer to compare remaining bytes against bulk read
byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length); byte[] pbrBytesWithOffset = Arrays.copyOfRange(BytesReference.toBytes(pbr), offset, length);
// randomized target buffer to ensure no stale slots // randomized target buffer to ensure no stale slots
byte[] targetBytes = new byte[pbrBytesWithOffset.length]; byte[] targetBytes = new byte[pbrBytesWithOffset.length];
random().nextBytes(targetBytes); random().nextBytes(targetBytes);
@ -178,7 +174,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
} }
assertEquals(pbr.length(), target.length()); assertEquals(pbr.length(), target.length());
BytesRef targetBytes = target.get(); BytesRef targetBytes = target.get();
assertArrayEquals(pbr.toBytes(), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length)); assertArrayEquals(BytesReference.toBytes(pbr), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length));
} }
public void testSliceStreamInput() throws IOException { public void testSliceStreamInput() throws IOException {
@ -208,11 +204,11 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
assertEquals(sliceInput.available(), 0); assertEquals(sliceInput.available(), 0);
// compare slice content with upper half of original // compare slice content with upper half of original
byte[] pbrSliceBytes = Arrays.copyOfRange(pbr.toBytes(), sliceOffset, length); byte[] pbrSliceBytes = Arrays.copyOfRange(BytesReference.toBytes(pbr), sliceOffset, length);
assertArrayEquals(pbrSliceBytes, sliceBytes); assertArrayEquals(pbrSliceBytes, sliceBytes);
// compare slice bytes with bytes read from slice via streamInput :D // compare slice bytes with bytes read from slice via streamInput :D
byte[] sliceToBytes = slice.toBytes(); byte[] sliceToBytes = BytesReference.toBytes(slice);
assertEquals(sliceBytes.length, sliceToBytes.length); assertEquals(sliceBytes.length, sliceToBytes.length);
assertArrayEquals(sliceBytes, sliceToBytes); assertArrayEquals(sliceBytes, sliceToBytes);
@ -233,7 +229,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
pbr.writeTo(out); pbr.writeTo(out);
assertEquals(pbr.length(), out.size()); assertEquals(pbr.length(), out.size());
assertArrayEquals(pbr.toBytes(), out.bytes().toBytes()); assertArrayEquals(BytesReference.toBytes(pbr), BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -246,7 +242,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
BytesStreamOutput sliceOut = new BytesStreamOutput(sliceLength); BytesStreamOutput sliceOut = new BytesStreamOutput(sliceLength);
slice.writeTo(sliceOut); slice.writeTo(sliceOut);
assertEquals(slice.length(), sliceOut.size()); assertEquals(slice.length(), sliceOut.size());
assertArrayEquals(slice.toBytes(), sliceOut.bytes().toBytes()); assertArrayEquals(BytesReference.toBytes(slice), BytesReference.toBytes(sliceOut.bytes()));
sliceOut.close(); sliceOut.close();
} }
@ -254,16 +250,16 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
for (int i = 0; i < sizes.length; i++) { for (int i = 0; i < sizes.length; i++) {
BytesReference pbr = newBytesReference(sizes[i]); BytesReference pbr = newBytesReference(sizes[i]);
byte[] bytes = pbr.toBytes(); byte[] bytes = BytesReference.toBytes(pbr);
assertEquals(sizes[i], bytes.length); assertEquals(sizes[i], bytes.length);
} }
} }
public void testToBytesArraySharedPage() throws IOException { public void testToBytesRefSharedPage() throws IOException {
int length = randomIntBetween(10, PAGE_SIZE); int length = randomIntBetween(10, PAGE_SIZE);
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
BytesArray ba = pbr.toBytesArray(); BytesArray ba = new BytesArray(pbr.toBytesRef());
BytesArray ba2 = pbr.toBytesArray(); BytesArray ba2 = new BytesArray(pbr.toBytesRef());
assertNotNull(ba); assertNotNull(ba);
assertNotNull(ba2); assertNotNull(ba2);
assertEquals(pbr.length(), ba.length()); assertEquals(pbr.length(), ba.length());
@ -272,46 +268,46 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
assertSame(ba.array(), ba2.array()); assertSame(ba.array(), ba2.array());
} }
public void testToBytesArrayMaterializedPages() throws IOException { public void testToBytesRefMaterializedPages() throws IOException {
// we need a length != (n * pagesize) to avoid page sharing at boundaries // we need a length != (n * pagesize) to avoid page sharing at boundaries
int length = 0; int length = 0;
while ((length % PAGE_SIZE) == 0) { while ((length % PAGE_SIZE) == 0) {
length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5)); length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5));
} }
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
BytesArray ba = pbr.toBytesArray(); BytesArray ba = new BytesArray(pbr.toBytesRef());
BytesArray ba2 = pbr.toBytesArray(); BytesArray ba2 = new BytesArray(pbr.toBytesRef());
assertNotNull(ba); assertNotNull(ba);
assertNotNull(ba2); assertNotNull(ba2);
assertEquals(pbr.length(), ba.length()); assertEquals(pbr.length(), ba.length());
assertEquals(ba.length(), ba2.length()); assertEquals(ba.length(), ba2.length());
} }
public void testCopyBytesArray() throws IOException { public void testCopyBytesRefSharesBytes() throws IOException {
// small PBR which would normally share the first page // small PBR which would normally share the first page
int length = randomIntBetween(10, PAGE_SIZE); int length = randomIntBetween(10, PAGE_SIZE);
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
BytesArray ba = pbr.copyBytesArray(); BytesArray ba = new BytesArray(pbr.toBytesRef(), true);
BytesArray ba2 = pbr.copyBytesArray(); BytesArray ba2 = new BytesArray(pbr.toBytesRef(), true);
assertNotNull(ba); assertNotNull(ba);
assertNotSame(ba, ba2); assertNotSame(ba, ba2);
assertNotSame(ba.array(), ba2.array()); assertNotSame(ba.array(), ba2.array());
} }
public void testSliceCopyBytesArray() throws IOException { public void testSliceCopyBytesRef() throws IOException {
int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
int sliceOffset = randomIntBetween(0, pbr.length()); int sliceOffset = randomIntBetween(0, pbr.length());
int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
BytesReference slice = pbr.slice(sliceOffset, sliceLength); BytesReference slice = pbr.slice(sliceOffset, sliceLength);
BytesArray ba1 = slice.copyBytesArray(); BytesArray ba1 = new BytesArray(slice.toBytesRef(), true);
BytesArray ba2 = slice.copyBytesArray(); BytesArray ba2 = new BytesArray(slice.toBytesRef(), true);
assertNotNull(ba1); assertNotNull(ba1);
assertNotNull(ba2); assertNotNull(ba2);
assertNotSame(ba1.array(), ba2.array()); assertNotSame(ba1.array(), ba2.array());
assertArrayEquals(slice.toBytes(), ba1.array()); assertArrayEquals(BytesReference.toBytes(slice), ba1.array());
assertArrayEquals(slice.toBytes(), ba2.array()); assertArrayEquals(BytesReference.toBytes(slice), ba2.array());
assertArrayEquals(ba1.array(), ba2.array()); assertArrayEquals(ba1.array(), ba2.array());
} }
@ -329,14 +325,14 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
while((ref = iterator.next()) != null) { while((ref = iterator.next()) != null) {
builder.append(ref); builder.append(ref);
} }
assertArrayEquals(pbr.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
} }
public void testSliceIterator() throws IOException { public void testSliceIterator() throws IOException {
int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
int sliceOffset = randomIntBetween(0, pbr.length()); int sliceOffset = randomIntBetween(0, pbr.length());
int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
BytesReference slice = pbr.slice(sliceOffset, sliceLength); BytesReference slice = pbr.slice(sliceOffset, sliceLength);
BytesRefIterator iterator = slice.iterator(); BytesRefIterator iterator = slice.iterator();
BytesRef ref = null; BytesRef ref = null;
@ -344,7 +340,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
while((ref = iterator.next()) != null) { while((ref = iterator.next()) != null) {
builder.append(ref); builder.append(ref);
} }
assertArrayEquals(slice.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); assertArrayEquals(BytesReference.toBytes(slice), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
} }
public void testIteratorRandom() throws IOException { public void testIteratorRandom() throws IOException {
@ -352,12 +348,12 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
if (randomBoolean()) { if (randomBoolean()) {
int sliceOffset = randomIntBetween(0, pbr.length()); int sliceOffset = randomIntBetween(0, pbr.length());
int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
pbr = pbr.slice(sliceOffset, sliceLength); pbr = pbr.slice(sliceOffset, sliceLength);
} }
if (randomBoolean()) { if (randomBoolean()) {
pbr = pbr.toBytesArray(); pbr = new BytesArray(pbr.toBytesRef());
} }
BytesRefIterator iterator = pbr.iterator(); BytesRefIterator iterator = pbr.iterator();
BytesRef ref = null; BytesRef ref = null;
@ -365,29 +361,15 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
while((ref = iterator.next()) != null) { while((ref = iterator.next()) != null) {
builder.append(ref); builder.append(ref);
} }
assertArrayEquals(pbr.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
}
public void testArray() throws IOException {
int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
for (int i = 0; i < sizes.length; i++) {
BytesReference pbr = newBytesReference(sizes[i]);
byte[] array = pbr.array();
assertNotNull(array);
assertEquals(sizes[i], array.length);
assertSame(array, pbr.array());
}
} }
public void testArrayOffset() throws IOException { public void testArrayOffset() throws IOException {
int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
if (pbr.hasArray()) { BytesRef singlePageOrNull = getSinglePageOrNull(pbr);
assertEquals(0, pbr.arrayOffset()); if (singlePageOrNull != null) {
} else { assertEquals(0, singlePageOrNull.offset);
expectThrows(IllegalStateException.class, () ->
pbr.arrayOffset());
} }
} }
@ -395,20 +377,24 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
int sliceOffset = randomIntBetween(0, pbr.length() - 1); // an offset to the end would be len 0 int sliceOffset = randomIntBetween(0, pbr.length() - 1); // an offset to the end would be len 0
int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
BytesReference slice = pbr.slice(sliceOffset, sliceLength); BytesReference slice = pbr.slice(sliceOffset, sliceLength);
if (slice.hasArray()) { BytesRef singlePageOrNull = getSinglePageOrNull(slice);
assertEquals(sliceOffset, slice.arrayOffset()); if (singlePageOrNull != null) {
if (getSinglePageOrNull(pbr) == null) {
// original reference has pages
assertEquals(sliceOffset % PAGE_SIZE, singlePageOrNull.offset);
} else { } else {
expectThrows(IllegalStateException.class, () -> // orig ref has no pages ie. BytesArray
slice.arrayOffset()); assertEquals(sliceOffset, singlePageOrNull.offset);
}
} }
} }
public void testToUtf8() throws IOException { public void testToUtf8() throws IOException {
// test empty // test empty
BytesReference pbr = newBytesReference(0); BytesReference pbr = newBytesReference(0);
assertEquals("", pbr.toUtf8()); assertEquals("", pbr.utf8ToString());
// TODO: good way to test? // TODO: good way to test?
} }
@ -417,7 +403,6 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
BytesRef ref = pbr.toBytesRef(); BytesRef ref = pbr.toBytesRef();
assertNotNull(ref); assertNotNull(ref);
assertEquals(pbr.arrayOffset(), ref.offset);
assertEquals(pbr.length(), ref.length); assertEquals(pbr.length(), ref.length);
} }
@ -426,21 +411,13 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
// get a BytesRef from a slice // get a BytesRef from a slice
int sliceOffset = randomIntBetween(0, pbr.length()); int sliceOffset = randomIntBetween(0, pbr.length());
int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef(); BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef();
// note that these are only true if we have <= than a page, otherwise offset/length are shifted // note that these are only true if we have <= than a page, otherwise offset/length are shifted
assertEquals(sliceOffset, sliceRef.offset); assertEquals(sliceOffset, sliceRef.offset);
assertEquals(sliceLength, sliceRef.length); assertEquals(sliceLength, sliceRef.length);
} }
public void testCopyBytesRef() throws IOException {
int length = randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5));
BytesReference pbr = newBytesReference(length);
BytesRef ref = pbr.copyBytesRef();
assertNotNull(ref);
assertEquals(pbr.length(), ref.length);
}
public void testHashCode() throws IOException { public void testHashCode() throws IOException {
// empty content must have hash 1 (JDK compat) // empty content must have hash 1 (JDK compat)
BytesReference pbr = newBytesReference(0); BytesReference pbr = newBytesReference(0);
@ -448,40 +425,36 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
// test with content // test with content
pbr = newBytesReference(randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5))); pbr = newBytesReference(randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5)));
int jdkHash = Arrays.hashCode(pbr.toBytes()); int jdkHash = Arrays.hashCode(BytesReference.toBytes(pbr));
int pbrHash = pbr.hashCode(); int pbrHash = pbr.hashCode();
assertEquals(jdkHash, pbrHash); assertEquals(jdkHash, pbrHash);
// test hashes of slices // test hashes of slices
int sliceFrom = randomIntBetween(0, pbr.length()); int sliceFrom = randomIntBetween(0, pbr.length());
int sliceLength = randomIntBetween(pbr.length() - sliceFrom, pbr.length() - sliceFrom); int sliceLength = randomIntBetween(0, pbr.length() - sliceFrom);
BytesReference slice = pbr.slice(sliceFrom, sliceLength); BytesReference slice = pbr.slice(sliceFrom, sliceLength);
int sliceJdkHash = Arrays.hashCode(slice.toBytes()); int sliceJdkHash = Arrays.hashCode(BytesReference.toBytes(slice));
int sliceHash = slice.hashCode(); int sliceHash = slice.hashCode();
assertEquals(sliceJdkHash, sliceHash); assertEquals(sliceJdkHash, sliceHash);
} }
public void testEquals() { public void testEquals() throws IOException {
int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)); BytesReference bytesReference = newBytesReference(randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)));
ByteArray ba1 = bigarrays.newByteArray(length, false); BytesReference copy = bytesReference.slice(0, bytesReference.length());
ByteArray ba2 = bigarrays.newByteArray(length, false);
// copy contents
for (long i = 0; i < length; i++) {
ba2.set(i, ba1.get(i));
}
// get refs & compare // get refs & compare
BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length); assertEquals(copy, bytesReference);
BytesReference pbr2 = new PagedBytesReference(bigarrays, ba2, length); int sliceFrom = randomIntBetween(0, bytesReference.length());
assertEquals(pbr, pbr2); int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom);
} assertEquals(copy.slice(sliceFrom, sliceLength), bytesReference.slice(sliceFrom, sliceLength));
public void testEqualsPeerClass() throws IOException { BytesRef bytesRef = BytesRef.deepCopyOf(copy.toBytesRef());
int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)); assertEquals(new BytesArray(bytesRef), copy);
BytesReference pbr = newBytesReference(length);
BytesReference ba = new BytesArray(pbr.toBytes()); int offsetToFlip = randomIntBetween(0, bytesRef.length - 1);
assertEquals(pbr, ba); int value = ~Byte.toUnsignedInt(bytesRef.bytes[bytesRef.offset+offsetToFlip]);
bytesRef.bytes[bytesRef.offset+offsetToFlip] = (byte)value;
assertNotEquals(new BytesArray(bytesRef), copy);
} }
public void testSliceEquals() { public void testSliceEquals() {
@ -491,19 +464,118 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase {
// test equality of slices // test equality of slices
int sliceFrom = randomIntBetween(0, pbr.length()); int sliceFrom = randomIntBetween(0, pbr.length());
int sliceLength = randomIntBetween(pbr.length() - sliceFrom, pbr.length() - sliceFrom); int sliceLength = randomIntBetween(0, pbr.length() - sliceFrom);
BytesReference slice1 = pbr.slice(sliceFrom, sliceLength); BytesReference slice1 = pbr.slice(sliceFrom, sliceLength);
BytesReference slice2 = pbr.slice(sliceFrom, sliceLength); BytesReference slice2 = pbr.slice(sliceFrom, sliceLength);
assertArrayEquals(slice1.toBytes(), slice2.toBytes()); assertArrayEquals(BytesReference.toBytes(slice1), BytesReference.toBytes(slice2));
// test a slice with same offset but different length, // test a slice with same offset but different length,
// unless randomized testing gave us a 0-length slice. // unless randomized testing gave us a 0-length slice.
if (sliceLength > 0) { if (sliceLength > 0) {
BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2); BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2);
assertFalse(Arrays.equals(slice1.toBytes(), slice3.toBytes())); assertFalse(Arrays.equals(BytesReference.toBytes(slice1), BytesReference.toBytes(slice3)));
} }
} }
protected abstract BytesReference newBytesReference(int length) throws IOException; protected abstract BytesReference newBytesReference(int length) throws IOException;
public void testCompareTo() throws IOException {
final int iters = randomIntBetween(5, 10);
for (int i = 0; i < iters; i++) {
int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
BytesReference bytesReference = newBytesReference(length);
assertTrue(bytesReference.compareTo(new BytesArray("")) > 0);
assertTrue(new BytesArray("").compareTo(bytesReference) < 0);
assertEquals(0, bytesReference.compareTo(bytesReference));
int sliceFrom = randomIntBetween(0, bytesReference.length());
int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom);
BytesReference slice = bytesReference.slice(sliceFrom, sliceLength);
assertEquals(bytesReference.toBytesRef().compareTo(slice.toBytesRef()),
new BytesArray(bytesReference.toBytesRef(), true).compareTo(new BytesArray(slice.toBytesRef(), true)));
assertEquals(bytesReference.toBytesRef().compareTo(slice.toBytesRef()),
bytesReference.compareTo(slice));
assertEquals(slice.toBytesRef().compareTo(bytesReference.toBytesRef()),
slice.compareTo(bytesReference));
assertEquals(0, slice.compareTo(new BytesArray(slice.toBytesRef())));
assertEquals(0, new BytesArray(slice.toBytesRef()).compareTo(slice));
final int crazyLength = length + randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
ReleasableBytesStreamOutput crazyStream = new ReleasableBytesStreamOutput(length, bigarrays);
final int offset = randomIntBetween(0, crazyLength - length);
for (int j = 0; j < offset; j++) {
crazyStream.writeByte((byte) random().nextInt(1 << 8));
}
bytesReference.writeTo(crazyStream);
for (int j = crazyStream.size(); j < crazyLength; j++) {
crazyStream.writeByte((byte) random().nextInt(1 << 8));
}
PagedBytesReference crazyReference = crazyStream.bytes();
assertFalse(crazyReference.compareTo(bytesReference) == 0);
assertEquals(0, crazyReference.slice(offset, length).compareTo(
bytesReference));
assertEquals(0, bytesReference.compareTo(
crazyReference.slice(offset, length)));
}
}
public static BytesRef getSinglePageOrNull(BytesReference ref) throws IOException {
if (ref.length() > 0) {
BytesRefIterator iterator = ref.iterator();
BytesRef next = iterator.next();
BytesRef retVal = next.clone();
if (iterator.next() == null) {
return retVal;
}
} else {
return new BytesRef();
}
return null;
}
public static int getNumPages(BytesReference ref) throws IOException {
int num = 0;
if (ref.length() > 0) {
BytesRefIterator iterator = ref.iterator();
while(iterator.next() != null) {
num++;
}
}
return num;
}
public void testBasicEquals() {
final int len = randomIntBetween(0, randomBoolean() ? 10: 100000);
final int offset1 = randomInt(5);
final byte[] array1 = new byte[offset1 + len + randomInt(5)];
random().nextBytes(array1);
final int offset2 = randomInt(offset1);
final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length);
final BytesArray b1 = new BytesArray(array1, offset1, len);
final BytesArray b2 = new BytesArray(array2, offset2, len);
assertEquals(b1, b2);
assertEquals(Arrays.hashCode(BytesReference.toBytes(b1)), b1.hashCode());
assertEquals(Arrays.hashCode(BytesReference.toBytes(b2)), b2.hashCode());
// test same instance
assertEquals(b1, b1);
assertEquals(b2, b2);
if (len > 0) {
// test different length
BytesArray differentLen = new BytesArray(array1, offset1, randomInt(len - 1));
assertNotEquals(b1, differentLen);
// test changed bytes
array1[offset1 + randomInt(len - 1)] += 13;
assertNotEquals(b1, b2);
}
}
} }

View File

@ -32,10 +32,28 @@ public class BytesArrayTests extends AbstractBytesReferenceTestCase {
out.writeByte((byte) random().nextInt(1 << 8)); out.writeByte((byte) random().nextInt(1 << 8));
} }
assertEquals(length, out.size()); assertEquals(length, out.size());
BytesArray ref = out.bytes().toBytesArray(); BytesArray ref = new BytesArray(out.bytes().toBytesRef());
assertEquals(length, ref.length()); assertEquals(length, ref.length());
assertTrue(ref instanceof BytesArray); assertTrue(ref instanceof BytesArray);
assertThat(ref.length(), Matchers.equalTo(length)); assertThat(ref.length(), Matchers.equalTo(length));
return ref; return ref;
} }
public void testArray() throws IOException {
int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
for (int i = 0; i < sizes.length; i++) {
BytesArray pbr = (BytesArray) newBytesReference(sizes[i]);
byte[] array = pbr.array();
assertNotNull(array);
assertEquals(sizes[i], array.length);
assertSame(array, pbr.array());
}
}
public void testArrayOffset() throws IOException {
int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
BytesArray pbr = (BytesArray) newBytesReference(length);
assertEquals(0, pbr.offset());
}
} }

View File

@ -1,61 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.bytes;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
public class BytesReferenceTests extends ESTestCase {
public void testEquals() {
final int len = randomIntBetween(0, randomBoolean() ? 10: 100000);
final int offset1 = randomInt(5);
final byte[] array1 = new byte[offset1 + len + randomInt(5)];
random().nextBytes(array1);
final int offset2 = randomInt(offset1);
final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length);
final BytesArray b1 = new BytesArray(array1, offset1, len);
final BytesArray b2 = new BytesArray(array2, offset2, len);
assertTrue(BytesReference.Helper.bytesEqual(b1, b2));
assertTrue(BytesReference.Helper.bytesEquals(b1, b2));
assertEquals(Arrays.hashCode(b1.toBytes()), b1.hashCode());
assertEquals(BytesReference.Helper.bytesHashCode(b1), BytesReference.Helper.slowHashCode(b2));
// test same instance
assertTrue(BytesReference.Helper.bytesEqual(b1, b1));
assertTrue(BytesReference.Helper.bytesEquals(b1, b1));
assertEquals(BytesReference.Helper.bytesHashCode(b1), BytesReference.Helper.slowHashCode(b1));
if (len > 0) {
// test different length
BytesArray differentLen = new BytesArray(array1, offset1, randomInt(len - 1));
assertFalse(BytesReference.Helper.bytesEqual(b1, differentLen));
// test changed bytes
array1[offset1 + randomInt(len - 1)] += 13;
assertFalse(BytesReference.Helper.bytesEqual(b1, b2));
assertFalse(BytesReference.Helper.bytesEquals(b1, b2));
}
}
}

View File

@ -50,15 +50,15 @@ public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase {
return ref; return ref;
} }
public void testToBytesArrayMaterializedPages() throws IOException { public void testToBytesRefMaterializedPages() throws IOException {
// we need a length != (n * pagesize) to avoid page sharing at boundaries // we need a length != (n * pagesize) to avoid page sharing at boundaries
int length = 0; int length = 0;
while ((length % PAGE_SIZE) == 0) { while ((length % PAGE_SIZE) == 0) {
length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5)); length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5));
} }
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
BytesArray ba = pbr.toBytesArray(); BytesArray ba = new BytesArray(pbr.toBytesRef());
BytesArray ba2 = pbr.toBytesArray(); BytesArray ba2 = new BytesArray(pbr.toBytesRef());
assertNotNull(ba); assertNotNull(ba);
assertNotNull(ba2); assertNotNull(ba2);
assertEquals(pbr.length(), ba.length()); assertEquals(pbr.length(), ba.length());
@ -67,23 +67,23 @@ public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase {
assertNotSame(ba.array(), ba2.array()); assertNotSame(ba.array(), ba2.array());
} }
public void testArray() throws IOException { public void testSinglePage() throws IOException {
int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
for (int i = 0; i < sizes.length; i++) { for (int i = 0; i < sizes.length; i++) {
BytesReference pbr = newBytesReference(sizes[i]); BytesReference pbr = newBytesReference(sizes[i]);
// verify that array() is cheap for small payloads // verify that array() is cheap for small payloads
if (sizes[i] <= PAGE_SIZE) { if (sizes[i] <= PAGE_SIZE) {
byte[] array = pbr.array(); BytesRef page = getSinglePageOrNull(pbr);
assertNotNull(page);
byte[] array = page.bytes;
assertNotNull(array); assertNotNull(array);
assertEquals(sizes[i], array.length); assertEquals(sizes[i], array.length);
assertSame(array, pbr.array()); assertSame(array, page.bytes);
} else { } else {
try { BytesRef page = getSinglePageOrNull(pbr);
pbr.array(); if (pbr.length() > 0) {
fail("expected IllegalStateException"); assertNull(page);
} catch (IllegalStateException isx) {
// expected
} }
} }
} }
@ -94,22 +94,42 @@ public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase {
for (int i = 0; i < sizes.length; i++) { for (int i = 0; i < sizes.length; i++) {
BytesReference pbr = newBytesReference(sizes[i]); BytesReference pbr = newBytesReference(sizes[i]);
byte[] bytes = pbr.toBytes(); byte[] bytes = BytesReference.toBytes(pbr);
assertEquals(sizes[i], bytes.length); assertEquals(sizes[i], bytes.length);
// verify that toBytes() is cheap for small payloads // verify that toBytes() is cheap for small payloads
if (sizes[i] <= PAGE_SIZE) { if (sizes[i] <= PAGE_SIZE) {
assertSame(bytes, pbr.toBytes()); assertSame(bytes, BytesReference.toBytes(pbr));
} else { } else {
assertNotSame(bytes, pbr.toBytes()); assertNotSame(bytes, BytesReference.toBytes(pbr));
} }
} }
} }
public void testHasArray() throws IOException { public void testHasSinglePage() throws IOException {
int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(1, 3)); int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(1, 3));
BytesReference pbr = newBytesReference(length); BytesReference pbr = newBytesReference(length);
// must return true for <= pagesize // must return true for <= pagesize
assertEquals(length <= PAGE_SIZE, pbr.hasArray()); assertEquals(length <= PAGE_SIZE, getNumPages(pbr) == 1);
}
public void testEquals() {
int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5));
ByteArray ba1 = bigarrays.newByteArray(length, false);
ByteArray ba2 = bigarrays.newByteArray(length, false);
// copy contents
for (long i = 0; i < length; i++) {
ba2.set(i, ba1.get(i));
}
// get refs & compare
BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length);
BytesReference pbr2 = new PagedBytesReference(bigarrays, ba2, length);
assertEquals(pbr, pbr2);
int offsetToFlip = randomIntBetween(0, length - 1);
int value = ~Byte.toUnsignedInt(ba1.get(offsetToFlip));
ba2.set(offsetToFlip, (byte)value);
assertNotEquals(pbr, pbr2);
} }
} }

View File

@ -91,8 +91,8 @@ public class DeflateCompressedXContentTests extends ESTestCase {
// of different size are being used // of different size are being used
assertFalse(b1.equals(b2)); assertFalse(b1.equals(b2));
// we used the compressed representation directly and did not recompress // we used the compressed representation directly and did not recompress
assertArrayEquals(b1.toBytes(), new CompressedXContent(b1).compressed()); assertArrayEquals(BytesReference.toBytes(b1), new CompressedXContent(b1).compressed());
assertArrayEquals(b2.toBytes(), new CompressedXContent(b2).compressed()); assertArrayEquals(BytesReference.toBytes(b2), new CompressedXContent(b2).compressed());
// but compressedstring instances are still equal // but compressedstring instances are still equal
assertEquals(new CompressedXContent(b1), new CompressedXContent(b2)); assertEquals(new CompressedXContent(b1), new CompressedXContent(b2));
} }

View File

@ -46,7 +46,7 @@ public class GeoDistanceTests extends ESTestCase {
GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.FACTOR, GeoDistance.ARC, GeoDistance.SLOPPY_ARC); GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.FACTOR, GeoDistance.ARC, GeoDistance.SLOPPY_ARC);
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
geoDistance.writeTo(out); geoDistance.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) {; try (StreamInput in = out.bytes().streamInput()) {;
GeoDistance copy = GeoDistance.readFromStream(in); GeoDistance copy = GeoDistance.readFromStream(in);
assertEquals(copy.toString() + " vs. " + geoDistance.toString(), copy, geoDistance); assertEquals(copy.toString() + " vs. " + geoDistance.toString(), copy, geoDistance);
} }
@ -60,7 +60,7 @@ public class GeoDistanceTests extends ESTestCase {
} else { } else {
out.writeVInt(randomIntBetween(Integer.MIN_VALUE, -1)); out.writeVInt(randomIntBetween(Integer.MIN_VALUE, -1));
} }
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
GeoDistance.readFromStream(in); GeoDistance.readFromStream(in);
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("Unknown GeoDistance ordinal [")); assertThat(e.getMessage(), containsString("Unknown GeoDistance ordinal ["));

View File

@ -39,21 +39,21 @@ public class ShapeRelationTests extends ESTestCase {
public void testwriteTo() throws Exception { public void testwriteTo() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
ShapeRelation.INTERSECTS.writeTo(out); ShapeRelation.INTERSECTS.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(0)); assertThat(in.readVInt(), equalTo(0));
} }
} }
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
ShapeRelation.DISJOINT.writeTo(out); ShapeRelation.DISJOINT.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(1)); assertThat(in.readVInt(), equalTo(1));
} }
} }
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
ShapeRelation.WITHIN.writeTo(out); ShapeRelation.WITHIN.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(2)); assertThat(in.readVInt(), equalTo(2));
} }
} }
@ -62,19 +62,19 @@ public class ShapeRelationTests extends ESTestCase {
public void testReadFrom() throws Exception { public void testReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(0); out.writeVInt(0);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.INTERSECTS)); assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.INTERSECTS));
} }
} }
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(1); out.writeVInt(1);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.DISJOINT)); assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.DISJOINT));
} }
} }
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(2); out.writeVInt(2);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.WITHIN)); assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.WITHIN));
} }
} }
@ -83,7 +83,7 @@ public class ShapeRelationTests extends ESTestCase {
public void testInvalidReadFrom() throws Exception { public void testInvalidReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(randomIntBetween(3, Integer.MAX_VALUE)); out.writeVInt(randomIntBetween(3, Integer.MAX_VALUE));
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
ShapeRelation.readFromStream(in); ShapeRelation.readFromStream(in);
fail("Expected IOException"); fail("Expected IOException");
} catch(IOException e) { } catch(IOException e) {

View File

@ -38,14 +38,14 @@ public class SpatialStrategyTests extends ESTestCase {
public void testwriteTo() throws Exception { public void testwriteTo() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
SpatialStrategy.TERM.writeTo(out); SpatialStrategy.TERM.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(0)); assertThat(in.readVInt(), equalTo(0));
} }
} }
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
SpatialStrategy.RECURSIVE.writeTo(out); SpatialStrategy.RECURSIVE.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(1)); assertThat(in.readVInt(), equalTo(1));
} }
} }
@ -54,13 +54,13 @@ public class SpatialStrategyTests extends ESTestCase {
public void testReadFrom() throws Exception { public void testReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(0); out.writeVInt(0);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.TERM)); assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.TERM));
} }
} }
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(1); out.writeVInt(1);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.RECURSIVE)); assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.RECURSIVE));
} }
} }
@ -69,7 +69,7 @@ public class SpatialStrategyTests extends ESTestCase {
public void testInvalidReadFrom() throws Exception { public void testInvalidReadFrom() throws Exception {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE)); out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE));
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
SpatialStrategy.readFromStream(in); SpatialStrategy.readFromStream(in);
fail("Expected IOException"); fail("Expected IOException");
} catch(IOException e) { } catch(IOException e) {

View File

@ -137,7 +137,7 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte
static ShapeBuilder copyShape(ShapeBuilder original) throws IOException { static ShapeBuilder copyShape(ShapeBuilder original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) { try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output); original.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
return namedWriteableRegistry.getReader(ShapeBuilder.class, original.getWriteableName()).read(in); return namedWriteableRegistry.getReader(ShapeBuilder.class, original.getWriteableName()).read(in);
} }
} }

View File

@ -84,7 +84,7 @@ public class StreamsTests extends ESTestCase {
byte stuff[] = new byte[] { 0, 1, 2, 3 }; byte stuff[] = new byte[] { 0, 1, 2, 3 };
BytesRef stuffRef = new BytesRef(stuff, 2, 2); BytesRef stuffRef = new BytesRef(stuff, 2, 2);
BytesArray stuffArray = new BytesArray(stuffRef); BytesArray stuffArray = new BytesArray(stuffRef);
StreamInput input = StreamInput.wrap(stuffArray); StreamInput input = stuffArray.streamInput();
assertEquals(2, input.read()); assertEquals(2, input.read());
assertEquals(3, input.read()); assertEquals(3, input.read());
assertEquals(-1, input.read()); assertEquals(-1, input.read());

View File

@ -60,7 +60,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException { protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
writeableEnum.writeTo(out); writeableEnum.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(in.readVInt(), equalTo(ordinal)); assertThat(in.readVInt(), equalTo(ordinal));
} }
} }
@ -70,7 +70,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
protected void assertReadFromStream(final int ordinal, final Writeable expected) throws IOException { protected void assertReadFromStream(final int ordinal, final Writeable expected) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(ordinal); out.writeVInt(ordinal);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat(reader.read(in), equalTo(expected)); assertThat(reader.read(in), equalTo(expected));
} }
} }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.common.io.stream;
import org.apache.lucene.util.Constants; import org.apache.lucene.util.Constants;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BigArrays;
@ -48,7 +49,7 @@ public class BytesStreamsTests extends ESTestCase {
// test empty stream to array // test empty stream to array
assertEquals(0, out.size()); assertEquals(0, out.size());
assertEquals(0, out.bytes().toBytes().length); assertEquals(0, out.bytes().length());
out.close(); out.close();
} }
@ -63,7 +64,7 @@ public class BytesStreamsTests extends ESTestCase {
// write single byte // write single byte
out.writeByte(expectedData[0]); out.writeByte(expectedData[0]);
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -80,7 +81,7 @@ public class BytesStreamsTests extends ESTestCase {
} }
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -108,14 +109,14 @@ public class BytesStreamsTests extends ESTestCase {
byte[] expectedData = randomizedByteArrayWithSize(expectedSize); byte[] expectedData = randomizedByteArrayWithSize(expectedSize);
out.writeBytes(expectedData); out.writeBytes(expectedData);
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
// bulk-write again with actual bytes // bulk-write again with actual bytes
expectedSize = 10; expectedSize = 10;
expectedData = randomizedByteArrayWithSize(expectedSize); expectedData = randomizedByteArrayWithSize(expectedSize);
out.writeBytes(expectedData); out.writeBytes(expectedData);
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -130,7 +131,7 @@ public class BytesStreamsTests extends ESTestCase {
out.writeBytes(expectedData); out.writeBytes(expectedData);
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -149,7 +150,7 @@ public class BytesStreamsTests extends ESTestCase {
// now write the rest - more than fits into the remaining first page // now write the rest - more than fits into the remaining first page
out.writeBytes(expectedData, initialOffset, additionalLength); out.writeBytes(expectedData, initialOffset, additionalLength);
assertEquals(expectedData.length, out.size()); assertEquals(expectedData.length, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -168,7 +169,7 @@ public class BytesStreamsTests extends ESTestCase {
// ie. we cross over into a third // ie. we cross over into a third
out.writeBytes(expectedData, initialOffset, additionalLength); out.writeBytes(expectedData, initialOffset, additionalLength);
assertEquals(expectedData.length, out.size()); assertEquals(expectedData.length, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -185,7 +186,7 @@ public class BytesStreamsTests extends ESTestCase {
} }
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -202,7 +203,7 @@ public class BytesStreamsTests extends ESTestCase {
} }
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -219,7 +220,7 @@ public class BytesStreamsTests extends ESTestCase {
} }
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
assertArrayEquals(expectedData, out.bytes().toBytes()); assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
out.close(); out.close();
} }
@ -235,7 +236,7 @@ public class BytesStreamsTests extends ESTestCase {
out.seek(position += BigArrays.BYTE_PAGE_SIZE + 10); out.seek(position += BigArrays.BYTE_PAGE_SIZE + 10);
out.seek(position += BigArrays.BYTE_PAGE_SIZE * 2); out.seek(position += BigArrays.BYTE_PAGE_SIZE * 2);
assertEquals(position, out.position()); assertEquals(position, out.position());
assertEquals(position, out.bytes().toBytes().length); assertEquals(position, BytesReference.toBytes(out.bytes()).length);
out.close(); out.close();
} }
@ -288,8 +289,8 @@ public class BytesStreamsTests extends ESTestCase {
out.writeTimeZone(DateTimeZone.forID("CET")); out.writeTimeZone(DateTimeZone.forID("CET"));
out.writeOptionalTimeZone(DateTimeZone.getDefault()); out.writeOptionalTimeZone(DateTimeZone.getDefault());
out.writeOptionalTimeZone(null); out.writeOptionalTimeZone(null);
final byte[] bytes = out.bytes().toBytes(); final byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = StreamInput.wrap(out.bytes().toBytes()); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
assertEquals(in.available(), bytes.length); assertEquals(in.available(), bytes.length);
assertThat(in.readBoolean(), equalTo(false)); assertThat(in.readBoolean(), equalTo(false));
assertThat(in.readByte(), equalTo((byte)1)); assertThat(in.readByte(), equalTo((byte)1));
@ -328,7 +329,7 @@ public class BytesStreamsTests extends ESTestCase {
namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new); namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new);
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn); out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = out.bytes().toBytes(); byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
assertEquals(in.available(), bytes.length); assertEquals(in.available(), bytes.length);
BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class); BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
@ -348,7 +349,7 @@ public class BytesStreamsTests extends ESTestCase {
public void testNamedWriteableUnknownCategory() throws IOException { public void testNamedWriteableUnknownCategory() throws IOException {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
out.writeNamedWriteable(new TestNamedWriteable("test1", "test2")); out.writeNamedWriteable(new TestNamedWriteable("test1", "test2"));
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), new NamedWriteableRegistry()); StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), new NamedWriteableRegistry());
//no named writeable registered with given name, can write but cannot read it back //no named writeable registered with given name, can write but cannot read it back
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(), equalTo("unknown named writeable category [" + BaseNamedWriteable.class.getName() + "]")); assertThat(e.getMessage(), equalTo("unknown named writeable category [" + BaseNamedWriteable.class.getName() + "]"));
@ -368,7 +369,7 @@ public class BytesStreamsTests extends ESTestCase {
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
} }
}); });
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), namedWriteableRegistry); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(BytesReference.toBytes(out.bytes())), namedWriteableRegistry);
try { try {
//no named writeable registered with given name under test category, can write but cannot read it back //no named writeable registered with given name under test category, can write but cannot read it back
in.readNamedWriteable(BaseNamedWriteable.class); in.readNamedWriteable(BaseNamedWriteable.class);
@ -382,7 +383,7 @@ public class BytesStreamsTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2"); TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2");
out.writeNamedWriteable(testNamedWriteable); out.writeNamedWriteable(testNamedWriteable);
StreamInput in = StreamInput.wrap(out.bytes().toBytes()); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
try { try {
in.readNamedWriteable(BaseNamedWriteable.class); in.readNamedWriteable(BaseNamedWriteable.class);
fail("Expected UnsupportedOperationException"); fail("Expected UnsupportedOperationException");
@ -397,7 +398,7 @@ public class BytesStreamsTests extends ESTestCase {
namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null); namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null);
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn); out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = out.bytes().toBytes(); byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
assertEquals(in.available(), bytes.length); assertEquals(in.available(), bytes.length);
IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class)); IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
@ -407,7 +408,7 @@ public class BytesStreamsTests extends ESTestCase {
public void testOptionalWriteableReaderReturnsNull() throws IOException { public void testOptionalWriteableReaderReturnsNull() throws IOException {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
StreamInput in = StreamInput.wrap(out.bytes().toBytes()); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null)); IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream.")); assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
} }
@ -423,7 +424,7 @@ public class BytesStreamsTests extends ESTestCase {
}); });
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn); out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = out.bytes().toBytes(); byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
assertEquals(in.available(), bytes.length); assertEquals(in.available(), bytes.length);
AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class)); AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
@ -442,7 +443,7 @@ public class BytesStreamsTests extends ESTestCase {
final BytesStreamOutput out = new BytesStreamOutput(); final BytesStreamOutput out = new BytesStreamOutput();
out.writeStreamableList(expected); out.writeStreamableList(expected);
final StreamInput in = StreamInput.wrap(out.bytes().toBytes()); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
List<TestStreamable> loaded = in.readStreamableList(TestStreamable::new); List<TestStreamable> loaded = in.readStreamableList(TestStreamable::new);
@ -537,7 +538,7 @@ public class BytesStreamsTests extends ESTestCase {
// toByteArray() must fail // toByteArray() must fail
try { try {
out.bytes().toBytes(); BytesReference.toBytes(out.bytes());
fail("expected IllegalStateException: stream closed"); fail("expected IllegalStateException: stream closed");
} }
catch (IllegalStateException iex1) { catch (IllegalStateException iex1) {
@ -558,7 +559,7 @@ public class BytesStreamsTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble()); GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
out.writeGenericValue(geoPoint); out.writeGenericValue(geoPoint);
StreamInput wrap = StreamInput.wrap(out.bytes()); StreamInput wrap = out.bytes().streamInput();
GeoPoint point = (GeoPoint) wrap.readGenericValue(); GeoPoint point = (GeoPoint) wrap.readGenericValue();
assertEquals(point, geoPoint); assertEquals(point, geoPoint);
} }
@ -566,7 +567,7 @@ public class BytesStreamsTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble()); GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
out.writeGeoPoint(geoPoint); out.writeGeoPoint(geoPoint);
StreamInput wrap = StreamInput.wrap(out.bytes()); StreamInput wrap = out.bytes().streamInput();
GeoPoint point = wrap.readGeoPoint(); GeoPoint point = wrap.readGeoPoint();
assertEquals(point, geoPoint); assertEquals(point, geoPoint);
} }

View File

@ -19,13 +19,13 @@
package org.elasticsearch.common.io.stream; package org.elasticsearch.common.io.stream;
import org.elasticsearch.common.bytes.ByteBufferBytesReference; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -61,8 +61,8 @@ public class StreamTests extends ESTestCase {
for (Tuple<Long, byte[]> value : values) { for (Tuple<Long, byte[]> value : values) {
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
out.writeZLong(value.v1()); out.writeZLong(value.v1());
assertArrayEquals(Long.toString(value.v1()), value.v2(), out.bytes().toBytes()); assertArrayEquals(Long.toString(value.v1()), value.v2(), BytesReference.toBytes(out.bytes()));
ByteBufferBytesReference bytes = new ByteBufferBytesReference(ByteBuffer.wrap(value.v2())); BytesReference bytes = new BytesArray(value.v2());
assertEquals(Arrays.toString(value.v2()), (long)value.v1(), bytes.streamInput().readZLong()); assertEquals(Arrays.toString(value.v2()), (long)value.v1(), bytes.streamInput().readZLong());
} }
} }

View File

@ -51,7 +51,7 @@ public class BoundTransportAddressTests extends ESTestCase {
// serialize // serialize
BytesStreamOutput streamOutput = new BytesStreamOutput(); BytesStreamOutput streamOutput = new BytesStreamOutput();
transportAddress.writeTo(streamOutput); transportAddress.writeTo(streamOutput);
StreamInput in = ByteBufferStreamInput.wrap(streamOutput.bytes()); StreamInput in = streamOutput.bytes().streamInput();
BoundTransportAddress serializedAddress; BoundTransportAddress serializedAddress;
if (randomBoolean()) { if (randomBoolean()) {

View File

@ -82,7 +82,7 @@ public class DistanceUnitTests extends ESTestCase {
for (DistanceUnit unit : DistanceUnit.values()) { for (DistanceUnit unit : DistanceUnit.values()) {
try (BytesStreamOutput out = new BytesStreamOutput()) { try (BytesStreamOutput out = new BytesStreamOutput()) {
unit.writeTo(out); unit.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) { try (StreamInput in = out.bytes().streamInput()) {
assertThat("Roundtrip serialisation failed.", DistanceUnit.readFromStream(in), equalTo(unit)); assertThat("Roundtrip serialisation failed.", DistanceUnit.readFromStream(in), equalTo(unit));
} }
} }

View File

@ -145,7 +145,7 @@ public class FuzzinessTests extends ESTestCase {
private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException {
BytesStreamOutput output = new BytesStreamOutput(); BytesStreamOutput output = new BytesStreamOutput();
in.writeTo(output); in.writeTo(output);
StreamInput streamInput = StreamInput.wrap(output.bytes()); StreamInput streamInput = output.bytes().streamInput();
return new Fuzziness(streamInput); return new Fuzziness(streamInput);
} }
} }

View File

@ -161,7 +161,7 @@ public class TimeValueTests extends ESTestCase {
value.writeTo(out); value.writeTo(out);
assertEquals(expectedSize, out.size()); assertEquals(expectedSize, out.size());
StreamInput in = StreamInput.wrap(out.bytes()); StreamInput in = out.bytes().streamInput();
TimeValue inValue = new TimeValue(in); TimeValue inValue = new TimeValue(in);
assertThat(inValue, equalTo(value)); assertThat(inValue, equalTo(value));

View File

@ -154,7 +154,7 @@ public class ThreadContextTests extends ESTestCase {
assertNull(threadContext.getTransient("ctx.foo")); assertNull(threadContext.getTransient("ctx.foo"));
assertEquals("1", threadContext.getHeader("default")); assertEquals("1", threadContext.getHeader("default"));
threadContext.readHeaders(StreamInput.wrap(out.bytes())); threadContext.readHeaders(out.bytes().streamInput());
assertEquals("bar", threadContext.getHeader("foo")); assertEquals("bar", threadContext.getHeader("foo"));
assertNull(threadContext.getTransient("ctx.foo")); assertNull(threadContext.getTransient("ctx.foo"));
} }
@ -179,7 +179,7 @@ public class ThreadContextTests extends ESTestCase {
{ {
Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); Settings otherSettings = Settings.builder().put("request.headers.default", "5").build();
ThreadContext otherhreadContext = new ThreadContext(otherSettings); ThreadContext otherhreadContext = new ThreadContext(otherSettings);
otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); otherhreadContext.readHeaders(out.bytes().streamInput());
assertEquals("bar", otherhreadContext.getHeader("foo")); assertEquals("bar", otherhreadContext.getHeader("foo"));
assertNull(otherhreadContext.getTransient("ctx.foo")); assertNull(otherhreadContext.getTransient("ctx.foo"));
@ -202,7 +202,7 @@ public class ThreadContextTests extends ESTestCase {
{ {
Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); Settings otherSettings = Settings.builder().put("request.headers.default", "5").build();
ThreadContext otherhreadContext = new ThreadContext(otherSettings); ThreadContext otherhreadContext = new ThreadContext(otherSettings);
otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); otherhreadContext.readHeaders(out.bytes().streamInput());
assertEquals("bar", otherhreadContext.getHeader("foo")); assertEquals("bar", otherhreadContext.getHeader("foo"));
assertNull(otherhreadContext.getTransient("ctx.foo")); assertNull(otherhreadContext.getTransient("ctx.foo"));

View File

@ -57,11 +57,10 @@ public class XContentFactoryTests extends ESTestCase {
builder.endObject(); builder.endObject();
assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type)); assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type));
BytesArray bytesArray = builder.bytes().toBytesArray(); assertThat(XContentFactory.xContentType(builder.bytes().streamInput()), equalTo(type));
assertThat(XContentFactory.xContentType(StreamInput.wrap(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length())), equalTo(type));
// CBOR is binary, cannot use String // CBOR is binary, cannot use String
if (type != XContentType.CBOR) { if (type != XContentType.CBOR && type != XContentType.SMILE) {
assertThat(XContentFactory.xContentType(builder.string()), equalTo(type)); assertThat(XContentFactory.xContentType(builder.string()), equalTo(type));
} }
} }

View File

@ -94,7 +94,7 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.startObject(); xContentBuilder.startObject();
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
xContentBuilder.endObject(); xContentBuilder.endObject();
assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"foo\":{\"test\":\"value\"}}")); assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"}}"));
} }
{ {
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
@ -102,7 +102,7 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}"));
xContentBuilder.endObject(); xContentBuilder.endObject();
assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}"));
} }
{ {
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
@ -110,7 +110,7 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.field("test", "value"); xContentBuilder.field("test", "value");
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
xContentBuilder.endObject(); xContentBuilder.endObject();
assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}"));
} }
{ {
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
@ -119,7 +119,7 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}"));
xContentBuilder.field("test1", "value1"); xContentBuilder.field("test1", "value1");
xContentBuilder.endObject(); xContentBuilder.endObject();
assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
} }
{ {
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
@ -129,7 +129,7 @@ public class XContentBuilderTests extends ESTestCase {
xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}"));
xContentBuilder.field("test1", "value1"); xContentBuilder.field("test1", "value1");
xContentBuilder.endObject(); xContentBuilder.endObject();
assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}"));
} }
} }
@ -161,15 +161,14 @@ public class XContentBuilderTests extends ESTestCase {
gen.writeEndObject(); gen.writeEndObject();
gen.close(); gen.close();
byte[] data = bos.bytes().toBytes(); String sData = bos.bytes().utf8ToString();
String sData = new String(data, "UTF8");
assertThat(sData, equalTo("{\"name\":\"something\", source : { test : \"value\" },\"name2\":\"something2\"}")); assertThat(sData, equalTo("{\"name\":\"something\", source : { test : \"value\" },\"name2\":\"something2\"}"));
} }
public void testByteConversion() throws Exception { public void testByteConversion() throws Exception {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject().field("test_name", (Byte)(byte)120).endObject(); builder.startObject().field("test_name", (Byte)(byte)120).endObject();
assertThat(builder.bytes().toUtf8(), equalTo("{\"test_name\":120}")); assertThat(builder.bytes().utf8ToString(), equalTo("{\"test_name\":120}"));
} }
public void testDateTypesConversion() throws Exception { public void testDateTypesConversion() throws Exception {

View File

@ -63,7 +63,8 @@ public class JsonVsCborTests extends ESTestCase {
xsonGen.close(); xsonGen.close();
jsonGen.close(); jsonGen.close();
verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.CBOR).createParser(xsonOs.bytes().toBytes())); verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes()),
XContentFactory.xContent(XContentType.CBOR).createParser(xsonOs.bytes()));
} }
private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException {

View File

@ -63,7 +63,8 @@ public class JsonVsSmileTests extends ESTestCase {
xsonGen.close(); xsonGen.close();
jsonGen.close(); jsonGen.close();
verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes().toBytes())); verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes()),
XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes()));
} }
private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException {

Some files were not shown because too many files have changed in this diff Show More