Mirror of https://github.com/honeymoose/OpenSearch.git, synced 2025-02-22 21:05:23 +00:00
Move SearchHit and SearchHits to Writeable (#37931)
This allows SearchHits to be made immutable, though quite a few fields in SearchHit unfortunately have to stay mutable. Relates to #34389
This commit is contained in:
parent ba285a56a7
commit b91d587275
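For context, the heart of this change is the switch from the Streamable pattern, where a blank instance is created and then populated by readFrom(StreamInput), to the Writeable pattern, where deserialization happens in a constructor that takes a StreamInput; that is what lets fields become final. A minimal sketch of the two styles, using a hypothetical Point class rather than the real SearchHit:

import java.io.IOException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;

// Old style: fields must stay mutable so readFrom() can populate a blank instance.
class PointStreamable implements Streamable {
    private int x;
    private int y;

    PointStreamable() {} // blank instance, filled in later by readFrom()

    @Override
    public void readFrom(StreamInput in) throws IOException {
        x = in.readVInt();
        y = in.readVInt();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(x);
        out.writeVInt(y);
    }
}

// New style: deserialization happens in a constructor, so fields can be final.
class PointWriteable implements Writeable {
    private final int x;
    private final int y;

    PointWriteable(StreamInput in) throws IOException {
        x = in.readVInt();
        y = in.readVInt();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(x);
        out.writeVInt(y);
    }
}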
@@ -49,8 +49,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject {
     }
 
     RatedSearchHit(StreamInput in) throws IOException {
-        this(SearchHit.readSearchHit(in),
-            in.readBoolean() == true ? OptionalInt.of(in.readVInt()) : OptionalInt.empty());
+        this(new SearchHit(in), in.readBoolean() == true ? OptionalInt.of(in.readVInt()) : OptionalInt.empty());
     }
 
     @Override
@@ -32,7 +32,6 @@ import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -80,17 +79,17 @@ import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.r
  *
  * @see SearchHits
  */
-public final class SearchHit implements Streamable, ToXContentObject, Iterable<DocumentField> {
+public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
 
-    private transient int docId;
+    private final transient int docId;
 
     private static final float DEFAULT_SCORE = Float.NaN;
     private float score = DEFAULT_SCORE;
 
-    private Text id;
-    private Text type;
+    private final Text id;
+    private final Text type;
 
-    private NestedIdentity nestedIdentity;
+    private final NestedIdentity nestedIdentity;
 
     private long version = -1;
     private long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
@@ -98,7 +97,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
 
     private BytesReference source;
 
-    private Map<String, DocumentField> fields = emptyMap();
+    private Map<String, DocumentField> fields;
 
     private Map<String, HighlightField> highlightFields = null;
 
@@ -121,10 +120,6 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
 
     private Map<String, SearchHits> innerHits;
 
-    SearchHit() {
-
-    }
-
     //used only in tests
     public SearchHit(int docId) {
         this(docId, null, null, null);
@@ -146,6 +141,134 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
         this.fields = fields;
     }
+
+    public SearchHit(StreamInput in) throws IOException {
+        docId = -1;
+        score = in.readFloat();
+        id = in.readOptionalText();
+        type = in.readOptionalText();
+        nestedIdentity = in.readOptionalWriteable(NestedIdentity::new);
+        version = in.readLong();
+        if (in.getVersion().onOrAfter(Version.V_6_7_0)) {
+            seqNo = in.readZLong();
+            primaryTerm = in.readVLong();
+        }
+        source = in.readBytesReference();
+        if (source.length() == 0) {
+            source = null;
+        }
+        if (in.readBoolean()) {
+            explanation = readExplanation(in);
+        }
+        int size = in.readVInt();
+        if (size == 0) {
+            fields = emptyMap();
+        } else if (size == 1) {
+            DocumentField hitField = DocumentField.readDocumentField(in);
+            fields = singletonMap(hitField.getName(), hitField);
+        } else {
+            Map<String, DocumentField> fields = new HashMap<>();
+            for (int i = 0; i < size; i++) {
+                DocumentField hitField = DocumentField.readDocumentField(in);
+                fields.put(hitField.getName(), hitField);
+            }
+            this.fields = unmodifiableMap(fields);
+        }
+
+        size = in.readVInt();
+        if (size == 0) {
+            highlightFields = emptyMap();
+        } else if (size == 1) {
+            HighlightField field = readHighlightField(in);
+            highlightFields = singletonMap(field.name(), field);
+        } else {
+            Map<String, HighlightField> highlightFields = new HashMap<>();
+            for (int i = 0; i < size; i++) {
+                HighlightField field = readHighlightField(in);
+                highlightFields.put(field.name(), field);
+            }
+            this.highlightFields = unmodifiableMap(highlightFields);
+        }
+
+        sortValues = new SearchSortValues(in);
+
+        size = in.readVInt();
+        if (size > 0) {
+            matchedQueries = new String[size];
+            for (int i = 0; i < size; i++) {
+                matchedQueries[i] = in.readString();
+            }
+        }
+        // we call the setter here because that also sets the local index parameter
+        shard(in.readOptionalWriteable(SearchShardTarget::new));
+        size = in.readVInt();
+        if (size > 0) {
+            innerHits = new HashMap<>(size);
+            for (int i = 0; i < size; i++) {
+                String key = in.readString();
+                SearchHits value = new SearchHits(in);
+                innerHits.put(key, value);
+            }
+        } else {
+            innerHits = null;
+        }
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeFloat(score);
+        out.writeOptionalText(id);
+        out.writeOptionalText(type);
+        out.writeOptionalWriteable(nestedIdentity);
+        out.writeLong(version);
+        if (out.getVersion().onOrAfter(Version.V_6_7_0)) {
+            out.writeZLong(seqNo);
+            out.writeVLong(primaryTerm);
+        }
+        out.writeBytesReference(source);
+        if (explanation == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            writeExplanation(out, explanation);
+        }
+        if (fields == null) {
+            out.writeVInt(0);
+        } else {
+            out.writeVInt(fields.size());
+            for (DocumentField hitField : getFields().values()) {
+                hitField.writeTo(out);
+            }
+        }
+        if (highlightFields == null) {
+            out.writeVInt(0);
+        } else {
+            out.writeVInt(highlightFields.size());
+            for (HighlightField highlightField : highlightFields.values()) {
+                highlightField.writeTo(out);
+            }
+        }
+        sortValues.writeTo(out);
+
+        if (matchedQueries.length == 0) {
+            out.writeVInt(0);
+        } else {
+            out.writeVInt(matchedQueries.length);
+            for (String matchedFilter : matchedQueries) {
+                out.writeString(matchedFilter);
+            }
+        }
+        out.writeOptionalWriteable(shard);
+        if (innerHits == null) {
+            out.writeVInt(0);
+        } else {
+            out.writeVInt(innerHits.size());
+            for (Map.Entry<String, SearchHits> entry : innerHits.entrySet()) {
+                out.writeString(entry.getKey());
+                entry.getValue().writeTo(out);
+            }
+        }
+    }
 
     public int docId() {
         return this.docId;
     }
@@ -771,140 +894,6 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
         builder.endObject();
     }
 
-    public static SearchHit readSearchHit(StreamInput in) throws IOException {
-        SearchHit hit = new SearchHit();
-        hit.readFrom(in);
-        return hit;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        score = in.readFloat();
-        id = in.readOptionalText();
-        type = in.readOptionalText();
-        nestedIdentity = in.readOptionalWriteable(NestedIdentity::new);
-        version = in.readLong();
-        if (in.getVersion().onOrAfter(Version.V_6_7_0)) {
-            seqNo = in.readZLong();
-            primaryTerm = in.readVLong();
-        }
-        source = in.readBytesReference();
-        if (source.length() == 0) {
-            source = null;
-        }
-        if (in.readBoolean()) {
-            explanation = readExplanation(in);
-        }
-        int size = in.readVInt();
-        if (size == 0) {
-            fields = emptyMap();
-        } else if (size == 1) {
-            DocumentField hitField = DocumentField.readDocumentField(in);
-            fields = singletonMap(hitField.getName(), hitField);
-        } else {
-            Map<String, DocumentField> fields = new HashMap<>();
-            for (int i = 0; i < size; i++) {
-                DocumentField hitField = DocumentField.readDocumentField(in);
-                fields.put(hitField.getName(), hitField);
-            }
-            this.fields = unmodifiableMap(fields);
-        }
-
-        size = in.readVInt();
-        if (size == 0) {
-            highlightFields = emptyMap();
-        } else if (size == 1) {
-            HighlightField field = readHighlightField(in);
-            highlightFields = singletonMap(field.name(), field);
-        } else {
-            Map<String, HighlightField> highlightFields = new HashMap<>();
-            for (int i = 0; i < size; i++) {
-                HighlightField field = readHighlightField(in);
-                highlightFields.put(field.name(), field);
-            }
-            this.highlightFields = unmodifiableMap(highlightFields);
-        }
-
-        sortValues = new SearchSortValues(in);
-
-        size = in.readVInt();
-        if (size > 0) {
-            matchedQueries = new String[size];
-            for (int i = 0; i < size; i++) {
-                matchedQueries[i] = in.readString();
-            }
-        }
-        // we call the setter here because that also sets the local index parameter
-        shard(in.readOptionalWriteable(SearchShardTarget::new));
-        size = in.readVInt();
-        if (size > 0) {
-            innerHits = new HashMap<>(size);
-            for (int i = 0; i < size; i++) {
-                String key = in.readString();
-                SearchHits value = SearchHits.readSearchHits(in);
-                innerHits.put(key, value);
-            }
-        } else {
-            innerHits = null;
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeFloat(score);
-        out.writeOptionalText(id);
-        out.writeOptionalText(type);
-        out.writeOptionalWriteable(nestedIdentity);
-        out.writeLong(version);
-        if (out.getVersion().onOrAfter(Version.V_6_7_0)) {
-            out.writeZLong(seqNo);
-            out.writeVLong(primaryTerm);
-        }
-        out.writeBytesReference(source);
-        if (explanation == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            writeExplanation(out, explanation);
-        }
-        if (fields == null) {
-            out.writeVInt(0);
-        } else {
-            out.writeVInt(fields.size());
-            for (DocumentField hitField : getFields().values()) {
-                hitField.writeTo(out);
-            }
-        }
-        if (highlightFields == null) {
-            out.writeVInt(0);
-        } else {
-            out.writeVInt(highlightFields.size());
-            for (HighlightField highlightField : highlightFields.values()) {
-                highlightField.writeTo(out);
-            }
-        }
-        sortValues.writeTo(out);
-
-        if (matchedQueries.length == 0) {
-            out.writeVInt(0);
-        } else {
-            out.writeVInt(matchedQueries.length);
-            for (String matchedFilter : matchedQueries) {
-                out.writeString(matchedFilter);
-            }
-        }
-        out.writeOptionalWriteable(shard);
-        if (innerHits == null) {
-            out.writeVInt(0);
-        } else {
-            out.writeVInt(innerHits.size());
-            for (Map.Entry<String, SearchHits> entry : innerHits.entrySet()) {
-                out.writeString(entry.getKey());
-                entry.getValue().writeTo(out);
-            }
-        }
-    }
-
     @Override
     public boolean equals(Object obj) {
         if (obj == null || getClass() != obj.getClass()) {
@@ -26,7 +26,6 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -43,7 +42,7 @@ import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
 
-public final class SearchHits implements Streamable, ToXContentFragment, Iterable<SearchHit> {
+public final class SearchHits implements Writeable, ToXContentFragment, Iterable<SearchHit> {
     public static SearchHits empty() {
         return empty(true);
     }
@@ -55,22 +54,15 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl
 
     public static final SearchHit[] EMPTY = new SearchHit[0];
 
-    private SearchHit[] hits;
-
-    private Total totalHits;
-
-    private float maxScore;
-
+    private final SearchHit[] hits;
+    private final Total totalHits;
+    private final float maxScore;
     @Nullable
-    private SortField[] sortFields;
+    private final SortField[] sortFields;
     @Nullable
-    private String collapseField;
+    private final String collapseField;
     @Nullable
-    private Object[] collapseValues;
-
-    SearchHits() {
-
-    }
+    private final Object[] collapseValues;
 
     public SearchHits(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) {
         this(hits, totalHits, maxScore, null, null, null);
@@ -86,6 +78,55 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl
         this.collapseValues = collapseValues;
     }
+
+    public SearchHits(StreamInput in) throws IOException {
+        if (in.readBoolean()) {
+            totalHits = new Total(in);
+        } else {
+            // track_total_hits is false
+            totalHits = null;
+        }
+        maxScore = in.readFloat();
+        int size = in.readVInt();
+        if (size == 0) {
+            hits = EMPTY;
+        } else {
+            hits = new SearchHit[size];
+            for (int i = 0; i < hits.length; i++) {
+                hits[i] = new SearchHit(in);
+            }
+        }
+        if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
+            sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new);
+            collapseField = in.readOptionalString();
+            collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new);
+        } else {
+            sortFields = null;
+            collapseField = null;
+            collapseValues = null;
+        }
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        final boolean hasTotalHits = totalHits != null;
+        out.writeBoolean(hasTotalHits);
+        if (hasTotalHits) {
+            totalHits.writeTo(out);
+        }
+        out.writeFloat(maxScore);
+        out.writeVInt(hits.length);
+        if (hits.length > 0) {
+            for (SearchHit hit : hits) {
+                hit.writeTo(out);
+            }
+        }
+        if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
+            out.writeOptionalArray(Lucene::writeSortField, sortFields);
+            out.writeOptionalString(collapseField);
+            out.writeOptionalArray(Lucene::writeSortValue, collapseValues);
+        }
+    }
 
     /**
      * The total number of hits for the query or null if the tracking of total hits
      * is disabled in the request.
@@ -222,58 +263,6 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl
         return new SearchHits(hits.toArray(new SearchHit[0]), totalHits, maxScore);
     }
 
-    public static SearchHits readSearchHits(StreamInput in) throws IOException {
-        SearchHits hits = new SearchHits();
-        hits.readFrom(in);
-        return hits;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        if (in.readBoolean()) {
-            totalHits = new Total(in);
-        } else {
-            // track_total_hits is false
-            totalHits = null;
-        }
-        maxScore = in.readFloat();
-        int size = in.readVInt();
-        if (size == 0) {
-            hits = EMPTY;
-        } else {
-            hits = new SearchHit[size];
-            for (int i = 0; i < hits.length; i++) {
-                hits[i] = SearchHit.readSearchHit(in);
-            }
-        }
-        if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
-            sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new);
-            collapseField = in.readOptionalString();
-            collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new);
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        final boolean hasTotalHits = totalHits != null;
-        out.writeBoolean(hasTotalHits);
-        if (hasTotalHits) {
-            totalHits.writeTo(out);
-        }
-        out.writeFloat(maxScore);
-        out.writeVInt(hits.length);
-        if (hits.length > 0) {
-            for (SearchHit hit : hits) {
-                hit.writeTo(out);
-            }
-        }
-        if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
-            out.writeOptionalArray(Lucene::writeSortField, sortFields);
-            out.writeOptionalString(collapseField);
-            out.writeOptionalArray(Lucene::writeSortValue, collapseValues);
-        }
-    }
-
     @Override
     public boolean equals(Object obj) {
         if (obj == null || getClass() != obj.getClass()) {
@@ -65,7 +65,7 @@ public class InternalTopHits extends InternalAggregation implements TopHits {
         from = in.readVInt();
         size = in.readVInt();
         topDocs = Lucene.readTopDocs(in);
-        searchHits = SearchHits.readSearchHits(in);
+        searchHits = new SearchHits(in);
     }
 
     @Override
@@ -22,9 +22,9 @@ package org.elasticsearch.search.fetch;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.query.QuerySearchResult;
 
 import java.io.IOException;
@@ -92,7 +92,7 @@ public final class FetchSearchResult extends SearchPhaseResult {
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         requestId = in.readLong();
-        hits = SearchHits.readSearchHits(in);
+        hits = new SearchHits(in);
     }
 
     @Override
@@ -51,7 +51,7 @@ public class InternalSearchResponse extends SearchResponseSections implements Wr
 
     public InternalSearchResponse(StreamInput in) throws IOException {
         super(
-            SearchHits.readSearchHits(in),
+            new SearchHits(in),
             in.readBoolean() ? InternalAggregations.readAggregations(in) : null,
             in.readBoolean() ? new Suggest(in) : null,
             in.readBoolean(),
@@ -292,7 +292,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
         super(in);
         this.doc = Lucene.readScoreDoc(in);
         if (in.readBoolean()) {
-            this.hit = SearchHit.readSearchHit(in);
+            this.hit = new SearchHit(in);
         }
         int contextSize = in.readInt();
         this.contexts = new LinkedHashMap<>(contextSize);
@@ -27,6 +27,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.document.DocumentField;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -39,7 +40,7 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.SearchHit.NestedIdentity;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests;
-import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.elasticsearch.test.RandomObjects;
 import org.elasticsearch.test.VersionUtils;
 
@@ -59,7 +60,7 @@ import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
-public class SearchHitTests extends AbstractStreamableTestCase<SearchHit> {
+public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
     public static SearchHit createTestItem(boolean withOptionalInnerHits, boolean withShardTarget) {
         return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget);
     }
@@ -139,8 +140,8 @@ public class SearchHitTests extends AbstractStreamableTestCase<SearchHit> {
     }
 
     @Override
-    protected SearchHit createBlankInstance() {
-        return new SearchHit();
+    protected Writeable.Reader<SearchHit> instanceReader() {
+        return SearchHit::new;
     }
 
     @Override
@@ -246,7 +247,7 @@ public class SearchHitTests extends AbstractStreamableTestCase<SearchHit> {
         SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f);
 
         Version version = VersionUtils.randomVersion(random());
-        SearchHits results = copyStreamable(hits, getNamedWriteableRegistry(), SearchHits::new, version);
+        SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, version);
         SearchShardTarget deserializedTarget = results.getAt(0).getShard();
         assertThat(deserializedTarget, equalTo(target));
         assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
@@ -27,6 +27,7 @@ import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lucene.LuceneTests;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
@@ -37,7 +38,7 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
 import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
@@ -45,7 +46,7 @@ import java.util.Base64;
 import java.util.Collections;
 import java.util.function.Predicate;
 
-public class SearchHitsTests extends AbstractStreamableXContentTestCase<SearchHits> {
+public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
 
     public static SearchHits createTestItem(boolean withOptionalInnerHits, boolean withShardTarget) {
         return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget);
@@ -171,8 +172,8 @@ public class SearchHitsTests extends AbstractStreamableXContentTestCase<SearchHi
     }
 
     @Override
-    protected SearchHits createBlankInstance() {
-        return new SearchHits();
+    protected Writeable.Reader<SearchHits> instanceReader() {
+        return SearchHits::new;
     }
 
     @Override
@@ -274,8 +275,7 @@ public class SearchHitsTests extends AbstractStreamableXContentTestCase<SearchHi
     public void testReadFromPre6_6_0() throws IOException {
        try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode("AQC/gAAAAAA="))) {
            in.setVersion(VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_6_6_0)));
-            SearchHits searchHits = new SearchHits();
-            searchHits.readFrom(in);
+            SearchHits searchHits = new SearchHits(in);
             assertEquals(0, searchHits.getHits().length);
             assertNotNull(searchHits.getTotalHits());
             assertEquals(0L, searchHits.getTotalHits().value);
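As a rough usage sketch (not part of the commit), round-tripping a SearchHits instance now goes through the StreamInput constructor instead of the removed readSearchHits factory; the wrapper class and method name below are made up for illustration:

import java.io.IOException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.SearchHits;

class SearchHitsRoundTripSketch {
    // Serialize with writeTo(), then deserialize with the new SearchHits(StreamInput) constructor.
    static SearchHits roundTrip(SearchHits original) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                return new SearchHits(in); // replaces SearchHits.readSearchHits(in)
            }
        }
    }
}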