Search hit refactoring (#41656) (#54584)

Refactor SearchHit to keep document fields and metadata fields in separate maps.
This is part of the bigger refactoring tracked in issue #24422 to remove the
dependency on MapperService when checking whether a field is a metadata field.

Relates to PR: #38373
Relates to issue #24422

Co-authored-by: sandmannn <bohdanpukalskyi@gmail.com>
Mayya Sharipova 2020-04-01 15:19:00 -04:00 committed by GitHub
parent 2cd35bf696
commit bf4857d9e0
38 changed files with 231 additions and 124 deletions
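For orientation before reading the diff: pieced together from the hunks below, the new API shape looks roughly like the sketch that follows. This is a hedged illustration, not code from the commit; the doc id, field names, and field values are invented, while SearchHit, DocumentField, Text, splitFieldsByMetadata, and setField are taken from the diff itself.

    // A mixed map of document and metadata fields, as a fetch phase might produce.
    Map<String, DocumentField> searchFields = new HashMap<>();
    searchFields.put("_routing", new DocumentField("_routing", Collections.singletonList("user123")));
    searchFields.put("title", new DocumentField("title", Collections.singletonList("elasticsearch")));

    // The new static helper splits the map using DocumentField#isMetadataField,
    // with no MapperService involved.
    Map<String, DocumentField> documentFields = new HashMap<>();
    Map<String, DocumentField> metaFields = new HashMap<>();
    SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);

    // Constructors now take both maps; later single-field writes are routed by setField.
    SearchHit hit = new SearchHit(0, "id1", new Text("_doc"), documentFields, metaFields);
    hit.setField("_ignored", new DocumentField("_ignored", Collections.singletonList("malformed_field")));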


@@ -43,7 +43,7 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<org.elastic
         if (randomBoolean()) {
             hits = new ArrayList<>();
             for (int i = 0; i < size; i++) {
-                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>()));
+                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>(), new HashMap<>()));
             }
         }
         if (randomBoolean()) {


@@ -165,7 +165,7 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase<Search
     }

     public void testSearchResponseToXContent() throws IOException {
-        SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap());
+        SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
         hit.score(2.0f);
         SearchHit[] hits = new SearchHit[] { hit };


@@ -102,7 +102,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
                 shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);
                 shardContext.lookup().source().setSource(document);
                 hitContext.reset(
-                    new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap()),
+                    new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap(), Collections.emptyMap()),
                     percolatorLeafReaderContext, slot, percolatorIndexSearcher
                 );
                 hitContext.cache().clear();


@@ -110,7 +110,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
                 hit.fields(fields);
             }
             IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot);
-            fields.put(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
+            hit.setField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
         }
     }
 }


@@ -74,7 +74,8 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         SearchHit[] hits = new SearchHit[6];
         for (int i = 0; i < 6; i++) {
             rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -124,7 +125,8 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
                     rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
                 }
             }
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -181,7 +183,8 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         // only create four hits
         SearchHit[] hits = new SearchHit[4];
         for (int i = 0; i < 4; i++) {
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();


@@ -117,7 +117,8 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
             if (relevanceRatings[i] != null) {
                 rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
             }
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;


@@ -204,7 +204,7 @@ public class MeanReciprocalRankTests extends ESTestCase {
     private static SearchHit[] createSearchHits(int from, int to, String index) {
         SearchHit[] hits = new SearchHit[to + 1 - from];
         for (int i = from; i <= to; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;


@@ -114,7 +114,7 @@ public class PrecisionAtKTests extends ESTestCase {
         rated.add(createRatedDoc("test", "1", RELEVANT_RATING));
         // add an unlabeled search hit
         SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
-        searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+        searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
         searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));

         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);
@@ -133,7 +133,8 @@ public class PrecisionAtKTests extends ESTestCase {
     public void testNoRatedDocs() throws Exception {
         SearchHit[] hits = new SearchHit[5];
         for (int i = 0; i < 5; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
@@ -255,7 +256,7 @@ public class PrecisionAtKTests extends ESTestCase {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;


@@ -181,7 +181,8 @@ public class RankEvalResponseTests extends ESTestCase {
     }

     private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
-        SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+        SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME),
+            Collections.emptyMap(), Collections.emptyMap());
         hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         hit.score(1.0f);
         return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());


@@ -42,7 +42,7 @@ public class RatedSearchHitTests extends ESTestCase {
         OptionalInt rating = randomBoolean() ? OptionalInt.empty()
                 : OptionalInt.of(randomIntBetween(0, 5));
         SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10),
-                new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+                new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
         RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating);
         return ratedSearchHit;
     }
@@ -56,7 +56,7 @@ public class RatedSearchHitTests extends ESTestCase {
             break;
         case 1:
             hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10),
-                    new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+                    new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
             break;
         default:
             throw new IllegalStateException("The test should only allow two parameters mutated");


@@ -115,7 +115,7 @@ public class RecallAtKTests extends ESTestCase {
         int k = 5;
         SearchHit[] hits = new SearchHit[k];
         for (int i = 0; i < k; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
@@ -237,7 +237,7 @@ public class RecallAtKTests extends ESTestCase {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;


@@ -504,7 +504,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
         action.start();

         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),0);
         InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1);


@@ -160,7 +160,7 @@ public class ClientScrollableHitSourceTests extends ESTestCase {
     private SearchResponse createSearchResponse() {
         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),0);
         InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1);


@@ -96,7 +96,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
     private BytesReference source;

-    private Map<String, DocumentField> fields;
+    private Map<String, DocumentField> documentFields;
+    private Map<String, DocumentField> metaFields;

     private Map<String, HighlightField> highlightFields = null;
@@ -121,14 +122,15 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
     //used only in tests
     public SearchHit(int docId) {
-        this(docId, null, null, null);
+        this(docId, null, null, null, null);
     }

-    public SearchHit(int docId, String id, Text type, Map<String, DocumentField> fields) {
-        this(docId, id, type, null, fields);
+    public SearchHit(int docId, String id, Text type, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
+        this(docId, id, type, null, documentFields, metaFields);
     }

-    public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map<String, DocumentField> fields) {
+    public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity,
+                     Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
         this.docId = nestedTopDocId;
         if (id != null) {
             this.id = new Text(id);
@@ -137,7 +139,15 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         }
         this.type = type;
         this.nestedIdentity = nestedIdentity;
-        this.fields = fields;
+        this.documentFields = documentFields;
+        if (this.documentFields == null) {
+            this.documentFields = new HashMap<>();
+        }
+        this.metaFields = metaFields;
+        if (this.metaFields == null) {
+            this.metaFields = new HashMap<>();
+        }
     }

     public SearchHit(StreamInput in) throws IOException {
@@ -158,22 +168,17 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         if (in.readBoolean()) {
             explanation = readExplanation(in);
         }
-        int size = in.readVInt();
-        if (size == 0) {
-            fields = emptyMap();
-        } else if (size == 1) {
-            DocumentField hitField = new DocumentField(in);
-            fields = singletonMap(hitField.getName(), hitField);
+        if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
+            documentFields = in.readMap(StreamInput::readString, DocumentField::new);
+            metaFields = in.readMap(StreamInput::readString, DocumentField::new);
         } else {
-            Map<String, DocumentField> fields = new HashMap<>();
-            for (int i = 0; i < size; i++) {
-                DocumentField hitField = new DocumentField(in);
-                fields.put(hitField.getName(), hitField);
-            }
-            this.fields = unmodifiableMap(fields);
+            Map<String, DocumentField> fields = readFields(in);
+            documentFields = new HashMap<>();
+            metaFields = new HashMap<>();
+            SearchHit.splitFieldsByMetadata(fields, documentFields, metaFields);
         }
-        size = in.readVInt();
+        int size = in.readVInt();
         if (size == 0) {
             highlightFields = emptyMap();
         } else if (size == 1) {
@@ -212,6 +217,36 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         }
     }

+    private Map<String, DocumentField> readFields(StreamInput in) throws IOException {
+        Map<String, DocumentField> fields;
+        int size = in.readVInt();
+        if (size == 0) {
+            fields = emptyMap();
+        } else if (size == 1) {
+            DocumentField hitField = new DocumentField(in);
+            fields = singletonMap(hitField.getName(), hitField);
+        } else {
+            fields = new HashMap<>(size);
+            for (int i = 0; i < size; i++) {
+                DocumentField field = new DocumentField(in);
+                fields.put(field.getName(), field);
+            }
+            fields = unmodifiableMap(fields);
+        }
+        return fields;
+    }
+
+    private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException {
+        if (fields == null) {
+            out.writeVInt(0);
+        } else {
+            out.writeVInt(fields.size());
+            for (DocumentField field : fields.values()) {
+                field.writeTo(out);
+            }
+        }
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeFloat(score);
@@ -230,13 +265,11 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
             out.writeBoolean(true);
             writeExplanation(out, explanation);
         }
-        if (fields == null) {
-            out.writeVInt(0);
+        if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
+            out.writeMap(documentFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
+            out.writeMap(metaFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
         } else {
-            out.writeVInt(fields.size());
-            for (DocumentField hitField : getFields().values()) {
-                hitField.writeTo(out);
-            }
+            writeFields(out, this.getFields());
         }
         if (highlightFields == null) {
             out.writeVInt(0);
@@ -415,7 +448,9 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
     @Override
     public Iterator<DocumentField> iterator() {
-        return fields.values().iterator();
+        // need to join the fields and metadata fields
+        Map<String, DocumentField> allFields = this.getFields();
+        return allFields.values().iterator();
     }

     /**
@@ -425,21 +460,45 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         return getFields().get(fieldName);
     }

+    /*
+     * Adds a new DocumentField to the map in case both parameters are not null.
+     * */
+    public void setField(String fieldName, DocumentField field) {
+        if (fieldName == null || field == null) return;
+        if (field.isMetadataField()) {
+            this.metaFields.put(fieldName, field);
+        } else {
+            this.documentFields.put(fieldName, field);
+        }
+    }
+
     /**
      * A map of hit fields (from field name to hit fields) if additional fields
      * were required to be loaded.
      */
     public Map<String, DocumentField> getFields() {
-        return fields == null ? emptyMap() : fields;
+        Map<String, DocumentField> fields = new HashMap<>();
+        fields.putAll(metaFields);
+        fields.putAll(documentFields);
+        return fields;
     }

     // returns the fields without handling null cases
     public Map<String, DocumentField> fieldsOrNull() {
-        return fields;
+        return getFields();
     }

     public void fields(Map<String, DocumentField> fields) {
-        this.fields = fields;
+        Objects.requireNonNull(fields);
+        this.metaFields = new HashMap<>();
+        this.documentFields = new HashMap<>();
+        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
+            if (fieldEntry.getValue().isMetadataField()) {
+                this.metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            } else {
+                this.documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            }
+        }
     }

     /**
@@ -538,6 +597,22 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         this.innerHits = innerHits;
     }

+    public static void splitFieldsByMetadata(Map<String, DocumentField> fields,
+                                             Map<String, DocumentField> documentFields,
+                                             Map<String, DocumentField> metaFields) {
+        // documentFields and metaFields must be non-empty maps
+        if (fields == null) {
+            return;
+        }
+        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
+            if (fieldEntry.getValue().isMetadataField()) {
+                metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            } else {
+                documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            }
+        }
+    }
+
     public static class Fields {
         static final String _INDEX = "_index";
         static final String _TYPE = "_type";
@@ -559,6 +634,12 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         static final String _NODE = "_node";
     }

+    // Following are the keys for storing the metadata fields and regular fields in the aggregation map.
+    // These do not influence the structure of json serialization: document fields are still stored
+    // under FIELDS and metadata are still scattered at the root level.
+    static final String DOCUMENT_FIELDS = "document_fields";
+    static final String METADATA_FIELDS = "metadata_fields";
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
@@ -569,21 +650,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
     // public because we render hit as part of completion suggestion option
     public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
-        List<DocumentField> metaFields = new ArrayList<>();
-        List<DocumentField> otherFields = new ArrayList<>();
-        if (fields != null && !fields.isEmpty()) {
-            for (DocumentField field : fields.values()) {
-                if (field.getValues().isEmpty()) {
-                    continue;
-                }
-                if (field.isMetadataField()) {
-                    metaFields.add(field);
-                } else {
-                    otherFields.add(field);
-                }
-            }
-        }
         // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
         // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
         if (getExplanation() != null && shard != null) {
@@ -616,7 +682,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         } else {
             builder.field(Fields._SCORE, score);
         }
-        for (DocumentField field : metaFields) {
+        for (DocumentField field : metaFields.values()) {
             // _ignored is the only multi-valued meta field
             // TODO: can we avoid having an exception here?
             if (field.getName().equals(IgnoredFieldMapper.NAME)) {
@@ -628,9 +694,9 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         if (source != null) {
             XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
         }
-        if (!otherFields.isEmpty()) {
+        if (!documentFields.isEmpty()) {
             builder.startObject(Fields.FIELDS);
-            for (DocumentField field : otherFields) {
+            for (DocumentField field : documentFields.values()) {
                 field.toXContent(builder, params);
             }
             builder.endObject();
@@ -706,7 +772,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         parser.declareObject((map, value) -> {
             Map<String, DocumentField> fieldMap = get(Fields.FIELDS, map, new HashMap<String, DocumentField>());
             fieldMap.putAll(value);
-            map.put(Fields.FIELDS, fieldMap);
+            map.put(DOCUMENT_FIELDS, fieldMap);
         }, (p, c) -> parseFields(p), new ParseField(Fields.FIELDS));
         parser.declareObject((map, value) -> map.put(Fields._EXPLANATION, value), (p, c) -> parseExplanation(p),
             new ParseField(Fields._EXPLANATION));
@@ -723,9 +789,10 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         String id = get(Fields._ID, values, null);
         Text type = get(Fields._TYPE, values, null);
         NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null);
-        Map<String, DocumentField> fields = get(Fields.FIELDS, values, Collections.emptyMap());
+        Map<String, DocumentField> metaFields = get(METADATA_FIELDS, values, Collections.emptyMap());
+        Map<String, DocumentField> documentFields = get(DOCUMENT_FIELDS, values, Collections.emptyMap());

-        SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields);
+        SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, documentFields, metaFields);
         String index = get(Fields._INDEX, values, null);
         String clusterAlias = null;
         if (index != null) {
@@ -790,13 +857,17 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
      * handled individually. All other fields are parsed to an entry in the fields map
      */
     private static void declareMetadataFields(ObjectParser<Map<String, Object>, Void> parser) {
+        /* TODO: This method and its usage in declareInnerHitsParseFields() must be replaced by
+           calling an UnknownFieldConsumer. All fields on the root level of the parsed SearchHit
+           should be interpreted as metadata fields.
+        */
         for (String metadatafield : MapperService.getAllMetaFields()) {
             if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false
                 && metadatafield.equals(Fields._TYPE) == false) {
                 if (metadatafield.equals(IgnoredFieldMapper.NAME)) {
                     parser.declareObjectArray((map, list) -> {
                         @SuppressWarnings("unchecked")
-                        Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
+                        Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(METADATA_FIELDS,
                             v -> new HashMap<String, DocumentField>());
                         DocumentField field = new DocumentField(metadatafield, list);
                         fieldMap.put(field.getName(), field);
@@ -805,7 +876,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
                 } else {
                     parser.declareField((map, field) -> {
                         @SuppressWarnings("unchecked")
-                        Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
+                        Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(METADATA_FIELDS,
                             v -> new HashMap<String, DocumentField>());
                         fieldMap.put(field.getName(), field);
                     }, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))),
@@ -906,7 +977,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
                 && Objects.equals(seqNo, other.seqNo)
                 && Objects.equals(primaryTerm, other.primaryTerm)
                 && Objects.equals(source, other.source)
-                && Objects.equals(getFields(), other.getFields())
+                && Objects.equals(documentFields, other.documentFields)
+                && Objects.equals(metaFields, other.metaFields)
                 && Objects.equals(getHighlightFields(), other.getHighlightFields())
                 && Arrays.equals(matchedQueries, other.matchedQueries)
                 && Objects.equals(explanation, other.explanation)
@@ -918,7 +990,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
     @Override
     public int hashCode() {
-        return Objects.hash(id, type, nestedIdentity, version, seqNo, primaryTerm, source, fields, getHighlightFields(),
+        return Objects.hash(id, nestedIdentity, version, seqNo, primaryTerm, source, documentFields, metaFields, getHighlightFields(),
             Arrays.hashCode(matchedQueries), explanation, shard, innerHits, index, clusterAlias);
     }
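A practical consequence of the getFields() change above, worth noting for fetch sub-phase and plugin code (an editorial observation, not text from the commit): getFields() now builds a fresh map joining metaFields and documentFields, so mutating its return value no longer updates the hit. Writes have to go through setField, which is exactly what the FetchDocValuesPhase, ScriptFieldsPhase, and FetchSubPhasePluginIT hunks below switch to. A hedged sketch, with "my_field" and the field variable invented for illustration:

    // Before the refactoring this mutated the hit's internal field map;
    // after it, it mutates a throwaway copy and the field is silently dropped:
    hit.getFields().put("my_field", field);

    // Correct pattern after the refactoring: routed into documentFields
    // or metaFields based on DocumentField#isMetadataField.
    hit.setField("my_field", field);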


@@ -209,13 +209,17 @@ public class FetchPhase implements SearchPhase {
         DocumentMapper documentMapper = context.mapperService().documentMapper();
         Text typeText = documentMapper.typeText();
         if (fieldsVisitor == null) {
-            return new SearchHit(docId, null, typeText, null);
+            return new SearchHit(docId, null, typeText, null, null);
         }

         Map<String, DocumentField> searchFields = getSearchFields(context, fieldsVisitor, subDocId,
             storedToRequestedFields, subReaderContext);

-        SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);
+        Map<String, DocumentField> metaFields = new HashMap<>();
+        Map<String, DocumentField> documentFields = new HashMap<>();
+        SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);
+
+        SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, documentFields, metaFields);
         // Set _source if requested.
         SourceLookup sourceLookup = context.lookup().source();
         sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
@@ -341,7 +345,10 @@ public class FetchPhase implements SearchPhase {
             XContentType contentType = tuple.v1();
             context.lookup().source().setSourceContentType(contentType);
         }
-        return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, searchFields);
+        Map<String, DocumentField> metaFields = new HashMap<>();
+        Map<String, DocumentField> documentFields = new HashMap<>();
+        SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);
+        return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, documentFields, metaFields);
     }

     private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId,


@@ -148,7 +148,7 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
             DocumentField hitField = hit.getFields().get(field);
             if (hitField == null) {
                 hitField = new DocumentField(field, new ArrayList<>(2));
-                hit.getFields().put(field, hitField);
+                hit.setField(field, hitField);
             }
             final List<Object> values = hitField.getValues();


@@ -85,7 +85,8 @@ public final class ScriptFieldsPhase implements FetchSubPhase {
                     values = Collections.singletonList(value);
                 }
                 hitField = new DocumentField(scriptFieldName, values);
-                hit.getFields().put(scriptFieldName, hitField);
+                hit.setField(scriptFieldName, hitField);
             }
         }
     }
} }


@@ -52,8 +52,8 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         List<SearchHits> collapsedHits = new ArrayList<>(numInnerHits);
         for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
             SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(innerHitNum, "ID", new Text("type"),
-                Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", new Text("type"),
-                Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F);
+                Collections.emptyMap(), Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", new Text("type"),
+                Collections.emptyMap(), Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F);
             collapsedHits.add(hits);
         }
@@ -103,8 +103,8 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         };

         SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))},
-            new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
+            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
+            Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
         ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null);
@@ -126,8 +126,8 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         AtomicBoolean executedMultiSearch = new AtomicBoolean(false);

         SearchHits collapsedHits = new SearchHits(new SearchHit[]{new SearchHit(2, "ID", new Text("type"),
-            Collections.emptyMap()), new SearchHit(3, "ID", new Text("type"),
-            Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
+            Collections.emptyMap(), Collections.emptyMap()), new SearchHit(3, "ID", new Text("type"),
+            Collections.emptyMap(), Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1);
         String collapseValue = randomBoolean() ? null : "boom";
         mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder()
@@ -149,9 +149,9 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         };

         SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue)))),
-            new SearchHit(2, "ID2", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))},
+            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
+            Collections.emptyMap()), new SearchHit(2, "ID2", new Text("type"), Collections.singletonMap("someField",
+            new DocumentField("someField", Collections.singletonList(collapseValue))), Collections.emptyMap())},
             new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
         ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null);
@@ -172,9 +172,9 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         };

         SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null)))),
-            new SearchHit(2, "ID2", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))))},
+            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))),
+            Collections.emptyMap()), new SearchHit(2, "ID2", new Text("type"), Collections.singletonMap("someField",
+            new DocumentField("someField", Collections.singletonList(null))), Collections.emptyMap())},
             new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
         ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null);


@@ -321,7 +321,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
         List<SearchHit> searchHits = new ArrayList<>();
         for (ScoreDoc scoreDoc : mergedSearchDocs) {
             if (scoreDoc.shardIndex == shardIndex) {
-                searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap()));
+                searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap()));
                 if (scoreDoc.score > maxScore) {
                     maxScore = scoreDoc.score;
                 }
@@ -332,7 +332,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
             for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) {
                 ScoreDoc doc = option.getDoc();
                 if (doc.shardIndex == shardIndex) {
-                    searchHits.add(new SearchHit(doc.doc, "", new Text(""), Collections.emptyMap()));
+                    searchHits.add(new SearchHit(doc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap()));
                     if (doc.score > maxScore) {
                         maxScore = doc.score;
                     }


@@ -208,7 +208,7 @@ public class SearchResponseTests extends ESTestCase {
     }

     public void testToXContent() {
-        SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap());
+        SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
         hit.score(2.0f);
         SearchHit[] hits = new SearchHit[] { hit };
         {


@@ -73,14 +73,24 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
         if (randomBoolean()) {
             nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2));
         }
-        Map<String, DocumentField> fields = null;
+        Map<String, DocumentField> fields = new HashMap<>();
         if (frequently()) {
             fields = new HashMap<>();
             if (randomBoolean()) {
                 fields = GetResultTests.randomDocumentFields(xContentType).v2();
             }
         }
-        SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, fields);
+        HashMap<String, DocumentField> metaFields = new HashMap<>();
+        HashMap<String, DocumentField> documentFields = new HashMap<>();
+        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
+            if (fieldEntry.getValue().isMetadataField()) {
+                metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            } else {
+                documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            }
+        }
+        SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, documentFields, metaFields);
         if (frequently()) {
             if (rarely()) {
                 hit.score(Float.NaN);
@@ -213,7 +223,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
     }

     public void testToXContent() throws IOException {
-        SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap());
+        SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
         searchHit.score(1.5f);
         XContentBuilder builder = JsonXContent.contentBuilder();
         searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
@@ -226,25 +236,25 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
             clusterAlias, OriginalIndices.NONE);

         Map<String, SearchHits> innerHits = new HashMap<>();
-        SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHit1.shard(target);
-        SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerInnerHit2.shard(target);
         innerHits.put("1", new SearchHits(new SearchHit[]{innerInnerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
         innerHit1.setInnerHits(innerHits);
-        SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHit2.shard(target);
-        SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHit3.shard(target);

         innerHits = new HashMap<>();
-        SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHits.put("1", new SearchHits(new SearchHit[]{innerHit1, innerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
         innerHits.put("2", new SearchHits(new SearchHit[]{innerHit3}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
         hit1.shard(target);
         hit1.setInnerHits(innerHits);

-        SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
         hit2.shard(target);

         SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f);
@@ -271,7 +281,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
     }

     public void testNullSource() {
-        SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null, null);

         assertThat(searchHit.getSourceAsMap(), nullValue());
         assertThat(searchHit.getSourceRef(), nullValue());


@@ -208,8 +208,8 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
     public void testToXContent() throws IOException {
         SearchHit[] hits = new SearchHit[] {
-            new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()),
-            new SearchHit(2, "id2", new Text("type"), Collections.emptyMap()) };
+            new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
+            new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) };

         long totalHits = 1000;
         float maxScore = 1.5f;
@@ -226,9 +226,9 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
     public void testFromXContentWithShards() throws IOException {
         for (boolean withExplanation : new boolean[] {true, false}) {
             final SearchHit[] hits = new SearchHit[]{
-                new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()),
-                new SearchHit(2, "id2", new Text("type"), Collections.emptyMap()),
-                new SearchHit(10, "id10", new Text("type"), Collections.emptyMap())
+                new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
+                new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
+                new SearchHit(10, "id10", new Text("type"), Collections.emptyMap(), Collections.emptyMap())
             };

             for (SearchHit hit : hits) {


@@ -50,6 +50,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -114,7 +115,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo
             } else {
                 scoreDocs[i] = new ScoreDoc(docId, score);
             }
-            hits[i] = new SearchHit(docId, Integer.toString(i), new Text("test"), searchHitFields);
+            hits[i] = new SearchHit(docId, Integer.toString(i), new Text("test"), searchHitFields, Collections.emptyMap());
             hits[i].score(score);
         }
         int totalHits = between(actualSize, 500000);


@@ -131,7 +131,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
             DocumentField hitField = hitContext.hit().getFields().get(NAME);
             if (hitField == null) {
                 hitField = new DocumentField(NAME, new ArrayList<>(1));
-                hitContext.hit().getFields().put(NAME, hitField);
+                hitContext.hit().setField(NAME, hitField);
             }
             TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
                 hitContext.hit().getType(), hitContext.hit().getId());


@@ -152,7 +152,7 @@ public class FetchSourcePhaseTests extends ESTestCase {
         SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext,
             source == null ? null : BytesReference.bytes(source));
         FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
-        final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null);
+        final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null, null);
         hitContext.reset(searchHit, null, 1, null);
         FetchSourcePhase phase = new FetchSourcePhase();
         phase.hitExecute(searchContext, hitContext);


@@ -261,7 +261,13 @@ public class EnrichShardMultiSearchAction extends ActionType<MultiSearchResponse
                 visitor.reset();
                 searcher.doc(scoreDoc.doc, visitor);
                 visitor.postProcess(mapperService);
-                final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.uid().id(), typeText, Collections.emptyMap());
+                final SearchHit hit = new SearchHit(
+                    scoreDoc.doc,
+                    visitor.uid().id(),
+                    typeText,
+                    Collections.emptyMap(),
+                    Collections.emptyMap()
+                );
                 hit.sourceRef(filterSource(fetchSourceContext, visitor.source()));
                 hits[j] = hit;
             }


@@ -176,6 +176,7 @@ public class GeoMatchProcessorTests extends ESTestCase {
             randomInt(100),
             e.getKey(),
             new Text(MapperService.SINGLE_MAPPING_NAME),
+            Collections.emptyMap(),
             Collections.emptyMap()
         );
         try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) {


@@ -386,6 +386,7 @@ public class MatchProcessorTests extends ESTestCase {
             randomInt(100),
             e.getKey().toString(),
             new Text(MapperService.SINGLE_MAPPING_NAME),
+            Collections.emptyMap(),
             Collections.emptyMap()
         );
         try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) {


@@ -24,7 +24,7 @@ public class EqlSearchResponseTests extends AbstractSerializingTestCase<EqlSearc
         if (randomBoolean()) {
             hits = new ArrayList<>();
             for (int i = 0; i < size; i++) {
-                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>()));
+                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>(), new HashMap<>()));
             }
         }
         if (randomBoolean()) {


@@ -235,7 +235,8 @@ public class DataFrameRowsJoinerTests extends ESTestCase {
     }

     private static SearchHit newHit(String json) {
-        SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), new Text("doc"), Collections.emptyMap());
+        SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), new Text("doc"),
+            Collections.emptyMap(), Collections.emptyMap());
         hit.sourceRef(new BytesArray(json));
         return hit;
     }

View File

@@ -908,7 +908,7 @@ public class JobResultsProviderTests extends ESTestCase {
 fields.put("field_1", new DocumentField("field_1", Collections.singletonList("foo")));
 fields.put("field_2", new DocumentField("field_2", Collections.singletonList("foo")));
-SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), new Text("foo"), fields)
+SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), new Text("foo"), fields, Collections.emptyMap())
     .sourceRef(BytesReference.bytes(XContentFactory.jsonBuilder().map(_source)));
 list.add(hit);

View File

@@ -39,6 +39,7 @@ import org.mockito.ArgumentCaptor;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -190,7 +191,7 @@ public class MockClientBuilder {
 SearchHit hits [] = new SearchHit[fields.size()];
 for (int i=0; i<hits.length; i++) {
-SearchHit hit = new SearchHit(10, null, null, fields.get(i));
+SearchHit hit = new SearchHit(10, null, null, fields.get(i), Collections.emptyMap());
 hits[i] = hit;
 }

View File

@@ -242,7 +242,7 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase {
 .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source.streamInput()).map();
 final Map<String, Object> accessToken = (Map<String, Object>) sourceMap.get("access_token");
 final Map<String, Object> userToken = (Map<String, Object>) accessToken.get("user_token");
-final SearchHit hit = new SearchHit(idx, "token_" + userToken.get("id"), null, null);
+final SearchHit hit = new SearchHit(idx, "token_" + userToken.get("id"), null, null, null);
 hit.sourceRef(source);
 return hit;
 } catch (IOException e) {

View File

@@ -340,7 +340,7 @@ public class NativePrivilegeStoreTests extends ESTestCase {
 final SearchHit[] hits = new SearchHit[sourcePrivileges.size()];
 for (int i = 0; i < hits.length; i++) {
 final ApplicationPrivilegeDescriptor p = sourcePrivileges.get(i);
-hits[i] = new SearchHit(i, "application-privilege_" + p.getApplication() + ":" + p.getName(), null, null);
+hits[i] = new SearchHit(i, "application-privilege_" + p.getApplication() + ":" + p.getName(), null, null, null);
 hits[i].sourceRef(new BytesArray(Strings.toString(p)));
 }
 return hits;

View File

@@ -95,8 +95,8 @@ public class TopHitsAggExtractorTests extends AbstractSqlWireSerializingTestCase
 private SearchHits searchHitsOf(Object value) {
 TotalHits totalHits = new TotalHits(10, TotalHits.Relation.EQUAL_TO);
-return new SearchHits(new SearchHit[] {new SearchHit(1, "docId", null,
-    Collections.singletonMap("topHitsAgg", new DocumentField("field", Collections.singletonList(value))))},
+return new SearchHits(new SearchHit[] {new SearchHit(1, "docId", null, Collections.singletonMap("topHitsAgg",
+    new DocumentField("field", Collections.singletonList(value))), Collections.emptyMap())},
 totalHits, 0.0f);
 }
 }
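Only the construction side changes here; the test's assertions are untouched, which suggests read access works as before. A hedged sketch of that lookup, assuming SearchHit.field(String) still resolves a name against the hit's field maps; the aggregation name mirrors the test above:

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;

class TopHitsLookupSketch {
    static Object firstValue(SearchHit hit) {
        // field(...) returns the stored DocumentField, or null if absent.
        DocumentField field = hit.field("topHitsAgg");
        return field == null ? null : field.getValue();
    }
}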

View File

@@ -180,7 +180,7 @@ public class WatcherServiceTests extends ESTestCase {
 SearchHit[] hits = new SearchHit[count];
 for (int i = 0; i < count; i++) {
 String id = String.valueOf(i);
-SearchHit hit = new SearchHit(1, id, new Text("watch"), Collections.emptyMap());
+SearchHit hit = new SearchHit(1, id, new Text("watch"), Collections.emptyMap(), Collections.emptyMap());
 hit.version(1L);
 hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever", OriginalIndices.NONE));
 hits[i] = hit;

View File

@@ -76,7 +76,7 @@ public class CompareConditionSearchTests extends AbstractWatcherIntegrationTestC
 public void testExecuteAccessHits() throws Exception {
 CompareCondition condition = new CompareCondition("ctx.payload.hits.hits.0._score", CompareCondition.Op.EQ, 1,
     Clock.systemUTC());
-SearchHit hit = new SearchHit(0, "1", new Text("type"), null);
+SearchHit hit = new SearchHit(0, "1", new Text("type"), null, null);
 hit.score(1f);
 hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null, OriginalIndices.NONE));

View File

@@ -216,7 +216,7 @@ public class TriggeredWatchStoreTests extends ESTestCase {
 when(searchResponse1.getSuccessfulShards()).thenReturn(1);
 when(searchResponse1.getTotalShards()).thenReturn(1);
 BytesArray source = new BytesArray("{}");
-SearchHit hit = new SearchHit(0, "first_foo", new Text(SINGLE_MAPPING_NAME), null);
+SearchHit hit = new SearchHit(0, "first_foo", new Text(SINGLE_MAPPING_NAME), null, null);
 hit.version(1L);
 hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
 hit.sourceRef(source);
@@ -230,7 +230,7 @@ public class TriggeredWatchStoreTests extends ESTestCase {
 }).when(client).execute(eq(SearchAction.INSTANCE), any(), any());
 // First return a scroll response with a single hit and then with no hits
-hit = new SearchHit(0, "second_foo", new Text(SINGLE_MAPPING_NAME), null);
+hit = new SearchHit(0, "second_foo", new Text(SINGLE_MAPPING_NAME), null, null);
 hit.version(1L);
 hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
 hit.sourceRef(source);