Refactor SearchHit to have separate document and meta fields.

This is part of a bigger refactoring for issue #24422 to remove the
dependency on MapperService when checking whether a field is a metadata
field.

Relates to PR #38373
Relates to issue #24422

Co-authored-by: sandmannn <bohdanpukalskyi@gmail.com>
commit bf4857d9e0
parent 2cd35bf696
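Before the diff: a minimal sketch of what the change means for callers, using only names that appear in this commit (the two-map SearchHit constructor, splitFieldsByMetadata, setField, getFields); the field names and values below are illustrative, not taken from the patch:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.common.document.DocumentField;
    import org.elasticsearch.common.text.Text;
    import org.elasticsearch.search.SearchHit;

    public class SearchHitFieldsExample {
        public static void main(String[] args) {
            // A mixed map as produced before this change (illustrative values).
            Map<String, DocumentField> mixed = new HashMap<>();
            mixed.put("title", new DocumentField("title", Collections.singletonList("hello")));
            mixed.put("_routing", new DocumentField("_routing", Collections.singletonList("r1")));

            // Split once by DocumentField#isMetadataField, then use the new two-map constructor,
            // so later consumers no longer need MapperService to tell the two kinds apart.
            Map<String, DocumentField> documentFields = new HashMap<>();
            Map<String, DocumentField> metaFields = new HashMap<>();
            SearchHit.splitFieldsByMetadata(mixed, documentFields, metaFields);
            SearchHit hit = new SearchHit(0, "1", new Text("_doc"), documentFields, metaFields);

            // Single fields are routed to the right internal map automatically.
            hit.setField("_ignored", new DocumentField("_ignored", Collections.singletonList("f")));

            // getFields() still returns the joined view, so existing callers keep working.
            System.out.println(hit.getFields().keySet());
        }
    }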
@@ -43,7 +43,7 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<org.elastic
         if (randomBoolean()) {
             hits = new ArrayList<>();
             for (int i = 0; i < size; i++) {
-                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>()));
+                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>(), new HashMap<>()));
             }
         }
         if (randomBoolean()) {
@@ -165,7 +165,7 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase<Search
     }
 
     public void testSearchResponseToXContent() throws IOException {
-        SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap());
+        SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
         hit.score(2.0f);
         SearchHit[] hits = new SearchHit[] { hit };
 
@@ -102,7 +102,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
                     shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);
                     shardContext.lookup().source().setSource(document);
                     hitContext.reset(
-                        new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap()),
+                        new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap(), Collections.emptyMap()),
                         percolatorLeafReaderContext, slot, percolatorIndexSearcher
                     );
                     hitContext.cache().clear();
@@ -110,7 +110,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
                 hit.fields(fields);
             }
             IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot);
-            fields.put(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
+            hit.setField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
         }
     }
 }
@@ -74,7 +74,8 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         SearchHit[] hits = new SearchHit[6];
         for (int i = 0; i < 6; i++) {
             rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();

@@ -124,7 +125,8 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
                     rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
                 }
             }
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();

@@ -181,7 +183,8 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         // only create four hits
         SearchHit[] hits = new SearchHit[4];
         for (int i = 0; i < 4; i++) {
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -117,7 +117,8 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
             if (relevanceRatings[i] != null) {
                 rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
             }
-            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -204,7 +204,7 @@ public class MeanReciprocalRankTests extends ESTestCase {
     private static SearchHit[] createSearchHits(int from, int to, String index) {
         SearchHit[] hits = new SearchHit[to + 1 - from];
         for (int i = from; i <= to; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -114,7 +114,7 @@ public class PrecisionAtKTests extends ESTestCase {
         rated.add(createRatedDoc("test", "1", RELEVANT_RATING));
         // add an unlabeled search hit
         SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
-        searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+        searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
         searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
 
         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);

@@ -133,7 +133,8 @@ public class PrecisionAtKTests extends ESTestCase {
     public void testNoRatedDocs() throws Exception {
         SearchHit[] hits = new SearchHit[5];
         for (int i = 0; i < 5; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());

@@ -255,7 +256,7 @@ public class PrecisionAtKTests extends ESTestCase {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -181,7 +181,8 @@ public class RankEvalResponseTests extends ESTestCase {
     }
 
     private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
-        SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+        SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME),
+            Collections.emptyMap(), Collections.emptyMap());
         hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         hit.score(1.0f);
         return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
@@ -42,7 +42,7 @@ public class RatedSearchHitTests extends ESTestCase {
         OptionalInt rating = randomBoolean() ? OptionalInt.empty()
                 : OptionalInt.of(randomIntBetween(0, 5));
         SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10),
-                new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+                new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
         RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating);
         return ratedSearchHit;
     }

@@ -56,7 +56,7 @@ public class RatedSearchHitTests extends ESTestCase {
             break;
         case 1:
             hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10),
-                    new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap());
+                    new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
             break;
         default:
             throw new IllegalStateException("The test should only allow two parameters mutated");
@@ -115,7 +115,7 @@ public class RecallAtKTests extends ESTestCase {
         int k = 5;
         SearchHit[] hits = new SearchHit[k];
         for (int i = 0; i < k; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
 

@@ -237,7 +237,7 @@ public class RecallAtKTests extends ESTestCase {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -504,7 +504,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
         action.start();
 
         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),0);
         InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1);
@@ -160,7 +160,7 @@ public class ClientScrollableHitSourceTests extends ESTestCase {
 
     private SearchResponse createSearchResponse() {
         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),0);
         InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1);
@@ -96,7 +96,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
 
     private BytesReference source;
 
-    private Map<String, DocumentField> fields;
+    private Map<String, DocumentField> documentFields;
+    private Map<String, DocumentField> metaFields;
 
     private Map<String, HighlightField> highlightFields = null;
 
@@ -121,14 +122,15 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
 
     //used only in tests
     public SearchHit(int docId) {
-        this(docId, null, null, null);
+        this(docId, null, null, null, null);
     }
 
-    public SearchHit(int docId, String id, Text type, Map<String, DocumentField> fields) {
-        this(docId, id, type, null, fields);
+    public SearchHit(int docId, String id, Text type, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
+        this(docId, id, type, null, documentFields, metaFields);
     }
 
-    public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map<String, DocumentField> fields) {
+    public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity,
+                     Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
         this.docId = nestedTopDocId;
         if (id != null) {
             this.id = new Text(id);
@@ -137,7 +139,15 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         }
         this.type = type;
         this.nestedIdentity = nestedIdentity;
-        this.fields = fields;
+        this.documentFields = documentFields;
+        if (this.documentFields == null) {
+            this.documentFields = new HashMap<>();
+        }
+
+        this.metaFields = metaFields;
+        if (this.metaFields == null) {
+            this.metaFields = new HashMap<>();
+        }
     }
 
     public SearchHit(StreamInput in) throws IOException {
@@ -158,22 +168,17 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         if (in.readBoolean()) {
             explanation = readExplanation(in);
         }
-        int size = in.readVInt();
-        if (size == 0) {
-            fields = emptyMap();
-        } else if (size == 1) {
-            DocumentField hitField = new DocumentField(in);
-            fields = singletonMap(hitField.getName(), hitField);
+        if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
+            documentFields = in.readMap(StreamInput::readString, DocumentField::new);
+            metaFields = in.readMap(StreamInput::readString, DocumentField::new);
         } else {
-            Map<String, DocumentField> fields = new HashMap<>();
-            for (int i = 0; i < size; i++) {
-                DocumentField hitField = new DocumentField(in);
-                fields.put(hitField.getName(), hitField);
-            }
-            this.fields = unmodifiableMap(fields);
+            Map<String, DocumentField> fields = readFields(in);
+            documentFields = new HashMap<>();
+            metaFields = new HashMap<>();
+            SearchHit.splitFieldsByMetadata(fields, documentFields, metaFields);
         }
 
-        size = in.readVInt();
+        int size = in.readVInt();
         if (size == 0) {
             highlightFields = emptyMap();
         } else if (size == 1) {
@@ -212,6 +217,36 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         }
     }
 
+    private Map<String, DocumentField> readFields(StreamInput in) throws IOException {
+        Map<String, DocumentField> fields;
+        int size = in.readVInt();
+        if (size == 0) {
+            fields = emptyMap();
+        } else if (size == 1) {
+            DocumentField hitField = new DocumentField(in);
+            fields = singletonMap(hitField.getName(), hitField);
+        } else {
+            fields = new HashMap<>(size);
+            for (int i = 0; i < size; i++) {
+                DocumentField field = new DocumentField(in);
+                fields.put(field.getName(), field);
+            }
+            fields = unmodifiableMap(fields);
+        }
+        return fields;
+    }
+
+    private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException {
+        if (fields == null) {
+            out.writeVInt(0);
+        } else {
+            out.writeVInt(fields.size());
+            for (DocumentField field : fields.values()) {
+                field.writeTo(out);
+            }
+        }
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeFloat(score);
@@ -230,13 +265,11 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
             out.writeBoolean(true);
             writeExplanation(out, explanation);
         }
-        if (fields == null) {
-            out.writeVInt(0);
+        if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
+            out.writeMap(documentFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
+            out.writeMap(metaFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream));
         } else {
-            out.writeVInt(fields.size());
-            for (DocumentField hitField : getFields().values()) {
-                hitField.writeTo(out);
-            }
+            writeFields(out, this.getFields());
         }
         if (highlightFields == null) {
             out.writeVInt(0);
@@ -415,7 +448,9 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
 
     @Override
     public Iterator<DocumentField> iterator() {
-        return fields.values().iterator();
+        // need to join the fields and metadata fields
+        Map<String, DocumentField> allFields = this.getFields();
+        return allFields.values().iterator();
     }
 
     /**
@@ -425,21 +460,45 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         return getFields().get(fieldName);
     }
 
+    /*
+     * Adds a new DocumentField to the map in case both parameters are not null.
+     * */
+    public void setField(String fieldName, DocumentField field) {
+        if (fieldName == null || field == null) return;
+        if (field.isMetadataField()) {
+            this.metaFields.put(fieldName, field);
+        } else {
+            this.documentFields.put(fieldName, field);
+        }
+    }
+
     /**
      * A map of hit fields (from field name to hit fields) if additional fields
      * were required to be loaded.
      */
     public Map<String, DocumentField> getFields() {
-        return fields == null ? emptyMap() : fields;
+        Map<String, DocumentField> fields = new HashMap<>();
+        fields.putAll(metaFields);
+        fields.putAll(documentFields);
+        return fields;
     }
 
     // returns the fields without handling null cases
     public Map<String, DocumentField> fieldsOrNull() {
-        return fields;
+        return getFields();
     }
 
     public void fields(Map<String, DocumentField> fields) {
-        this.fields = fields;
+        Objects.requireNonNull(fields);
+        this.metaFields = new HashMap<>();
+        this.documentFields = new HashMap<>();
+        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
+            if (fieldEntry.getValue().isMetadataField()) {
+                this.metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            } else {
+                this.documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            }
+        }
     }
 
     /**
@@ -538,6 +597,22 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         this.innerHits = innerHits;
     }
 
+    public static void splitFieldsByMetadata(Map<String, DocumentField> fields,
+                                             Map<String, DocumentField> documentFields,
+                                             Map<String, DocumentField> metaFields) {
+        // documentFields and metaFields must be non-empty maps
+        if (fields == null) {
+            return;
+        }
+        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
+            if (fieldEntry.getValue().isMetadataField()) {
+                metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            } else {
+                documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            }
+        }
+    }
+
     public static class Fields {
         static final String _INDEX = "_index";
         static final String _TYPE = "_type";
@@ -559,6 +634,12 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         static final String _NODE = "_node";
     }
 
+    // Following are the keys for storing the metadata fields and regular fields in the aggregation map.
+    // These do not influence the structure of json serialization: document fields are still stored
+    // under FIELDS and metadata are still scattered at the root level.
+    static final String DOCUMENT_FIELDS = "document_fields";
+    static final String METADATA_FIELDS = "metadata_fields";
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
@@ -569,21 +650,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
 
     // public because we render hit as part of completion suggestion option
    public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
-        List<DocumentField> metaFields = new ArrayList<>();
-        List<DocumentField> otherFields = new ArrayList<>();
-        if (fields != null && !fields.isEmpty()) {
-            for (DocumentField field : fields.values()) {
-                if (field.getValues().isEmpty()) {
-                    continue;
-                }
-                if (field.isMetadataField()) {
-                    metaFields.add(field);
-                } else {
-                    otherFields.add(field);
-                }
-            }
-        }
-
         // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
         // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
         if (getExplanation() != null && shard != null) {
@@ -616,7 +682,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         } else {
             builder.field(Fields._SCORE, score);
         }
-        for (DocumentField field : metaFields) {
+        for (DocumentField field : metaFields.values()) {
             // _ignored is the only multi-valued meta field
             // TODO: can we avoid having an exception here?
             if (field.getName().equals(IgnoredFieldMapper.NAME)) {
@@ -628,9 +694,9 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         if (source != null) {
             XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
         }
-        if (!otherFields.isEmpty()) {
+        if (!documentFields.isEmpty()) {
             builder.startObject(Fields.FIELDS);
-            for (DocumentField field : otherFields) {
+            for (DocumentField field : documentFields.values()) {
                 field.toXContent(builder, params);
             }
             builder.endObject();
@@ -706,7 +772,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         parser.declareObject((map, value) -> {
             Map<String, DocumentField> fieldMap = get(Fields.FIELDS, map, new HashMap<String, DocumentField>());
             fieldMap.putAll(value);
-            map.put(Fields.FIELDS, fieldMap);
+            map.put(DOCUMENT_FIELDS, fieldMap);
         }, (p, c) -> parseFields(p), new ParseField(Fields.FIELDS));
         parser.declareObject((map, value) -> map.put(Fields._EXPLANATION, value), (p, c) -> parseExplanation(p),
             new ParseField(Fields._EXPLANATION));
@@ -723,9 +789,10 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
         String id = get(Fields._ID, values, null);
         Text type = get(Fields._TYPE, values, null);
         NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null);
-        Map<String, DocumentField> fields = get(Fields.FIELDS, values, Collections.emptyMap());
+        Map<String, DocumentField> metaFields = get(METADATA_FIELDS, values, Collections.emptyMap());
+        Map<String, DocumentField> documentFields = get(DOCUMENT_FIELDS, values, Collections.emptyMap());
 
-        SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields);
+        SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, documentFields, metaFields);
         String index = get(Fields._INDEX, values, null);
         String clusterAlias = null;
         if (index != null) {
@@ -790,13 +857,17 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
      * handled individually. All other fields are parsed to an entry in the fields map
      */
     private static void declareMetadataFields(ObjectParser<Map<String, Object>, Void> parser) {
+        /* TODO: This method and its usage in declareInnerHitsParseFields() must be replaced by
+           calling an UnknownFieldConsumer. All fields on the root level of the parsed SearhHit
+           should be interpreted as metadata fields.
+         */
         for (String metadatafield : MapperService.getAllMetaFields()) {
             if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false
                     && metadatafield.equals(Fields._TYPE) == false) {
                 if (metadatafield.equals(IgnoredFieldMapper.NAME)) {
                     parser.declareObjectArray((map, list) -> {
                             @SuppressWarnings("unchecked")
-                            Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
+                            Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(METADATA_FIELDS,
                                 v -> new HashMap<String, DocumentField>());
                             DocumentField field = new DocumentField(metadatafield, list);
                             fieldMap.put(field.getName(), field);
@@ -805,7 +876,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
                 } else {
                     parser.declareField((map, field) -> {
                             @SuppressWarnings("unchecked")
-                            Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
+                            Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(METADATA_FIELDS,
                                 v -> new HashMap<String, DocumentField>());
                             fieldMap.put(field.getName(), field);
                         }, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))),
@@ -906,7 +977,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
                 && Objects.equals(seqNo, other.seqNo)
                 && Objects.equals(primaryTerm, other.primaryTerm)
                 && Objects.equals(source, other.source)
-                && Objects.equals(getFields(), other.getFields())
+                && Objects.equals(documentFields, other.documentFields)
+                && Objects.equals(metaFields, other.metaFields)
                 && Objects.equals(getHighlightFields(), other.getHighlightFields())
                 && Arrays.equals(matchedQueries, other.matchedQueries)
                 && Objects.equals(explanation, other.explanation)
@@ -918,7 +990,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
 
     @Override
     public int hashCode() {
-        return Objects.hash(id, type, nestedIdentity, version, seqNo, primaryTerm, source, fields, getHighlightFields(),
+        return Objects.hash(id, nestedIdentity, version, seqNo, primaryTerm, source, documentFields, metaFields, getHighlightFields(),
             Arrays.hashCode(matchedQueries), explanation, shard, innerHits, index, clusterAlias);
     }
 
@@ -209,13 +209,17 @@ public class FetchPhase implements SearchPhase {
         DocumentMapper documentMapper = context.mapperService().documentMapper();
         Text typeText = documentMapper.typeText();
         if (fieldsVisitor == null) {
-            return new SearchHit(docId, null, typeText, null);
+            return new SearchHit(docId, null, typeText, null, null);
         }
 
         Map<String, DocumentField> searchFields = getSearchFields(context, fieldsVisitor, subDocId,
            storedToRequestedFields, subReaderContext);
 
-        SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);
+        Map<String, DocumentField> metaFields = new HashMap<>();
+        Map<String, DocumentField> documentFields = new HashMap<>();
+        SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);
+
+        SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, documentFields, metaFields);
         // Set _source if requested.
         SourceLookup sourceLookup = context.lookup().source();
         sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);

@@ -341,7 +345,10 @@ public class FetchPhase implements SearchPhase {
                 XContentType contentType = tuple.v1();
                 context.lookup().source().setSourceContentType(contentType);
             }
-            return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, searchFields);
+            Map<String, DocumentField> metaFields = new HashMap<>();
+            Map<String, DocumentField> documentFields = new HashMap<>();
+            SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);
+            return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, documentFields, metaFields);
         }
 
         private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId,
@@ -148,7 +148,7 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
             DocumentField hitField = hit.getFields().get(field);
             if (hitField == null) {
                 hitField = new DocumentField(field, new ArrayList<>(2));
-                hit.getFields().put(field, hitField);
+                hit.setField(field, hitField);
             }
             final List<Object> values = hitField.getValues();
 
@@ -85,7 +85,8 @@ public final class ScriptFieldsPhase implements FetchSubPhase {
                     values = Collections.singletonList(value);
                 }
                 hitField = new DocumentField(scriptFieldName, values);
-                hit.getFields().put(scriptFieldName, hitField);
+                hit.setField(scriptFieldName, hitField);
+
             }
         }
     }
@@ -52,8 +52,8 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         List<SearchHits> collapsedHits = new ArrayList<>(numInnerHits);
         for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
             SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(innerHitNum, "ID", new Text("type"),
-                Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", new Text("type"),
-                Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F);
+                Collections.emptyMap(), Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", new Text("type"),
+                Collections.emptyMap(), Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F);
             collapsedHits.add(hits);
         }
 
@@ -103,8 +103,8 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         };
 
         SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))},
-            new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
+            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
+            Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
         ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null);
 
@@ -126,8 +126,8 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         AtomicBoolean executedMultiSearch = new AtomicBoolean(false);
 
         SearchHits collapsedHits = new SearchHits(new SearchHit[]{new SearchHit(2, "ID", new Text("type"),
-            Collections.emptyMap()), new SearchHit(3, "ID", new Text("type"),
-            Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
+            Collections.emptyMap(), Collections.emptyMap()), new SearchHit(3, "ID", new Text("type"),
+            Collections.emptyMap(), Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1);
         String collapseValue = randomBoolean() ? null : "boom";
         mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder()
@@ -149,9 +149,9 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         };
 
         SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue)))),
-            new SearchHit(2, "ID2", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))},
+            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
+            Collections.emptyMap()), new SearchHit(2, "ID2", new Text("type"), Collections.singletonMap("someField",
+            new DocumentField("someField", Collections.singletonList(collapseValue))), Collections.emptyMap())},
             new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
         ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null);
@@ -172,9 +172,9 @@ public class ExpandSearchPhaseTests extends ESTestCase {
         };
 
         SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null)))),
-            new SearchHit(2, "ID2", new Text("type"),
-            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))))},
+            Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))),
+            Collections.emptyMap()), new SearchHit(2, "ID2", new Text("type"), Collections.singletonMap("someField",
+            new DocumentField("someField", Collections.singletonList(null))), Collections.emptyMap())},
             new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
         ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null);
@@ -321,7 +321,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
         List<SearchHit> searchHits = new ArrayList<>();
         for (ScoreDoc scoreDoc : mergedSearchDocs) {
             if (scoreDoc.shardIndex == shardIndex) {
-                searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap()));
+                searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap()));
                 if (scoreDoc.score > maxScore) {
                     maxScore = scoreDoc.score;
                 }

@@ -332,7 +332,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
             for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) {
                 ScoreDoc doc = option.getDoc();
                 if (doc.shardIndex == shardIndex) {
-                    searchHits.add(new SearchHit(doc.doc, "", new Text(""), Collections.emptyMap()));
+                    searchHits.add(new SearchHit(doc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap()));
                     if (doc.score > maxScore) {
                         maxScore = doc.score;
                     }
@@ -208,7 +208,7 @@ public class SearchResponseTests extends ESTestCase {
     }
 
     public void testToXContent() {
-        SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap());
+        SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
         hit.score(2.0f);
         SearchHit[] hits = new SearchHit[] { hit };
         {
@@ -73,14 +73,24 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
         if (randomBoolean()) {
             nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2));
         }
-        Map<String, DocumentField> fields = null;
-        if (frequently()) {
-            fields = new HashMap<>();
-            if (randomBoolean()) {
-                fields = GetResultTests.randomDocumentFields(xContentType).v2();
-            }
+        Map<String, DocumentField> fields = new HashMap<>();
+        if (randomBoolean()) {
+            fields = GetResultTests.randomDocumentFields(xContentType).v2();
         }
-        SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, fields);
+
+        HashMap<String, DocumentField> metaFields = new HashMap<>();
+        HashMap<String, DocumentField> documentFields = new HashMap<>();
+        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
+            if (fieldEntry.getValue().isMetadataField()) {
+                metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            } else {
+                documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+            }
+        }
+
+        SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, documentFields, metaFields);
         if (frequently()) {
             if (rarely()) {
                 hit.score(Float.NaN);
@@ -213,7 +223,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
     }
 
     public void testToXContent() throws IOException {
-        SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap());
+        SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
         searchHit.score(1.5f);
         XContentBuilder builder = JsonXContent.contentBuilder();
         searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
@@ -226,25 +236,25 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
                 clusterAlias, OriginalIndices.NONE);
 
         Map<String, SearchHits> innerHits = new HashMap<>();
-        SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHit1.shard(target);
-        SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerInnerHit2.shard(target);
         innerHits.put("1", new SearchHits(new SearchHit[]{innerInnerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
         innerHit1.setInnerHits(innerHits);
-        SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHit2.shard(target);
-        SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHit3.shard(target);
 
         innerHits = new HashMap<>();
-        SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
         innerHits.put("1", new SearchHits(new SearchHit[]{innerHit1, innerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
         innerHits.put("2", new SearchHits(new SearchHit[]{innerHit3}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
         hit1.shard(target);
         hit1.setInnerHits(innerHits);
 
-        SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
         hit2.shard(target);
 
         SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f);
@@ -271,7 +281,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
     }
 
     public void testNullSource() {
-        SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null);
+        SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null, null);
 
         assertThat(searchHit.getSourceAsMap(), nullValue());
         assertThat(searchHit.getSourceRef(), nullValue());
@@ -208,8 +208,8 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
 
     public void testToXContent() throws IOException {
         SearchHit[] hits = new SearchHit[] {
-            new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()),
-            new SearchHit(2, "id2", new Text("type"), Collections.emptyMap()) };
+            new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
+            new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) };
 
         long totalHits = 1000;
         float maxScore = 1.5f;
@@ -226,9 +226,9 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
     public void testFromXContentWithShards() throws IOException {
         for (boolean withExplanation : new boolean[] {true, false}) {
             final SearchHit[] hits = new SearchHit[]{
-                new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()),
-                new SearchHit(2, "id2", new Text("type"), Collections.emptyMap()),
-                new SearchHit(10, "id10", new Text("type"), Collections.emptyMap())
+                new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
+                new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
+                new SearchHit(10, "id10", new Text("type"), Collections.emptyMap(), Collections.emptyMap())
             };
 
             for (SearchHit hit : hits) {
@@ -50,6 +50,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;

@@ -114,7 +115,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo
             } else {
                 scoreDocs[i] = new ScoreDoc(docId, score);
             }
-            hits[i] = new SearchHit(docId, Integer.toString(i), new Text("test"), searchHitFields);
+            hits[i] = new SearchHit(docId, Integer.toString(i), new Text("test"), searchHitFields, Collections.emptyMap());
             hits[i].score(score);
         }
         int totalHits = between(actualSize, 500000);
@@ -131,7 +131,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
             DocumentField hitField = hitContext.hit().getFields().get(NAME);
             if (hitField == null) {
                 hitField = new DocumentField(NAME, new ArrayList<>(1));
-                hitContext.hit().getFields().put(NAME, hitField);
+                hitContext.hit().setField(NAME, hitField);
             }
             TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
                 hitContext.hit().getType(), hitContext.hit().getId());
@@ -152,7 +152,7 @@ public class FetchSourcePhaseTests extends ESTestCase {
         SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext,
             source == null ? null : BytesReference.bytes(source));
         FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
-        final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null);
+        final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null, null);
         hitContext.reset(searchHit, null, 1, null);
         FetchSourcePhase phase = new FetchSourcePhase();
         phase.hitExecute(searchContext, hitContext);
@@ -261,7 +261,13 @@ public class EnrichShardMultiSearchAction extends ActionType<MultiSearchResponse
                 visitor.reset();
                 searcher.doc(scoreDoc.doc, visitor);
                 visitor.postProcess(mapperService);
-                final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.uid().id(), typeText, Collections.emptyMap());
+                final SearchHit hit = new SearchHit(
+                    scoreDoc.doc,
+                    visitor.uid().id(),
+                    typeText,
+                    Collections.emptyMap(),
+                    Collections.emptyMap()
+                );
                 hit.sourceRef(filterSource(fetchSourceContext, visitor.source()));
                 hits[j] = hit;
             }
@@ -176,6 +176,7 @@ public class GeoMatchProcessorTests extends ESTestCase {
             randomInt(100),
             e.getKey(),
             new Text(MapperService.SINGLE_MAPPING_NAME),
-            Collections.emptyMap()
+            Collections.emptyMap(),
+            Collections.emptyMap()
         );
         try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) {
@@ -386,6 +386,7 @@ public class MatchProcessorTests extends ESTestCase {
             randomInt(100),
             e.getKey().toString(),
             new Text(MapperService.SINGLE_MAPPING_NAME),
-            Collections.emptyMap()
+            Collections.emptyMap(),
+            Collections.emptyMap()
         );
         try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) {
@@ -24,7 +24,7 @@ public class EqlSearchResponseTests extends AbstractSerializingTestCase<EqlSearc
         if (randomBoolean()) {
             hits = new ArrayList<>();
             for (int i = 0; i < size; i++) {
-                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>()));
+                hits.add(new SearchHit(i, randomAlphaOfLength(10), null, new HashMap<>(), new HashMap<>()));
             }
         }
         if (randomBoolean()) {
@@ -235,7 +235,8 @@ public class DataFrameRowsJoinerTests extends ESTestCase {
     }
 
     private static SearchHit newHit(String json) {
-        SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), new Text("doc"), Collections.emptyMap());
+        SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), new Text("doc"),
+            Collections.emptyMap(), Collections.emptyMap());
         hit.sourceRef(new BytesArray(json));
         return hit;
     }
@@ -908,7 +908,7 @@ public class JobResultsProviderTests extends ESTestCase {
             fields.put("field_1", new DocumentField("field_1", Collections.singletonList("foo")));
             fields.put("field_2", new DocumentField("field_2", Collections.singletonList("foo")));
 
-            SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), new Text("foo"), fields)
+            SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), new Text("foo"), fields, Collections.emptyMap())
                     .sourceRef(BytesReference.bytes(XContentFactory.jsonBuilder().map(_source)));
 
             list.add(hit);
@@ -39,6 +39,7 @@ import org.mockito.ArgumentCaptor;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 

@@ -190,7 +191,7 @@ public class MockClientBuilder {
 
         SearchHit hits [] = new SearchHit[fields.size()];
         for (int i=0; i<hits.length; i++) {
-            SearchHit hit = new SearchHit(10, null, null, fields.get(i));
+            SearchHit hit = new SearchHit(10, null, null, fields.get(i), Collections.emptyMap());
             hits[i] = hit;
         }
 
@@ -242,7 +242,7 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase {
                 .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source.streamInput()).map();
             final Map<String, Object> accessToken = (Map<String, Object>) sourceMap.get("access_token");
             final Map<String, Object> userToken = (Map<String, Object>) accessToken.get("user_token");
-            final SearchHit hit = new SearchHit(idx, "token_" + userToken.get("id"), null, null);
+            final SearchHit hit = new SearchHit(idx, "token_" + userToken.get("id"), null, null, null);
             hit.sourceRef(source);
             return hit;
         } catch (IOException e) {
@@ -340,7 +340,7 @@ public class NativePrivilegeStoreTests extends ESTestCase {
         final SearchHit[] hits = new SearchHit[sourcePrivileges.size()];
         for (int i = 0; i < hits.length; i++) {
             final ApplicationPrivilegeDescriptor p = sourcePrivileges.get(i);
-            hits[i] = new SearchHit(i, "application-privilege_" + p.getApplication() + ":" + p.getName(), null, null);
+            hits[i] = new SearchHit(i, "application-privilege_" + p.getApplication() + ":" + p.getName(), null, null, null);
             hits[i].sourceRef(new BytesArray(Strings.toString(p)));
         }
         return hits;
@@ -95,8 +95,8 @@ public class TopHitsAggExtractorTests extends AbstractSqlWireSerializingTestCase
 
     private SearchHits searchHitsOf(Object value) {
         TotalHits totalHits = new TotalHits(10, TotalHits.Relation.EQUAL_TO);
-        return new SearchHits(new SearchHit[] {new SearchHit(1, "docId", null,
-            Collections.singletonMap("topHitsAgg", new DocumentField("field", Collections.singletonList(value))))},
+        return new SearchHits(new SearchHit[] {new SearchHit(1, "docId", null, Collections.singletonMap("topHitsAgg",
+            new DocumentField("field", Collections.singletonList(value))), Collections.emptyMap())},
             totalHits, 0.0f);
     }
 }
@@ -180,7 +180,7 @@ public class WatcherServiceTests extends ESTestCase {
         SearchHit[] hits = new SearchHit[count];
         for (int i = 0; i < count; i++) {
             String id = String.valueOf(i);
-            SearchHit hit = new SearchHit(1, id, new Text("watch"), Collections.emptyMap());
+            SearchHit hit = new SearchHit(1, id, new Text("watch"), Collections.emptyMap(), Collections.emptyMap());
             hit.version(1L);
             hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever", OriginalIndices.NONE));
             hits[i] = hit;
@@ -76,7 +76,7 @@ public class CompareConditionSearchTests extends AbstractWatcherIntegrationTestC
     public void testExecuteAccessHits() throws Exception {
         CompareCondition condition = new CompareCondition("ctx.payload.hits.hits.0._score", CompareCondition.Op.EQ, 1,
                 Clock.systemUTC());
-        SearchHit hit = new SearchHit(0, "1", new Text("type"), null);
+        SearchHit hit = new SearchHit(0, "1", new Text("type"), null, null);
         hit.score(1f);
         hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null, OriginalIndices.NONE));
 
@@ -216,7 +216,7 @@ public class TriggeredWatchStoreTests extends ESTestCase {
         when(searchResponse1.getSuccessfulShards()).thenReturn(1);
         when(searchResponse1.getTotalShards()).thenReturn(1);
         BytesArray source = new BytesArray("{}");
-        SearchHit hit = new SearchHit(0, "first_foo", new Text(SINGLE_MAPPING_NAME), null);
+        SearchHit hit = new SearchHit(0, "first_foo", new Text(SINGLE_MAPPING_NAME), null, null);
         hit.version(1L);
         hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
         hit.sourceRef(source);

@@ -230,7 +230,7 @@ public class TriggeredWatchStoreTests extends ESTestCase {
         }).when(client).execute(eq(SearchAction.INSTANCE), any(), any());
 
         // First return a scroll response with a single hit and then with no hits
-        hit = new SearchHit(0, "second_foo", new Text(SINGLE_MAPPING_NAME), null);
+        hit = new SearchHit(0, "second_foo", new Text(SINGLE_MAPPING_NAME), null, null);
         hit.version(1L);
         hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
         hit.sourceRef(source);