Avoid reloading _source for every inner hit. (#60632)

Previously if an inner_hits block required _source, we would reload and parse
the root document's source for every hit. This PR adds a shared SourceLookup to
the inner hits context that allows inner hits to reuse parsed source if it's
already available. This matches our approach for sharing the root document ID.

Relates to #32818.
This commit is contained in:
Julie Tibshirani 2020-08-03 17:12:27 -07:00 committed by GitHub
parent 80584d266d
commit f99584c6f3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 212 additions and 178 deletions

View File

@ -97,83 +97,75 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
}
@Override
public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
Weight innerHitQueryWeight = createInnerHitQueryWeight();
TopDocsAndMaxScore[] result = new TopDocsAndMaxScore[hits.length];
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
String joinName = getSortedDocValue(joinFieldMapper.name(), context, hit.docId());
if (joinName == null) {
result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
continue;
}
public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException {
Weight innerHitQueryWeight = getInnerHitQueryWeight();
String joinName = getSortedDocValue(joinFieldMapper.name(), context, hit.docId());
if (joinName == null) {
return new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
}
QueryShardContext qsc = context.getQueryShardContext();
ParentIdFieldMapper parentIdFieldMapper =
joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false);
if (parentIdFieldMapper == null) {
result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
continue;
}
QueryShardContext qsc = context.getQueryShardContext();
ParentIdFieldMapper parentIdFieldMapper =
joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false);
if (parentIdFieldMapper == null) {
return new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
}
Query q;
if (fetchChildInnerHits) {
Query hitQuery = parentIdFieldMapper.fieldType().termQuery(hit.getId(), qsc);
q = new BooleanQuery.Builder()
// Only include child documents that have the current hit as parent:
.add(hitQuery, BooleanClause.Occur.FILTER)
// and only include child documents of a single relation:
.add(joinFieldMapper.fieldType().termQuery(typeName, qsc), BooleanClause.Occur.FILTER)
.build();
} else {
String parentId = getSortedDocValue(parentIdFieldMapper.name(), context, hit.docId());
if (parentId == null) {
result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
continue;
}
q = context.mapperService().fieldType(IdFieldMapper.NAME).termQuery(parentId, qsc);
Query q;
if (fetchChildInnerHits) {
Query hitQuery = parentIdFieldMapper.fieldType().termQuery(hit.getId(), qsc);
q = new BooleanQuery.Builder()
// Only include child documents that have the current hit as parent:
.add(hitQuery, BooleanClause.Occur.FILTER)
// and only include child documents of a single relation:
.add(joinFieldMapper.fieldType().termQuery(typeName, qsc), BooleanClause.Occur.FILTER)
.build();
} else {
String parentId = getSortedDocValue(parentIdFieldMapper.name(), context, hit.docId());
if (parentId == null) {
return new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
}
q = context.mapperService().fieldType(IdFieldMapper.NAME).termQuery(parentId, qsc);
}
Weight weight = context.searcher().createWeight(context.searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
}
result[i] = new TopDocsAndMaxScore(
new TopDocs(
new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO),
Lucene.EMPTY_SCORE_DOCS
), Float.NaN);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
if (trackScores()) {
maxScoreCollector = new MaxScoreCollector();
}
} else {
topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
Weight weight = context.searcher().createWeight(context.searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
}
return new TopDocsAndMaxScore(
new TopDocs(
new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO),
Lucene.EMPTY_SCORE_DOCS
), Float.NaN);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
if (trackScores()) {
maxScoreCollector = new MaxScoreCollector();
}
try {
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
}
} finally {
clearReleasables(Lifetime.COLLECTION);
}
TopDocs topDocs = topDocsCollector.topDocs(from(), size());
float maxScore = Float.NaN;
if (maxScoreCollector != null) {
maxScore = maxScoreCollector.getMaxScore();
}
result[i] = new TopDocsAndMaxScore(topDocs, maxScore);
} else {
topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
maxScoreCollector = new MaxScoreCollector();
}
try {
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
}
} finally {
clearReleasables(Lifetime.COLLECTION);
}
TopDocs topDocs = topDocsCollector.topDocs(from(), size());
float maxScore = Float.NaN;
if (maxScoreCollector != null) {
maxScore = maxScoreCollector.getMaxScore();
}
return new TopDocsAndMaxScore(topDocs, maxScore);
}
return result;
}
private String getSortedDocValue(String field, SearchContext context, int docId) {

View File

@ -644,6 +644,17 @@ public class InnerHitsIT extends ESIntegTestCase {
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0));
// Check that inner hits contain _source even when it's disabled on the root request.
response = client().prepareSearch()
.setFetchSource(false)
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None)
.innerHit(new InnerHitBuilder()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty());
}
public void testInnerHitsWithIgnoreUnmapped() throws Exception {

View File

@ -388,61 +388,57 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
}
@Override
public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
Weight innerHitQueryWeight = createInnerHitQueryWeight();
TopDocsAndMaxScore[] result = new TopDocsAndMaxScore[hits.length];
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
Query rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated());
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException {
Weight innerHitQueryWeight = getInnerHitQueryWeight();
int parentDocId = hit.docId();
final int readerIndex = ReaderUtil.subIndex(parentDocId, searcher().getIndexReader().leaves());
// With nested inner hits the nested docs are always in the same segment, so there is no need to use the other segments
LeafReaderContext ctx = searcher().getIndexReader().leaves().get(readerIndex);
Query rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated());
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
Query childFilter = childObjectMapper.nestedTypeFilter();
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
Query q = new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId);
Weight weight = context.searcher().createWeight(context.searcher().rewrite(q),
org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
result[i] = new TopDocsAndMaxScore(new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(),
TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
if (trackScores()) {
maxScoreCollector = new MaxScoreCollector();
}
} else {
topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
int parentDocId = hit.docId();
final int readerIndex = ReaderUtil.subIndex(parentDocId, searcher().getIndexReader().leaves());
// With nested inner hits the nested docs are always in the same segment, so there is no need to use the other segments
LeafReaderContext ctx = searcher().getIndexReader().leaves().get(readerIndex);
Query childFilter = childObjectMapper.nestedTypeFilter();
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
Query q = new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId);
Weight weight = context.searcher().createWeight(context.searcher().rewrite(q),
org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
return new TopDocsAndMaxScore(new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(),
TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
if (trackScores()) {
maxScoreCollector = new MaxScoreCollector();
}
try {
intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
} finally {
clearReleasables(Lifetime.COLLECTION);
}
TopDocs td = topDocsCollector.topDocs(from(), size());
float maxScore = Float.NaN;
if (maxScoreCollector != null) {
maxScore = maxScoreCollector.getMaxScore();
}
result[i] = new TopDocsAndMaxScore(td, maxScore);
} else {
topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
maxScoreCollector = new MaxScoreCollector();
}
try {
intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
} finally {
clearReleasables(Lifetime.COLLECTION);
}
TopDocs td = topDocsCollector.topDocs(from(), size());
float maxScore = Float.NaN;
if (maxScoreCollector != null) {
maxScore = maxScoreCollector.getMaxScore();
}
return new TopDocsAndMaxScore(td, maxScore);
}
return result;
}
}
}

View File

@ -286,19 +286,33 @@ public class FetchPhase implements SearchPhase {
// Also if highlighting is requested on nested documents we need to fetch the _source from the root document,
// otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail,
// because the entire _source is only stored with the root document.
final Uid uid;
final BytesReference source;
final boolean needSource = context.sourceRequested() || context.highlight() != null;
if (needSource || (context instanceof InnerHitsContext.InnerHitSubContext == false)) {
boolean needSource = context.sourceRequested() || context.highlight() != null;
Uid rootId;
Map<String, Object> rootSourceAsMap = null;
XContentType rootSourceContentType = null;
if (context instanceof InnerHitsContext.InnerHitSubContext) {
InnerHitsContext.InnerHitSubContext innerHitsContext = (InnerHitsContext.InnerHitSubContext) context;
rootId = innerHitsContext.getRootId();
if (needSource) {
SourceLookup rootLookup = innerHitsContext.getRootLookup();
rootSourceAsMap = rootLookup.loadSourceIfNeeded();
rootSourceContentType = rootLookup.sourceContentType();
}
} else {
FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource);
loadStoredFields(context.shardTarget(), subReaderContext, rootFieldsVisitor, rootSubDocId);
rootFieldsVisitor.postProcess(context.mapperService());
uid = rootFieldsVisitor.uid();
source = rootFieldsVisitor.source();
} else {
// In case of nested inner hits we already know the uid, so no need to fetch it from stored fields again!
uid = ((InnerHitsContext.InnerHitSubContext) context).getUid();
source = null;
rootId = rootFieldsVisitor.uid();
if (needSource) {
BytesReference rootSource = rootFieldsVisitor.source();
Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(rootSource, false);
rootSourceAsMap = tuple.v2();
rootSourceContentType = tuple.v1();
}
}
Map<String, DocumentField> docFields = emptyMap();
@ -322,14 +336,10 @@ public class FetchPhase implements SearchPhase {
SearchHit.NestedIdentity nestedIdentity =
getInternalNestedIdentity(context, nestedSubDocId, subReaderContext, context.mapperService(), nestedObjectMapper);
SearchHit hit = new SearchHit(nestedTopDocId, uid.id(), typeText, nestedIdentity, docFields, metaFields);
SearchHit hit = new SearchHit(nestedTopDocId, rootId.id(), typeText, nestedIdentity, docFields, metaFields);
hitContext.reset(hit, subReaderContext, nestedSubDocId, context.searcher());
if (source != null) {
Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(source, true);
XContentType contentType = tuple.v1();
Map<String, Object> sourceAsMap = tuple.v2();
if (rootSourceAsMap != null) {
// Isolate the nested json array object that matches with nested hit and wrap it back into the same json
// structure with the nested json array object being the actual content. The latter is important, so that
// features like source filtering and highlighting work consistent regardless of whether the field points
@ -339,7 +349,7 @@ public class FetchPhase implements SearchPhase {
for (SearchHit.NestedIdentity nested = nestedIdentity; nested != null; nested = nested.getChild()) {
String nestedPath = nested.getField().string();
current.put(nestedPath, new HashMap<>());
Object extractedValue = XContentMapValues.extractValue(nestedPath, sourceAsMap);
Object extractedValue = XContentMapValues.extractValue(nestedPath, rootSourceAsMap);
List<?> nestedParsedSource;
if (extractedValue instanceof List) {
// nested field has an array value in the _source
@ -361,9 +371,9 @@ public class FetchPhase implements SearchPhase {
throw new IllegalArgumentException("Cannot execute inner hits. One or more parent object fields of nested field [" +
nestedObjectMapper.name() + "] are not nested. All parent fields need to be nested fields too");
}
sourceAsMap = (Map<String, Object>) nestedParsedSource.get(nested.getOffset());
rootSourceAsMap = (Map<String, Object>) nestedParsedSource.get(nested.getOffset());
if (nested.getChild() == null) {
current.put(nestedPath, sourceAsMap);
current.put(nestedPath, rootSourceAsMap);
} else {
Map<String, Object> next = new HashMap<>();
current.put(nestedPath, next);
@ -372,7 +382,7 @@ public class FetchPhase implements SearchPhase {
}
hitContext.sourceLookup().setSource(nestedSourceAsMap);
hitContext.sourceLookup().setSourceContentType(contentType);
hitContext.sourceLookup().setSourceContentType(rootSourceContentType);
}
}

View File

@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.Arrays;
@ -79,9 +80,11 @@ public final class InnerHitsContext {
private final String name;
protected final SearchContext context;
private InnerHitsContext childInnerHits;
private Weight innerHitQueryWeight;
// TODO: when types are complete removed just use String instead for the id:
private Uid uid;
private Uid rootId;
private SourceLookup rootLookup;
protected InnerHitSubContext(String name, SearchContext context) {
super(context);
@ -89,7 +92,7 @@ public final class InnerHitsContext {
this.context = context;
}
public abstract TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException;
public abstract TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException;
public String getName() {
return name;
@ -104,22 +107,43 @@ public final class InnerHitsContext {
this.childInnerHits = new InnerHitsContext(childInnerHits);
}
protected Weight createInnerHitQueryWeight() throws IOException {
final boolean needsScores = size() != 0 && (sort() == null || sort().sort.needsScores());
return context.searcher().createWeight(context.searcher().rewrite(query()),
protected Weight getInnerHitQueryWeight() throws IOException {
if (innerHitQueryWeight == null) {
final boolean needsScores = size() != 0 && (sort() == null || sort().sort.needsScores());
innerHitQueryWeight = context.searcher().createWeight(context.searcher().rewrite(query()),
needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES, 1f);
}
return innerHitQueryWeight;
}
public SearchContext parentSearchContext() {
return context;
}
public Uid getUid() {
return uid;
/**
* The _id of the root document.
*
* Since this ID is available on the context, inner hits can avoid re-loading the root _id.
*/
public Uid getRootId() {
return rootId;
}
public void setUid(Uid uid) {
this.uid = uid;
public void setRootId(Uid rootId) {
this.rootId = rootId;
}
/**
* A source lookup for the root document.
*
* This shared lookup allows inner hits to avoid re-loading the root _source.
*/
public SourceLookup getRootLookup() {
return rootLookup;
}
public void setRootLookup(SourceLookup rootLookup) {
this.rootLookup = rootLookup;
}
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.HashMap;
@ -43,44 +44,44 @@ public final class InnerHitsPhase implements FetchSubPhase {
}
@Override
public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
if ((context.innerHits() != null && context.innerHits().getInnerHits().size() > 0) == false) {
public void hitExecute(SearchContext context, HitContext hitContext) throws IOException {
if (context.innerHits() == null) {
return;
}
SearchHit hit = hitContext.hit();
SourceLookup sourceLookup = hitContext.sourceLookup();
for (Map.Entry<String, InnerHitsContext.InnerHitSubContext> entry : context.innerHits().getInnerHits().entrySet()) {
InnerHitsContext.InnerHitSubContext innerHits = entry.getValue();
TopDocsAndMaxScore[] topDocs = innerHits.topDocs(hits);
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
TopDocsAndMaxScore topDoc = topDocs[i];
TopDocsAndMaxScore topDoc = innerHits.topDocs(hit);
Map<String, SearchHits> results = hit.getInnerHits();
if (results == null) {
hit.setInnerHits(results = new HashMap<>());
}
innerHits.queryResult().topDocs(topDoc, innerHits.sort() == null ? null : innerHits.sort().formats);
int[] docIdsToLoad = new int[topDoc.topDocs.scoreDocs.length];
for (int j = 0; j < topDoc.topDocs.scoreDocs.length; j++) {
docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc;
}
innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
innerHits.setUid(new Uid(hit.getType(), hit.getId()));
fetchPhase.execute(innerHits);
FetchSearchResult fetchResult = innerHits.fetchResult();
SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int j = 0; j < internalHits.length; j++) {
ScoreDoc scoreDoc = topDoc.topDocs.scoreDocs[j];
SearchHit searchHitFields = internalHits[j];
searchHitFields.score(scoreDoc.score);
if (scoreDoc instanceof FieldDoc) {
FieldDoc fieldDoc = (FieldDoc) scoreDoc;
searchHitFields.sortValues(fieldDoc.fields, innerHits.sort().formats);
}
}
results.put(entry.getKey(), fetchResult.hits());
Map<String, SearchHits> results = hit.getInnerHits();
if (results == null) {
hit.setInnerHits(results = new HashMap<>());
}
innerHits.queryResult().topDocs(topDoc, innerHits.sort() == null ? null : innerHits.sort().formats);
int[] docIdsToLoad = new int[topDoc.topDocs.scoreDocs.length];
for (int j = 0; j < topDoc.topDocs.scoreDocs.length; j++) {
docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc;
}
innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
innerHits.setRootId(new Uid(hit.getType(), hit.getId()));
innerHits.setRootLookup(sourceLookup);
fetchPhase.execute(innerHits);
FetchSearchResult fetchResult = innerHits.fetchResult();
SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int j = 0; j < internalHits.length; j++) {
ScoreDoc scoreDoc = topDoc.topDocs.scoreDocs[j];
SearchHit searchHitFields = internalHits[j];
searchHitFields.score(scoreDoc.score);
if (scoreDoc instanceof FieldDoc) {
FieldDoc fieldDoc = (FieldDoc) scoreDoc;
searchHitFields.sortValues(fieldDoc.fields, innerHits.sort().formats);
}
}
results.put(entry.getKey(), fetchResult.hits());
}
}
}