Merge pull request #10694 from martijnvg/matched_queries/cleanup
matched queries: Remove redundant and broken code
commit 24c75dec2d
MatchedQueriesFetchSubPhase.java

@@ -20,8 +20,6 @@ package org.elasticsearch.search.fetch.matchedqueries;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
@@ -29,8 +27,6 @@ import org.apache.lucene.util.Bits;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.index.mapper.Uid;
-import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.InternalSearchHit;
@@ -71,16 +67,10 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
         List<String> matchedQueries = Lists.newArrayListWithCapacity(2);

         try {
-            DocIdSet docAndNestedDocsIdSet = null;
-            if (context.mapperService().documentMapper(hitContext.hit().type()).hasNestedObjects()) {
-                // Both main and nested Lucene docs have a _uid field
-                Filter docAndNestedDocsFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(hitContext.hit().type(), hitContext.hit().id())));
-                docAndNestedDocsIdSet = docAndNestedDocsFilter.getDocIdSet(hitContext.readerContext(), null);
-            }
-            addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries, docAndNestedDocsIdSet);
+            addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries);

             if (context.parsedPostFilter() != null) {
-                addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries, docAndNestedDocsIdSet);
+                addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries);
             }
         } catch (IOException e) {
             throw ExceptionsHelper.convertToElastic(e);
@@ -91,41 +81,24 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
         hitContext.hit().matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
     }

-    private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Filter> namedFiltersAndQueries, List<String> matchedQueries, DocIdSet docAndNestedDocsIdSet) throws IOException {
+    private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Filter> namedFiltersAndQueries, List<String> matchedQueries) throws IOException {
         for (Map.Entry<String, Filter> entry : namedFiltersAndQueries.entrySet()) {
             String name = entry.getKey();
             Filter filter = entry.getValue();

             DocIdSet filterDocIdSet = filter.getDocIdSet(hitContext.readerContext(), null); // null is fine, since we filter by hitContext.docId()
             if (!DocIdSets.isEmpty(filterDocIdSet)) {
-                if (!DocIdSets.isEmpty(docAndNestedDocsIdSet)) {
-                    DocIdSetIterator filterIterator = filterDocIdSet.iterator();
-                    DocIdSetIterator docAndNestedDocsIterator = docAndNestedDocsIdSet.iterator();
-                    if (filterIterator != null && docAndNestedDocsIterator != null) {
-                        int matchedDocId = -1;
-                        for (int docId = docAndNestedDocsIterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = docAndNestedDocsIterator.nextDoc()) {
-                            if (docId != matchedDocId) {
-                                matchedDocId = filterIterator.advance(docId);
-                            }
-                            if (matchedDocId == docId) {
-                                matchedQueries.add(name);
-                                break;
-                            }
-                        }
-                    }
+                Bits bits = filterDocIdSet.bits();
+                if (bits != null) {
+                    if (bits.get(hitContext.docId())) {
+                        matchedQueries.add(name);
                     }
                 } else {
-                    Bits bits = filterDocIdSet.bits();
-                    if (bits != null) {
-                        if (bits.get(hitContext.docId())) {
+                    DocIdSetIterator iterator = filterDocIdSet.iterator();
+                    if (iterator != null) {
+                        if (iterator.advance(hitContext.docId()) == hitContext.docId()) {
                             matchedQueries.add(name);
                         }
-                    } else {
-                        DocIdSetIterator iterator = filterDocIdSet.iterator();
-                        if (iterator != null) {
-                            if (iterator.advance(hitContext.docId()) == hitContext.docId()) {
-                                matchedQueries.add(name);
-                            }
-                        }
-                    }
                 }
             }
         }
     }
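For context, the logic this change keeps is a per-hit membership test against a Lucene DocIdSet: use the random-access Bits view when the set exposes one, otherwise advance the iterator to the hit's doc id. Below is a minimal standalone sketch of that check, assuming the Lucene 4.x DocIdSet/DocIdSetIterator/Bits APIs this file already imports; the class and method names are illustrative and not part of the commit.

import java.io.IOException;

import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits;

// Illustrative helper, not part of the commit: does docId belong to the set?
final class DocIdSetMembership {

    static boolean matchesDoc(DocIdSet docIdSet, int docId) throws IOException {
        if (docIdSet == null) {
            return false; // an absent set matches nothing
        }
        Bits bits = docIdSet.bits();
        if (bits != null) {
            // Random access is available, so check the doc id directly.
            return bits.get(docId);
        }
        DocIdSetIterator iterator = docIdSet.iterator();
        if (iterator == null) {
            return false;
        }
        // advance() positions the iterator on the first doc >= docId;
        // the doc matches only if the iterator lands exactly on it.
        return iterator.advance(docId) == docId;
    }
}

The removed branch instead walked every Lucene doc in docAndNestedDocsIdSet (the root doc plus its nested docs, selected by the hit's _uid) and reported a match if the named filter matched any of them, rather than testing the hit's own doc id.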
TopHitsTests.java

@@ -63,13 +63,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.emptyArray;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-import static org.hamcrest.Matchers.sameInstance;
+import static org.hamcrest.Matchers.*;

 /**
  *
@@ -776,7 +770,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
         assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
     }

-    @Test @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/10661")
+    @Test
     public void testNestedFetchFeatures() {
         String hlType = randomFrom("plain", "fvh", "postings");
         HighlightBuilder.Field hlField = new HighlightBuilder.Field("comments.message")
@@ -826,7 +820,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
         assertThat(version, equalTo(1l));

         // Can't use named queries for the same reason explain doesn't work:
-        assertThat(searchHit.matchedQueries(), emptyArray());
+        assertThat(searchHit.matchedQueries(), arrayContaining("test"));

         SearchHitField field = searchHit.field("comments.user");
         assertThat(field.getValue().toString(), equalTo("a"));
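The updated assertion expects the named query to be reported again on nested top hits. For reference, here is a rough sketch of how a named query is attached and read back through the Java client of that era; the index name, field, query text, and client wiring are illustrative assumptions, not taken from the test.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;

class NamedQueryExample {

    // "articles" and "comments.message" are placeholder names for illustration.
    static void printMatchedQueries(Client client) {
        SearchResponse response = client.prepareSearch("articles")
                .setQuery(QueryBuilders.matchQuery("comments.message", "elasticsearch").queryName("test"))
                .get();
        for (SearchHit hit : response.getHits()) {
            // matchedQueries() lists the _name values of the queries/filters this hit matched.
            for (String name : hit.matchedQueries()) {
                System.out.println(hit.getId() + " matched " + name);
            }
        }
    }
}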