matched queries: Remove redundant and broken code

Because the fetch phase now has access to the nested doc, the logic that detects whether a named nested query/filter matches a hit can be removed (see the sketch below).

Closes #10661
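For context, the check that remains after this change reduces to evaluating each named filter directly against the hit's own doc id. The sketch below is illustrative only, not the literal committed method: it reuses the types already imported by the class, and the iterator fallback branch is an assumption, since the diff below truncates that part of the method.

    // Illustrative sketch of the simplified per-hit check (not the exact committed code).
    private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Filter> namedFiltersAndQueries, List<String> matchedQueries) throws IOException {
        for (Map.Entry<String, Filter> entry : namedFiltersAndQueries.entrySet()) {
            String name = entry.getKey();
            Filter filter = entry.getValue();
            // acceptDocs can be null because only hitContext.docId() is tested
            DocIdSet filterDocIdSet = filter.getDocIdSet(hitContext.readerContext(), null);
            if (!DocIdSets.isEmpty(filterDocIdSet)) {
                Bits bits = filterDocIdSet.bits();
                if (bits != null) {
                    // random-access check of the hit's doc id
                    if (bits.get(hitContext.docId())) {
                        matchedQueries.add(name);
                    }
                } else {
                    // assumed fallback when no random-access bits are available:
                    // advance an iterator to the hit's doc id and compare
                    DocIdSetIterator iterator = filterDocIdSet.iterator();
                    if (iterator != null && iterator.advance(hitContext.docId()) == hitContext.docId()) {
                        matchedQueries.add(name);
                    }
                }
            }
        }
    }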
Author: Martijn van Groningen, 2015-04-21 08:52:51 +02:00
Commit: bb1cd65c0d (parent: f4d5914511)
2 changed files with 13 additions and 46 deletions

Changed file: MatchedQueriesFetchSubPhase.java (org.elasticsearch.search.fetch.matchedqueries)

@@ -20,8 +20,6 @@ package org.elasticsearch.search.fetch.matchedqueries;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
@@ -29,8 +27,6 @@ import org.apache.lucene.util.Bits;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.index.mapper.Uid;
-import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.InternalSearchHit;
@@ -71,16 +67,10 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
         List<String> matchedQueries = Lists.newArrayListWithCapacity(2);
         try {
-            DocIdSet docAndNestedDocsIdSet = null;
-            if (context.mapperService().documentMapper(hitContext.hit().type()).hasNestedObjects()) {
-                // Both main and nested Lucene docs have a _uid field
-                Filter docAndNestedDocsFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(hitContext.hit().type(), hitContext.hit().id())));
-                docAndNestedDocsIdSet = docAndNestedDocsFilter.getDocIdSet(hitContext.readerContext(), null);
-            }
-            addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries, docAndNestedDocsIdSet);
+            addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries);
             if (context.parsedPostFilter() != null) {
-                addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries, docAndNestedDocsIdSet);
+                addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries);
             }
         } catch (IOException e) {
             throw ExceptionsHelper.convertToElastic(e);
@@ -91,29 +81,13 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
         hitContext.hit().matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
     }
-    private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Filter> namedFiltersAndQueries, List<String> matchedQueries, DocIdSet docAndNestedDocsIdSet) throws IOException {
+    private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Filter> namedFiltersAndQueries, List<String> matchedQueries) throws IOException {
         for (Map.Entry<String, Filter> entry : namedFiltersAndQueries.entrySet()) {
             String name = entry.getKey();
             Filter filter = entry.getValue();
             DocIdSet filterDocIdSet = filter.getDocIdSet(hitContext.readerContext(), null); // null is fine, since we filter by hitContext.docId()
             if (!DocIdSets.isEmpty(filterDocIdSet)) {
-                if (!DocIdSets.isEmpty(docAndNestedDocsIdSet)) {
-                    DocIdSetIterator filterIterator = filterDocIdSet.iterator();
-                    DocIdSetIterator docAndNestedDocsIterator = docAndNestedDocsIdSet.iterator();
-                    if (filterIterator != null && docAndNestedDocsIterator != null) {
-                        int matchedDocId = -1;
-                        for (int docId = docAndNestedDocsIterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = docAndNestedDocsIterator.nextDoc()) {
-                            if (docId != matchedDocId) {
-                                matchedDocId = filterIterator.advance(docId);
-                            }
-                            if (matchedDocId == docId) {
-                                matchedQueries.add(name);
-                                break;
-                            }
-                        }
-                    }
-                } else {
                 Bits bits = filterDocIdSet.bits();
                 if (bits != null) {
                     if (bits.get(hitContext.docId())) {
@@ -131,4 +105,3 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase {
-            }
         }
     }
 }

Changed file: TopHitsTests.java

@@ -63,13 +63,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.emptyArray;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-import static org.hamcrest.Matchers.sameInstance;
+import static org.hamcrest.Matchers.*;
 /**
  *
@@ -776,7 +770,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
         assertThat(topReviewers.getHits().getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
     }
-    @Test @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/10661")
+    @Test
     public void testNestedFetchFeatures() {
         String hlType = randomFrom("plain", "fvh", "postings");
         HighlightBuilder.Field hlField = new HighlightBuilder.Field("comments.message")
@@ -826,7 +820,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
             assertThat(version, equalTo(1l));
-            // Can't use named queries for the same reason explain doesn't work:
-            assertThat(searchHit.matchedQueries(), emptyArray());
+            assertThat(searchHit.matchedQueries(), arrayContaining("test"));
             SearchHitField field = searchHit.field("comments.user");
             assertThat(field.getValue().toString(), equalTo("a"));
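The updated assertion reflects that named queries are now also reported for hits returned from a nested context. A hypothetical usage sketch follows; the index, field, and aggregation names are invented, and the builder calls are assumptions against the Java client API of this era (static imports of QueryBuilders.*, AggregationBuilders.*, and org.hamcrest.Matchers.* assumed, client() as provided by ElasticsearchIntegrationTest).

    // Hypothetical example: a named ("test") query inside a nested query; hits returned by a
    // nested top_hits aggregation now report it via matchedQueries() instead of an empty array.
    SearchResponse response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "some comment").queryName("test")))
            .addAggregation(nested("to-comments").path("comments")
                    .subAggregation(topHits("top-comments")))
            .get();

    Nested toComments = response.getAggregations().get("to-comments");
    TopHits topComments = toComments.getAggregations().get("top-comments");
    for (SearchHit hit : topComments.getHits()) {
        // before this commit the array was empty for nested hits
        assertThat(hit.matchedQueries(), arrayContaining("test"));
    }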