cleanup: Move catching of IOException higher up the stack to reduce the number of try-catch clauses.

Martijn van Groningen 2015-01-21 15:39:17 +01:00
parent 25f944009c
commit d038f372d4
2 changed files with 18 additions and 23 deletions
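
The pattern is straightforward: instead of every topDocs implementation wrapping its Lucene calls in its own try/catch, the abstract method now declares throws IOException and the exception is handled once at the call site. A minimal sketch of the producer side of that pattern, using hypothetical stand-in names rather than the real Elasticsearch classes:

import java.io.IOException;

// Hypothetical stand-ins for BaseInnerHits and one of its subclasses; names are illustrative only.
abstract class HitsSource {
    // Declaring the checked exception here removes the need for a local
    // catch-and-convert block in every concrete implementation.
    abstract long topDocs() throws IOException;
}

class NestedHitsSource extends HitsSource {
    @Override
    long topDocs() throws IOException {
        // Calls that may throw IOException (e.g. Lucene searches) can be made
        // directly; the exception simply propagates to the single caller.
        return searchSomething();
    }

    private long searchSomething() throws IOException {
        return 42L; // placeholder for an actual index search
    }
}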

InnerHitsContext.java

@@ -88,7 +88,7 @@ public final class InnerHitsContext {
             return new ParsedQuery(query, ImmutableMap.<String, Filter>of());
         }
-        public abstract TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext);
+        public abstract TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException;
         @Override
         public InnerHitsContext innerHits() {
@@ -109,7 +109,7 @@ public final class InnerHitsContext {
         }
         @Override
-        public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) {
+        public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
             TopDocsCollector topDocsCollector;
             int topN = from() + size();
             if (sort() != null) {
@@ -130,12 +130,8 @@ public final class InnerHitsContext {
             }
             BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter);
             Filter childFilter = context.filterCache().cache(childObjectMapper.nestedTypeFilter(), null, context.queryParserService().autoFilterCachePolicy());
-            try {
-                Query q = new FilteredQuery(query, new NestedChildrenFilter(parentFilter, childFilter, hitContext));
-                context.searcher().search(q, topDocsCollector);
-            } catch (IOException e) {
-                throw ExceptionsHelper.convertToElastic(e);
-            }
+            Query q = new FilteredQuery(query, new NestedChildrenFilter(parentFilter, childFilter, hitContext));
+            context.searcher().search(q, topDocsCollector);
             return topDocsCollector.topDocs(from(), size());
         }
@@ -244,15 +240,11 @@ public final class InnerHitsContext {
         }
         @Override
-        public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) {
+        public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
             TopDocsCollector topDocsCollector;
             int topN = from() + size();
             if (sort() != null) {
-                try {
-                    topDocsCollector = TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores());
-                } catch (IOException e) {
-                    throw ExceptionsHelper.convertToElastic(e);
-                }
+                topDocsCollector = TopFieldCollector.create(sort(), topN, true, trackScores(), trackScores());
             } else {
                 topDocsCollector = TopScoreDocCollector.create(topN);
             }
@@ -269,14 +261,10 @@ public final class InnerHitsContext {
             String term = Uid.createUid(hitContext.hit().type(), hitContext.hit().id());
             Filter filter = new TermFilter(new Term(field, term)); // Only include docs that have the current hit as parent
             Filter typeFilter = documentMapper.typeFilter(); // Only include docs that have this inner hits type.
-            try {
-                context.searcher().search(
-                        new FilteredQuery(query, new AndFilter(Arrays.asList(filter, typeFilter))),
-                        topDocsCollector
-                );
-            } catch (IOException e) {
-                throw ExceptionsHelper.convertToElastic(e);
-            }
+            context.searcher().search(
+                    new FilteredQuery(query, new AndFilter(Arrays.asList(filter, typeFilter))),
+                    topDocsCollector
+            );
             return topDocsCollector.topDocs(from(), size());
         }
     }
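
Only the consumer of topDocs now needs a try/catch, converting the checked IOException into an unchecked exception, which is what the second file below does via ExceptionsHelper.convertToElastic. A self-contained sketch of that single conversion point, again with hypothetical names:

import java.io.IOException;
import java.io.UncheckedIOException;

// "IoSupplier" is an illustrative functional interface, not an Elasticsearch type.
interface IoSupplier<T> {
    T get() throws IOException;
}

class HitsConsumer {
    <T> T fetch(IoSupplier<T> source) {
        try {
            return source.get(); // the only try/catch left on this code path
        } catch (IOException e) {
            // Elasticsearch rethrows via ExceptionsHelper.convertToElastic(e);
            // a plain UncheckedIOException stands in for that wrapping here.
            throw new UncheckedIOException(e);
        }
    }
}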

InnerHitsFetchSubPhase.java

@@ -24,6 +24,7 @@ import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.fetch.FetchPhase;
@@ -38,6 +39,7 @@ import org.elasticsearch.search.internal.InternalSearchHits;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.sort.SortParseElement;
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -79,7 +81,12 @@ public class InnerHitsFetchSubPhase implements FetchSubPhase {
         Map<String, InternalSearchHits> results = new HashMap<>();
         for (Map.Entry<String, InnerHitsContext.BaseInnerHits> entry : context.innerHits().getInnerHits().entrySet()) {
             InnerHitsContext.BaseInnerHits innerHits = entry.getValue();
-            TopDocs topDocs = innerHits.topDocs(context, hitContext);
+            TopDocs topDocs;
+            try {
+                topDocs = innerHits.topDocs(context, hitContext);
+            } catch (IOException e) {
+                throw ExceptionsHelper.convertToElastic(e);
+            }
             innerHits.queryResult().topDocs(topDocs);
             int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
             for (int i = 0; i < topDocs.scoreDocs.length; i++) {