Highlighting: The result of highlighting for a hit can contain data from another document, closes #600.

This commit is contained in:
kimchy 2011-01-05 15:45:46 +02:00
parent 6258915205
commit 31231531e1
2 changed files with 62 additions and 1 deletions

View File

@@ -155,7 +155,7 @@ public class HighlightPhase implements SearchHitPhase {
try {
// a HACK to make highlighter do highlighting, even though its using the single frag list builder
int numberOfFragments = field.numberOfFragments() == 0 ? 1 : field.numberOfFragments();
-                fragments = highlighter.getBestFragments(fieldQuery, context.searcher().getIndexReader(), docId, mapper.names().indexName(), field.fragmentCharSize(), numberOfFragments);
+                fragments = highlighter.getBestFragments(fieldQuery, reader, docId, mapper.names().indexName(), field.fragmentCharSize(), numberOfFragments);
} catch (IOException e) {
throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + field.field() + "]", e);
}

View File

@@ -19,7 +19,9 @@
package org.elasticsearch.test.integration.search.highlight;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -190,6 +192,65 @@ public class HighlighterSearchTests extends AbstractNodesTests {
assertThat(searchResponse.hits().getAt(0).highlightFields().get("field2").fragments()[0], equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
}
@Test public void testFastVectorHighlighterManyDocs() throws Exception {
    // Start from a clean slate; a missing index on the first run is expected.
    try {
        client.admin().indices().prepareDelete("test").execute().actionGet();
    } catch (ElasticSearchException e) {
        assertThat(e.unwrapCause(), instanceOf(IndexMissingException.class));
    }
    client.admin().indices().prepareCreate("test").addMapping("type1", type1TermVectorMapping()).execute().actionGet();
    client.admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();

    final int numDocs = 100;
    logger.info("--> indexing docs");
    for (int doc = 0; doc < numDocs; doc++) {
        client.prepareIndex("test", "type1", Integer.toString(doc)).setSource("field1", "test " + doc).execute().actionGet();
        if (doc % 5 == 0) {
            // flush so we get updated readers and segmented readers
            client.admin().indices().prepareFlush().execute().actionGet();
        }
    }
    client.admin().indices().prepareRefresh().execute().actionGet();

    // Each hit's highlight must come from that hit's own document; the fragment
    // embeds the doc id, so a cross-document mix-up fails the per-hit assertion.
    logger.info("--> searching explicitly on field1 and highlighting on it");
    SearchResponse searchResponse = client.prepareSearch()
            .setSize(numDocs)
            .setQuery(termQuery("field1", "test"))
            .addHighlightedField("field1", 100, 0)
            .execute().actionGet();
    assertThat(searchResponse.hits().totalHits(), equalTo((long) numDocs));
    assertThat(searchResponse.hits().hits().length, equalTo(numDocs));
    for (SearchHit hit : searchResponse.hits()) {
        assertThat(hit.highlightFields().get("field1").fragments()[0], equalTo("<em>test</em> " + hit.id()));
    }

    logger.info("--> searching explicitly on field1 and highlighting on it, with DFS");
    searchResponse = client.prepareSearch()
            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
            .setSize(numDocs)
            .setQuery(termQuery("field1", "test"))
            .addHighlightedField("field1", 100, 0)
            .execute().actionGet();
    assertThat(searchResponse.hits().totalHits(), equalTo((long) numDocs));
    assertThat(searchResponse.hits().hits().length, equalTo(numDocs));
    for (SearchHit hit : searchResponse.hits()) {
        assertThat(hit.highlightFields().get("field1").fragments()[0], equalTo("<em>test</em> " + hit.id()));
    }

    logger.info("--> searching explicitly _all and highlighting on _all");
    searchResponse = client.prepareSearch()
            .setSize(numDocs)
            .setQuery(termQuery("_all", "test"))
            .addHighlightedField("_all", 100, 0)
            .execute().actionGet();
    assertThat(searchResponse.hits().totalHits(), equalTo((long) numDocs));
    assertThat(searchResponse.hits().hits().length, equalTo(numDocs));
    for (SearchHit hit : searchResponse.hits()) {
        assertThat(hit.highlightFields().get("_all").fragments()[0], equalTo("<em>test</em> " + hit.id() + " "));
    }
}
public XContentBuilder type1TermVectorMapping() throws IOException {
return XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject()