highlighter: Safely unwrap BytesRefHash.MaxBytesLengthExceededException

In Lucene 5.x the exception thrown when the highlighter encounters a huge term
is a BytesRefHash.MaxBytesLengthExceededException, but in Lucene 4.x it is
wrapped in a RuntimeException. Therefore, it seems safer to unwrap it.
Britta Weber 2015-06-25 12:32:03 +02:00
parent 7cf3096c3c
commit 74aed85c36
2 changed files with 6 additions and 1 deletion
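
To illustrate the point of the commit message: once the exception arrives wrapped in a RuntimeException, an instanceof check on the caught exception no longer matches, while walking the cause chain still finds it, which is what the switch to ExceptionsHelper.unwrap in the diff below relies on. A minimal standalone sketch of that difference (the helper and the HugeTermException stand-in are illustrative, not Elasticsearch or Lucene code):

// Illustrative sketch only: why an instanceof check misses a wrapped cause.
public final class UnwrapSketch {

    // Walk the cause chain and return the first throwable of the given type, or null.
    // (Stand-in for what a helper like ExceptionsHelper.unwrap is used for in the diff below.)
    static <T extends Throwable> T unwrapCause(Throwable t, Class<T> type) {
        for (Throwable current = t; current != null; current = current.getCause()) {
            if (type.isInstance(current)) {
                return type.cast(current);
            }
        }
        return null;
    }

    public static void main(String[] args) {
        // Hypothetical stand-in for BytesRefHash.MaxBytesLengthExceededException,
        // used here so the sketch has no Lucene dependency.
        class HugeTermException extends RuntimeException {}

        Exception wrapped = new RuntimeException(new HugeTermException());

        System.out.println(wrapped instanceof HugeTermException);                  // false: hidden by the wrapper
        System.out.println(unwrapCause(wrapped, HugeTermException.class) != null); // true: found in the cause chain
    }
}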

@@ -26,6 +26,7 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.search.highlight.*;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.CollectionUtil;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.mapper.FieldMapper;
@@ -118,7 +119,7 @@ public class PlainHighlighter implements Highlighter {
                     }
                 }
             } catch (Exception e) {
-                if (e instanceof BytesRefHash.MaxBytesLengthExceededException) {
+                if (ExceptionsHelper.unwrap(e, BytesRefHash.MaxBytesLengthExceededException.class) != null) {
                     // this can happen if for example a field is not_analyzed and ignore_above option is set.
                     // the field will be ignored when indexing but the huge term is still in the source and
                     // the plain highlighter will parse the source and try to analyze it.
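
The comment in the hunk above describes how the huge term ends up in front of the plain highlighter: ignore_above keeps it out of the index, but it is still present in the _source, which the highlighter re-analyzes. A hypothetical reproduction of that failure mode against Lucene's highlighter directly (field name, analyzer choice, and sizes are made up for illustration and are not taken from this commit):

// Hypothetical sketch of the failure mode: a multi-term query highlighted over a
// single token far beyond Lucene's term-length limit (roughly 32 KB).
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;

import java.util.Arrays;

public final class HugeTermHighlightSketch {
    public static void main(String[] args) {
        // One huge value, as would come out of re-analyzing the _source of a
        // not_analyzed field whose value exceeded ignore_above.
        char[] chars = new char[100_000];
        Arrays.fill(chars, 'x');
        String hugeValue = "te" + new String(chars);

        Analyzer analyzer = new KeywordAnalyzer();   // keeps the whole value as one token
        QueryScorer scorer = new QueryScorer(new PrefixQuery(new Term("long_text", "te")), "long_text");
        Highlighter highlighter = new Highlighter(scorer);

        try {
            highlighter.getBestFragment(analyzer, "long_text", hugeValue);
        } catch (Exception e) {
            // Per the commit message, this surfaces as BytesRefHash.MaxBytesLengthExceededException
            // either directly or wrapped in a RuntimeException, depending on the Lucene version --
            // the case the unwrapping in the diff above handles.
            e.printStackTrace();
        }
    }
}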


@@ -127,6 +127,10 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest {
         search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("long_text").highlighterType(highlighter)).get();
         assertNoFailures(search);
         assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0));
+        search = client().prepareSearch().setQuery(prefixQuery("text", "te")).addHighlightedField(new Field("long_text").highlighterType(highlighter)).get();
+        assertNoFailures(search);
+        assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0));
     }

     @Test