mirror of https://github.com/apache/lucene.git
LUCENE-6385: add null check to WeightedSpanTermExtractor
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1671064 13f79535-47bb-0310-9956-ffa450edef68
parent 79bf72708b
commit db2faf0c25
@@ -40,10 +40,10 @@ API Changes
 
 New Features
 
-* LUCENE-6308: Span queries now share document conjunction/intersection
+* LUCENE-6308, LUCENE-6385: Span queries now share document conjunction/intersection
   code with boolean queries, and use two-phased iterators for
   faster intersection by avoiding loading positions in certain cases.
-  (Paul Elschot, Robert Muir via Mike McCandless)
+  (Paul Elschot, Terry Smith, Robert Muir via Mike McCandless)
 
 * LUCENE-6352: Added a new query time join to the join module that uses
   global ordinals, which is faster for subsequent joins between reopens.
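To make the "two-phased iterators" wording in the LUCENE-6308 entry concrete: documents are first intersected through a cheap approximation that never decodes positions, and the positional check runs only for candidates that survive that conjunction. The sketch below illustrates the shape of the optimization; the TwoPhaseSketch interface and the countMatches helper are made up for this example and are not the actual Lucene API.

import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;

// Hypothetical two-phase shape, for illustration only.
interface TwoPhaseSketch {
  DocIdSetIterator approximation();      // doc-id conjunction, no positions loaded
  boolean matches() throws IOException;  // positional confirmation for the current doc
}

class TwoPhaseDemo {
  // Intersect on the approximation first; pay for position decoding only on
  // documents that survive the cheap doc-id conjunction.
  static int countMatches(TwoPhaseSketch twoPhase) throws IOException {
    int count = 0;
    DocIdSetIterator approx = twoPhase.approximation();
    for (int doc = approx.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approx.nextDoc()) {
      if (twoPhase.matches()) {
        count++;
      }
    }
    return count;
  }
}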
@@ -306,6 +306,9 @@ public class WeightedSpanTermExtractor {
     }
     Bits acceptDocs = context.reader().getLiveDocs();
     final Spans spans = q.getSpans(context, acceptDocs, termContexts);
+    if (spans == null) {
+      return;
+    }
 
     // collect span positions
     while (spans.nextDoc() != Spans.NO_MORE_DOCS) {
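The new MissesTest below pins down the user-visible effect of this guard: getSpans can legitimately return null when none of the span's terms occur in the analyzed text, and without the check spans.nextDoc() would be called on a null reference. As a quick illustration outside the test framework, here is a minimal sketch of the same scenario; the class name HighlightMissSketch and the use of WhitespaceAnalyzer (standing in for the test-only MockAnalyzer) are assumptions of this sketch, while the Highlighter, QueryScorer, and getBestFragment calls mirror the ones the new test relies on.

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;

public class HighlightMissSketch {
  public static void main(String[] args) throws Exception {
    // "foo" immediately followed by "bar" in field "test"
    Query query = new SpanNearQuery(new SpanQuery[] {
        new SpanTermQuery(new Term("test", "foo")),
        new SpanTermQuery(new Term("test", "bar"))}, 0, true);
    Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(query));
    try (Analyzer analyzer = new WhitespaceAnalyzer()) {
      // Matching text: the span terms come back wrapped in <B> tags.
      System.out.println(highlighter.getBestFragment(analyzer, "test",
          "this is a foo bar example"));   // this is a <B>foo</B> <B>bar</B> example
      // Non-matching text: getSpans finds nothing here, the new null check makes
      // the extractor return quietly, and the best fragment is simply null.
      System.out.println(highlighter.getBestFragment(analyzer, "test",
          "this does not match"));         // null
    }
  }
}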
@@ -0,0 +1,82 @@
package org.apache.lucene.search.highlight;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.LuceneTestCase;

public class MissesTest extends LuceneTestCase {
  public void testTermQuery() throws IOException, InvalidTokenOffsetsException {
    try (Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {
      final Query query = new TermQuery(new Term("test", "foo"));
      final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(query));
      assertEquals("this is a <B>foo</B> bar example",
          highlighter.getBestFragment(analyzer, "test", "this is a foo bar example"));
      assertNull(highlighter.getBestFragment(analyzer, "test", "this does not match"));
    }
  }

  public void testBooleanQuery() throws IOException, InvalidTokenOffsetsException {
    try (Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {
      final BooleanQuery query = new BooleanQuery();
      query.add(new TermQuery(new Term("test", "foo")), Occur.MUST);
      query.add(new TermQuery(new Term("test", "bar")), Occur.MUST);
      final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(query));
      assertEquals("this is a <B>foo</B> <B>bar</B> example",
          highlighter.getBestFragment(analyzer, "test", "this is a foo bar example"));
      assertNull(highlighter.getBestFragment(analyzer, "test", "this does not match"));
    }
  }

  public void testPhraseQuery() throws IOException, InvalidTokenOffsetsException {
    try (Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {
      final PhraseQuery query = new PhraseQuery();
      query.add(new Term("test", "foo"));
      query.add(new Term("test", "bar"));
      final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(query));
      assertEquals("this is a <B>foo</B> <B>bar</B> example",
          highlighter.getBestFragment(analyzer, "test", "this is a foo bar example"));
      assertNull(highlighter.getBestFragment(analyzer, "test", "this does not match"));
    }
  }

  public void testSpanNearQuery() throws IOException, InvalidTokenOffsetsException {
    try (Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {
      final Query query = new SpanNearQuery(new SpanQuery[] {
          new SpanTermQuery(new Term("test", "foo")),
          new SpanTermQuery(new Term("test", "bar"))}, 0, true);
      final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(query));
      assertEquals("this is a <B>foo</B> <B>bar</B> example",
          highlighter.getBestFragment(analyzer, "test", "this is a foo bar example"));
      assertNull(highlighter.getBestFragment(analyzer, "test", "this does not match"));
    }
  }
}