SOLR-1485: improve TokenStream API usage

Author: Erik Hatcher
Date: 2017-05-02 19:41:06 -04:00
parent 0be8e17832
commit 5d42177b92
3 changed files with 31 additions and 22 deletions
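
For context on the commit title: Lucene's TokenStream contract requires reset() before the first incrementToken(), end() after the last one, and close() in all cases. Below is a minimal sketch of that workflow, not part of this commit; the analyzer, field name, and text are placeholders. It is the same pattern PayloadUtils.createSpanQuery adopts in the third file.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

class TokenStreamUsageSketch {
  static List<String> analyze(Analyzer analyzer, String field, String text) throws IOException {
    List<String> tokens = new ArrayList<>();
    // try-with-resources guarantees close() even if analysis throws
    try (TokenStream in = analyzer.tokenStream(field, text)) {
      CharTermAttribute termAtt = in.addAttribute(CharTermAttribute.class);
      in.reset();                     // required before the first incrementToken()
      while (in.incrementToken()) {
        tokens.add(termAtt.toString());
      }
      in.end();                       // required after the last incrementToken()
    }
    return tokens;
  }
}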

PayloadCheckQParserPlugin.java

@@ -17,6 +17,7 @@
 package org.apache.solr.search;
 
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.List;
@@ -67,7 +68,12 @@ public class PayloadCheckQParserPlugin extends QParserPlugin {
         FieldType ft = req.getCore().getLatestSchema().getFieldType(field);
         Analyzer analyzer = ft.getQueryAnalyzer();
-        SpanQuery query = PayloadUtils.createSpanQuery(field, value, analyzer);
+        SpanQuery query = null;
+        try {
+          query = PayloadUtils.createSpanQuery(field, value, analyzer);
+        } catch (IOException e) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+        }
         if (query == null) {
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "SpanQuery is null");
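
A hedged usage sketch (not part of the commit): a payload_check query issued through SolrJ. With this change, an IOException raised during query analysis is reported as a BAD_REQUEST SolrException rather than escaping unwrapped. The collection name, field name, and payload value are illustrative.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

class PayloadCheckQuerySketch {
  static long matches(SolrClient client) throws Exception {
    // "vals_dpf" and the payload value "A" are placeholders for a payload-bearing field
    SolrQuery q = new SolrQuery("{!payload_check f=vals_dpf payloads='A'}one");
    QueryResponse rsp = client.query("collection1", q);
    return rsp.getResults().getNumFound();
  }
}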

PayloadScoreQParserPlugin.java

@@ -17,6 +17,8 @@
 package org.apache.solr.search;
 
+import java.io.IOException;
+
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.queries.payloads.PayloadFunction;
 import org.apache.lucene.queries.payloads.PayloadScoreQuery;
@@ -61,7 +63,12 @@ public class PayloadScoreQParserPlugin extends QParserPlugin {
         FieldType ft = req.getCore().getLatestSchema().getFieldType(field);
         Analyzer analyzer = ft.getQueryAnalyzer();
-        SpanQuery query = PayloadUtils.createSpanQuery(field, value, analyzer);
+        SpanQuery query = null;
+        try {
+          query = PayloadUtils.createSpanQuery(field, value, analyzer);
+        } catch (IOException e) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+        }
         if (query == null) {
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "SpanQuery is null");
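
The payload_score parser gets the same wrapping. A similar hedged SolrJ sketch follows; the field name, func choice, and query term are placeholders.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

class PayloadScoreQuerySketch {
  static void run(SolrClient client) throws Exception {
    // "vals_dpf" is a placeholder payload field; func=max scores each doc by its highest payload
    SolrQuery q = new SolrQuery("{!payload_score f=vals_dpf func=max}A");
    q.setFields("id", "score");
    QueryResponse rsp = client.query("collection1", q);
    rsp.getResults().forEach(doc ->
        System.out.println(doc.getFieldValue("id") + " " + doc.getFieldValue("score")));
  }
}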

PayloadUtils.java

@@ -38,7 +38,6 @@ import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.analysis.TokenizerChain;
-import org.apache.solr.common.SolrException;
 import org.apache.solr.schema.FieldType;
 
 public class PayloadUtils {
@@ -100,34 +99,31 @@ public class PayloadUtils {
     return payloadFunction;
   }
 
-  public static SpanQuery createSpanQuery(String field, String value, Analyzer analyzer) {
-    SpanQuery query;
-    try {
-      // adapted this from QueryBuilder.createSpanQuery (which isn't currently public) and added reset(), end(), and close() calls
-      TokenStream in = analyzer.tokenStream(field, value);
-
+  /**
+   * The generated SpanQuery will be either a SpanTermQuery or an ordered, zero slop SpanNearQuery, depending
+   * on how many tokens are emitted.
+   */
+  public static SpanQuery createSpanQuery(String field, String value, Analyzer analyzer) throws IOException {
+    // adapted this from QueryBuilder.createSpanQuery (which isn't currently public) and added reset(), end(), and close() calls
+    List<SpanTermQuery> terms = new ArrayList<>();
+    try (TokenStream in = analyzer.tokenStream(field, value)) {
       in.reset();
       TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class);
-      List<SpanTermQuery> terms = new ArrayList<>();
       while (in.incrementToken()) {
         terms.add(new SpanTermQuery(new Term(field, termAtt.getBytesRef())));
       }
       in.end();
-      in.close();
+    }
 
-      if (terms.isEmpty()) {
-        query = null;
-      } else if (terms.size() == 1) {
-        query = terms.get(0);
-      } else {
-        query = new SpanNearQuery(terms.toArray(new SpanTermQuery[terms.size()]), 0, true);
-      }
-    } catch (IOException e) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
-    }
+    SpanQuery query;
+    if (terms.isEmpty()) {
+      query = null;
+    } else if (terms.size() == 1) {
+      query = terms.get(0);
+    } else {
+      query = new SpanNearQuery(terms.toArray(new SpanTermQuery[terms.size()]), 0, true);
+    }
     return query;
   }
 }
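
A small sketch (not from the commit) of the behavior the new Javadoc documents, assuming a WhitespaceAnalyzer and a placeholder field name: no tokens yields null, one token yields a SpanTermQuery, and several tokens yield an ordered, zero-slop SpanNearQuery. The checked IOException now propagates to the caller instead of being wrapped here.

import java.io.IOException;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
// assumes this sketch lives alongside PayloadUtils (its package path is not shown in this diff)

class CreateSpanQuerySketch {
  static void demo() throws IOException {
    Analyzer analyzer = new WhitespaceAnalyzer();
    SpanQuery none = PayloadUtils.createSpanQuery("f", "", analyzer);        // null: analyzer emits no tokens
    SpanQuery term = PayloadUtils.createSpanQuery("f", "one", analyzer);     // single token -> SpanTermQuery
    SpanQuery near = PayloadUtils.createSpanQuery("f", "one two", analyzer); // several tokens -> SpanNearQuery(slop=0, inOrder=true)
    assert none == null && term instanceof SpanTermQuery && near instanceof SpanNearQuery;
  }
}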