Search API: Support highlighting, closes #69.

This commit is contained in:
kimchy 2010-03-18 16:05:24 +02:00
parent fc3a805514
commit 28b0b5fc30
23 changed files with 973 additions and 40 deletions

View File

@ -201,7 +201,7 @@ public class SearchRequest implements ActionRequest {
* Allows to provide additional source that will be used as well.
*/
public SearchRequest extraSource(byte[] source) {
this.source = source;
this.extraSource = source;
return this;
}
@ -256,6 +256,13 @@ public class SearchRequest implements ActionRequest {
return this;
}
/**
* If set, will enable scrolling of the search request for the specified timeout.
*/
public SearchRequest scroll(TimeValue keepAlive) {
return scroll(new Scroll(keepAlive));
}
/**
* An optional timeout to control how long search is allowed to take.
*/

View File

@ -95,6 +95,10 @@ public class ShardSearchFailure implements ShardOperationFailedException {
return this.reason;
}
@Override public String toString() {
return "Search Failure Shard " + shardTarget + ", reason [" + reason + "]";
}
public static ShardSearchFailure readShardSearchFailure(DataInput in) throws IOException, ClassNotFoundException {
ShardSearchFailure shardSearchFailure = new ShardSearchFailure();
shardSearchFailure.readFrom(in);

View File

@ -122,6 +122,14 @@ public class DocumentFieldMappers implements Iterable<FieldMapper> {
return indexName(name);
}
public FieldMapper smartNameFieldMapper(String name) {
FieldMappers fieldMappers = smartName(name);
if (fieldMappers == null) {
return null;
}
return fieldMappers.mapper();
}
/**
* A smart analyzer used for indexing that takes into account specific analyzers configured
* per {@link FieldMapper}.

View File

@ -115,7 +115,13 @@ public class RestSearchAction extends BaseRestHandler {
private SearchRequest parseSearchRequest(RestRequest request) {
String[] indices = RestActions.splitIndices(request.param("index"));
SearchRequest searchRequest = new SearchRequest(indices, parseSearchSource(request));
SearchRequest searchRequest = new SearchRequest(indices);
// get the content, and put it in the body
if (request.hasContent()) {
searchRequest.source(request.contentAsBytes());
}
// add extra source based on the request parameters
searchRequest.extraSource(parseSearchSource(request));
searchRequest.searchType(parseSearchType(request.param("searchType")));
@ -157,13 +163,9 @@ public class RestSearchAction extends BaseRestHandler {
}
private byte[] parseSearchSource(RestRequest request) {
if (request.hasContent()) {
return request.contentAsBytes();
}
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
String queryString = request.param("q");
if (queryString == null) {
throw new ElasticSearchIllegalArgumentException("No query to execute, not in body, and not bounded to 'q' parameter");
}
if (queryString != null) {
QueryStringJsonQueryBuilder queryBuilder = JsonQueryBuilders.queryString(queryString);
queryBuilder.defaultField(request.param("df"));
queryBuilder.analyzer(request.param("analyzer"));
@ -177,9 +179,10 @@ public class RestSearchAction extends BaseRestHandler {
throw new ElasticSearchIllegalArgumentException("Unsupported defaultOperator [" + defaultOperator + "], can either be [OR] or [AND]");
}
}
searchSourceBuilder.query(queryBuilder);
}
// TODO add different parameters to the query
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(queryBuilder);
searchSourceBuilder.queryParserName(request.param("queryParserName"));
searchSourceBuilder.explain(request.paramAsBoolean("explain", false));

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.util.io.Streamable;
import org.elasticsearch.util.json.ToJson;
@ -75,8 +76,13 @@ public interface SearchHit extends Streamable, ToJson, Iterable<SearchHitField>
*/
Map<String, SearchHitField> fields();
/**
* A map of highlighted fields.
*/
Map<String, HighlightField> highlightFields();
/**
* The shard of the search hit.
*/
SearchShardTarget target();
SearchShardTarget shard();
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.facets.FacetsPhase;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.query.QueryPhase;
/**
@ -37,6 +38,7 @@ public class SearchModule extends AbstractModule {
bind(FacetsPhase.class).asEagerSingleton();
bind(QueryPhase.class).asEagerSingleton();
bind(FetchPhase.class).asEagerSingleton();
bind(HighlightPhase.class).asEagerSingleton();
bind(SearchService.class).asEagerSingleton();
bind(SearchPhaseController.class).asEagerSingleton();

View File

@ -55,6 +55,13 @@ public class SearchSourceBuilder {
return new SearchSourceFacetsBuilder();
}
/**
* A static factory method to construct new search highlights.
*/
public static SearchSourceHighlightBuilder highlight() {
return new SearchSourceHighlightBuilder();
}
private JsonQueryBuilder queryBuilder;
private int from = -1;
@ -71,6 +78,8 @@ public class SearchSourceBuilder {
private SearchSourceFacetsBuilder facetsBuilder;
private SearchSourceHighlightBuilder highlightBuilder;
private TObjectFloatHashMap<String> indexBoost = null;
@ -175,6 +184,14 @@ public class SearchSourceBuilder {
return this;
}
/**
* Adds highlight to perform as part of the search.
*/
public SearchSourceBuilder highlight(SearchSourceHighlightBuilder highlightBuilder) {
this.highlightBuilder = highlightBuilder;
return this;
}
/**
* Sets the fields to load and return as part of the search request. If none are specified,
the source of the document will be returned.
@ -277,6 +294,10 @@ public class SearchSourceBuilder {
facetsBuilder.toJson(builder, ToJson.EMPTY_PARAMS);
}
if (highlightBuilder != null) {
highlightBuilder.toJson(builder, ToJson.EMPTY_PARAMS);
}
builder.endObject();
return builder.copiedBytes();

View File

@ -0,0 +1,192 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.builder;
import org.elasticsearch.util.json.JsonBuilder;
import org.elasticsearch.util.json.ToJson;
import java.io.IOException;
import java.util.List;
import static com.google.common.collect.Lists.*;
/**
* A builder for search highlighting.
*
* @author kimchy (shay.banon)
* @see SearchSourceBuilder#highlight()
*/
public class SearchSourceHighlightBuilder implements ToJson {

    private List<Field> fields;

    private String tagsSchema;

    private String[] preTags;

    private String[] postTags;

    private String order;

    /**
     * Adds a field to be highlighted with default fragment size of 100 characters, and
     * default number of fragments of 5.
     *
     * @param name The field to highlight
     */
    public SearchSourceHighlightBuilder field(String name) {
        addField(new Field(name));
        return this;
    }

    /**
     * Adds a field to be highlighted with a provided fragment size (in characters), and
     * default number of fragments of 5.
     *
     * @param name         The field to highlight
     * @param fragmentSize The size of a fragment in characters
     */
    public SearchSourceHighlightBuilder field(String name, int fragmentSize) {
        addField(new Field(name).fragmentSize(fragmentSize));
        return this;
    }

    /**
     * Adds a field to be highlighted with a provided fragment size (in characters), and
     * a provided (maximum) number of fragments.
     *
     * @param name              The field to highlight
     * @param fragmentSize      The size of a fragment in characters
     * @param numberOfFragments The (maximum) number of fragments
     */
    public SearchSourceHighlightBuilder field(String name, int fragmentSize, int numberOfFragments) {
        addField(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments));
        return this;
    }

    /**
     * Set a tag scheme that encapsulates built-in pre and post tags. The allowed schemes
     * are <tt>styled</tt> and <tt>default</tt>.
     *
     * @param schemaName The tag scheme name
     */
    public SearchSourceHighlightBuilder tagsSchema(String schemaName) {
        this.tagsSchema = schemaName;
        return this;
    }

    /**
     * Explicitly set the pre tags that will be used for highlighting.
     */
    public SearchSourceHighlightBuilder preTags(String... preTags) {
        this.preTags = preTags;
        return this;
    }

    /**
     * Explicitly set the post tags that will be used for highlighting.
     */
    public SearchSourceHighlightBuilder postTags(String... postTags) {
        this.postTags = postTags;
        return this;
    }

    /**
     * The order of fragments per field. By default, ordered by the order in the
     * highlighted text. Can be <tt>score</tt>, which then it will be ordered
     * by score of the fragments.
     */
    public SearchSourceHighlightBuilder order(String order) {
        this.order = order;
        return this;
    }

    // Shared by all field(...) overloads: lazily creates the fields list on first use.
    private void addField(Field field) {
        if (fields == null) {
            fields = newArrayList();
        }
        fields.add(field);
    }

    @Override public void toJson(JsonBuilder builder, Params params) throws IOException {
        builder.startObject("highlight");
        if (tagsSchema != null) {
            builder.field("tagsSchema", tagsSchema);
        }
        if (preTags != null) {
            builder.array("preTags", preTags);
        }
        if (postTags != null) {
            builder.array("postTags", postTags);
        }
        if (order != null) {
            builder.field("order", order);
        }
        if (fields != null) {
            builder.startObject("fields");
            for (Field field : fields) {
                builder.startObject(field.name());
                // -1 means "not set": omit the entry so server-side defaults apply
                if (field.fragmentSize() != -1) {
                    builder.field("fragmentSize", field.fragmentSize());
                }
                if (field.numOfFragments() != -1) {
                    builder.field("numberOfFragments", field.numOfFragments());
                }
                builder.endObject();
            }
            builder.endObject();
        }
        builder.endObject();
    }

    /**
     * A single field to highlight, with optional fragment size and fragment count overrides.
     */
    private static class Field {
        private final String name;
        private int fragmentSize = -1;
        private int numOfFragments = -1;

        private Field(String name) {
            this.name = name;
        }

        public String name() {
            return name;
        }

        public int fragmentSize() {
            return fragmentSize;
        }

        public Field fragmentSize(int fragmentSize) {
            this.fragmentSize = fragmentSize;
            return this;
        }

        public int numOfFragments() {
            return numOfFragments;
        }

        public Field numOfFragments(int numOfFragments) {
            this.numOfFragments = numOfFragments;
            return this;
        }
    }
}

View File

@ -27,7 +27,6 @@ import org.elasticsearch.index.query.json.JsonIndexQueryParser;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContextFacets;
import org.elasticsearch.util.Booleans;
import java.util.List;

View File

@ -28,7 +28,6 @@ import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContextFacets;
import org.elasticsearch.util.lucene.Lucene;
import java.io.IOException;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.search.internal;
package org.elasticsearch.search.facets;
import org.apache.lucene.search.Query;

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.fetch;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.Fieldable;
@ -28,6 +29,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.InternalSearchHits;
@ -43,11 +45,22 @@ import java.util.Map;
*/
public class FetchPhase implements SearchPhase {
private final HighlightPhase highlightPhase;
@Inject public FetchPhase(HighlightPhase highlightPhase) {
this.highlightPhase = highlightPhase;
}
@Override public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of("explain", new ExplainParseElement(), "fields", new FieldsParseElement());
ImmutableMap.Builder<String, SearchParseElement> parseElements = ImmutableMap.builder();
parseElements.put("explain", new ExplainParseElement())
.put("fields", new FieldsParseElement())
.putAll(highlightPhase.parseElements());
return parseElements.build();
}
@Override public void preProcess(SearchContext context) {
highlightPhase.preProcess(context);
}
public void execute(SearchContext context) {
@ -63,7 +76,7 @@ public class FetchPhase implements SearchPhase {
byte[] source = extractSource(doc, documentMapper);
InternalSearchHit searchHit = new InternalSearchHit(uid.id(), uid.type(), source, null);
InternalSearchHit searchHit = new InternalSearchHit(docId, uid.id(), uid.type(), source, null);
hits[index] = searchHit;
for (Object oField : doc.getFields()) {
@ -102,6 +115,8 @@ public class FetchPhase implements SearchPhase {
index++;
}
context.fetchResult().hits(new InternalSearchHits(hits, context.queryResult().topDocs().totalHits));
highlightPhase.execute(context);
}
private void doExplanation(SearchContext context, int docId, InternalSearchHit searchHit) {

View File

@ -0,0 +1,100 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.highlight;
import org.elasticsearch.util.Strings;
import org.elasticsearch.util.io.Streamable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
/**
 * A single highlighted field: the field name together with the highlighted
 * fragments produced for it.
 *
 * @author kimchy (shay.banon)
 */
public class HighlightField implements Streamable {

    private String name;

    private String[] fragments;

    HighlightField() {
        // used by readHighlightField(...) for stream deserialization
    }

    public HighlightField(String name, String[] fragments) {
        this.name = name;
        this.fragments = fragments;
    }

    /**
     * The name of the field highlighted.
     */
    public String name() {
        return name;
    }

    /**
     * The highlighted fragments. <tt>null</tt> if failed to highlight (for example, the field is not stored).
     */
    public String[] fragments() {
        return fragments;
    }

    @Override public String toString() {
        return "[" + name + "], fragments[" + Arrays.toString(fragments) + "]";
    }

    /**
     * Static factory that reads a highlight field off the stream.
     */
    public static HighlightField readHighlightField(DataInput in) throws IOException, ClassNotFoundException {
        HighlightField result = new HighlightField();
        result.readFrom(in);
        return result;
    }

    @Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
        name = in.readUTF();
        boolean hasFragments = in.readBoolean();
        if (hasFragments) {
            int count = in.readInt();
            if (count == 0) {
                fragments = Strings.EMPTY_ARRAY;
            } else {
                fragments = new String[count];
                for (int i = 0; i < count; i++) {
                    fragments[i] = in.readUTF();
                }
            }
        }
    }

    @Override public void writeTo(DataOutput out) throws IOException {
        out.writeUTF(name);
        if (fragments == null) {
            // a marker boolean distinguishes "no fragments" from an empty array
            out.writeBoolean(false);
            return;
        }
        out.writeBoolean(true);
        out.writeInt(fragments.length);
        for (int i = 0; i < fragments.length; i++) {
            out.writeUTF(fragments[i]);
        }
    }
}

View File

@ -0,0 +1,91 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.highlight;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.vectorhighlight.*;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* @author kimchy (shay.banon)
*/
public class HighlightPhase implements SearchPhase {

    // Registers the "highlight" element of the search source; it is parsed by
    // HighlighterParseElement into a SearchContextHighlight on the context.
    @Override public Map<String, ? extends SearchParseElement> parseElements() {
        return ImmutableMap.of("highlight", new HighlighterParseElement());
    }

    @Override public void preProcess(SearchContext context) {
        // nothing to prepare before the search executes
    }

    // Runs after the fetch phase has produced hits: computes highlighted fragments
    // for every requested field of every fetched hit and attaches them to the hit.
    @Override public void execute(SearchContext context) throws ElasticSearchException {
        // no highlighting requested for this search
        if (context.highlight() == null) {
            return;
        }
        FragListBuilder fragListBuilder = new SimpleFragListBuilder();
        FragmentsBuilder fragmentsBuilder;
        // fragment ordering: by score when requested, otherwise by position in the text
        if (context.highlight().scoreOrdered()) {
            fragmentsBuilder = new ScoreOrderFragmentsBuilder(context.highlight().preTags(), context.highlight().postTags());
        } else {
            fragmentsBuilder = new SimpleFragmentsBuilder(context.highlight().preTags(), context.highlight().postTags());
        }
        // NOTE(review): the two boolean flags presumably are phraseHighlight/fieldMatch
        // of this Lucene version's FastVectorHighlighter — confirm against its javadoc
        FastVectorHighlighter highlighter = new FastVectorHighlighter(true, false, fragListBuilder, fragmentsBuilder);
        FieldQuery fieldQuery = highlighter.getFieldQuery(context.query());
        for (SearchHit hit : context.fetchResult().hits().hits()) {
            InternalSearchHit internalHit = (InternalSearchHit) hit;
            DocumentMapper documentMapper = context.mapperService().type(internalHit.type());
            int docId = internalHit.docId();
            Map<String, HighlightField> highlightFields = new HashMap<String, HighlightField>();
            for (SearchContextHighlight.ParsedHighlightField parsedHighlightField : context.highlight().fields()) {
                // resolve the Lucene index name of the field through the mapper when
                // possible; fall back to the requested field name otherwise
                String indexName = parsedHighlightField.field();
                FieldMapper mapper = documentMapper.mappers().smartNameFieldMapper(parsedHighlightField.field());
                if (mapper != null) {
                    indexName = mapper.names().indexName();
                }
                // fragments stays null when highlighting fails (rendered as null downstream)
                String[] fragments = null;
                try {
                    fragments = highlighter.getBestFragments(fieldQuery, context.searcher().getIndexReader(), docId, indexName, parsedHighlightField.fragmentCharSize(), parsedHighlightField.numberOfFragments());
                } catch (IOException e) {
                    throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + parsedHighlightField.field() + "]", e);
                }
                HighlightField highlightField = new HighlightField(parsedHighlightField.field(), fragments);
                highlightFields.put(highlightField.name(), highlightField);
            }
            internalHit.highlightFields(highlightFields);
        }
    }
}

View File

@ -0,0 +1,139 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.highlight;
import com.google.common.collect.Lists;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import java.util.List;
import static com.google.common.collect.Lists.*;
/**
* <pre>
* highlight : {
* tagsSchema : "styled",
* preTags : ["tag1", "tag2"],
* postTags : ["tag1", "tag2"],
* order : "score",
* fields : {
* field1 : { }
* field2 : { fragmentSize : 100, numOfFragments : 2 }
* }
* }
* </pre>
*
* @author kimchy (shay.banon)
*/
public class HighlighterParseElement implements SearchParseElement {

    private static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
    private static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};

    // The built-in "styled" schema: ten distinct css classes, hlt1 through hlt10.
    // (Fixed: the list previously contained "hlt2" twice and was missing "hlt10".)
    private static final String[] STYLED_PRE_TAG = {
            "<em class=\"hlt1\">", "<em class=\"hlt2\">", "<em class=\"hlt3\">",
            "<em class=\"hlt4\">", "<em class=\"hlt5\">", "<em class=\"hlt6\">",
            "<em class=\"hlt7\">", "<em class=\"hlt8\">", "<em class=\"hlt9\">",
            "<em class=\"hlt10\">"
    };
    public static final String[] STYLED_POST_TAGS = {"</em>"};

    /**
     * Parses the "highlight" element of the search source (see the class javadoc for
     * the expected structure) and stores the result on the context via
     * {@code context.highlight(...)}.
     */
    @Override public void parse(JsonParser jp, SearchContext context) throws Exception {
        JsonToken token;
        String topLevelFieldName = null;
        List<SearchContextHighlight.ParsedHighlightField> fields = newArrayList();
        String[] preTags = DEFAULT_PRE_TAGS;
        String[] postTags = DEFAULT_POST_TAGS;
        boolean scoreOrdered = false;
        while ((token = jp.nextToken()) != JsonToken.END_OBJECT) {
            if (token == JsonToken.FIELD_NAME) {
                topLevelFieldName = jp.getCurrentName();
            } else if (token == JsonToken.START_ARRAY) {
                // "preTags" / "postTags": arrays of tag strings
                if ("preTags".equals(topLevelFieldName)) {
                    List<String> preTagsList = Lists.newArrayList();
                    while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
                        preTagsList.add(jp.getText());
                    }
                    preTags = preTagsList.toArray(new String[preTagsList.size()]);
                } else if ("postTags".equals(topLevelFieldName)) {
                    List<String> postTagsList = Lists.newArrayList();
                    while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
                        postTagsList.add(jp.getText());
                    }
                    postTags = postTagsList.toArray(new String[postTagsList.size()]);
                }
            } else if (token == JsonToken.VALUE_STRING) {
                if ("order".equals(topLevelFieldName)) {
                    // only "score" switches ordering; anything else keeps text order
                    if ("score".equals(jp.getText())) {
                        scoreOrdered = true;
                    } else {
                        scoreOrdered = false;
                    }
                } else if ("tagsSchema".equals(topLevelFieldName)) {
                    // only "styled" is recognized; other values fall through to defaults
                    String schema = jp.getText();
                    if ("styled".equals(schema)) {
                        preTags = STYLED_PRE_TAG;
                        postTags = STYLED_POST_TAGS;
                    }
                }
            } else if (token == JsonToken.START_OBJECT) {
                if ("fields".equals(topLevelFieldName)) {
                    // each key under "fields" is a field name mapped to its options object
                    String highlightFieldName = null;
                    while ((token = jp.nextToken()) != JsonToken.END_OBJECT) {
                        if (token == JsonToken.FIELD_NAME) {
                            highlightFieldName = jp.getCurrentName();
                        } else if (token == JsonToken.START_OBJECT) {
                            String fieldName = null;
                            int fragmentSize = 100;   // default fragment size in characters
                            int numOfFragments = 5;   // default (maximum) number of fragments
                            while ((token = jp.nextToken()) != JsonToken.END_OBJECT) {
                                if (token == JsonToken.FIELD_NAME) {
                                    fieldName = jp.getCurrentName();
                                } else if (token == JsonToken.VALUE_STRING) {
                                    // numeric options may arrive as strings; parse them
                                    if ("fragmentSize".equals(fieldName)) {
                                        fragmentSize = Integer.parseInt(jp.getText());
                                    } else if ("numberOfFragments".equals(fieldName)) {
                                        numOfFragments = Integer.parseInt(jp.getText());
                                    }
                                } else if (token == JsonToken.VALUE_NUMBER_INT) {
                                    if ("fragmentSize".equals(fieldName)) {
                                        fragmentSize = jp.getIntValue();
                                    } else if ("numberOfFragments".equals(fieldName)) {
                                        numOfFragments = jp.getIntValue();
                                    }
                                }
                            }
                            fields.add(new SearchContextHighlight.ParsedHighlightField(highlightFieldName, fragmentSize, numOfFragments));
                        }
                    }
                }
            }
        }
        // NOTE(review): preTags is initialized to a default and never reset to null,
        // so this guard is currently unreachable; kept as a safety net.
        if (preTags != null && postTags == null) {
            throw new SearchParseException(context, "Highlighter preTags are set, but postTags are not set");
        }
        context.highlight(new SearchContextHighlight(fields, preTags, postTags, scoreOrdered));
    }
}

View File

@ -0,0 +1,86 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.highlight;
import java.util.List;
/**
 * Holds the parsed highlighting request of a search: the fields to highlight,
 * the pre/post tags to wrap matches with, and the fragment ordering mode.
 *
 * @author kimchy (shay.banon)
 */
public class SearchContextHighlight {

    private final List<ParsedHighlightField> fields;

    private final String[] preTags;

    private final String[] postTags;

    private final boolean scoreOrdered;

    public SearchContextHighlight(List<ParsedHighlightField> fields, String[] preTags, String[] postTags, boolean scoreOrdered) {
        this.fields = fields;
        this.preTags = preTags;
        this.postTags = postTags;
        this.scoreOrdered = scoreOrdered;
    }

    /**
     * The fields to highlight, with their per-field options.
     */
    public List<ParsedHighlightField> fields() {
        return fields;
    }

    /**
     * Tags inserted before each highlighted term.
     */
    public String[] preTags() {
        return preTags;
    }

    /**
     * Tags inserted after each highlighted term.
     */
    public String[] postTags() {
        return postTags;
    }

    /**
     * <tt>true</tt> when fragments should be ordered by score rather than by
     * their position in the highlighted text.
     */
    public boolean scoreOrdered() {
        return scoreOrdered;
    }

    /**
     * A single field to highlight with its fragment size (in characters) and
     * maximum number of fragments.
     */
    public static class ParsedHighlightField {

        private final String field;

        private final int fragmentCharSize;

        private final int numberOfFragments;

        public ParsedHighlightField(String field, int fragmentCharSize, int numberOfFragments) {
            this.field = field;
            this.fragmentCharSize = fragmentCharSize;
            this.numberOfFragments = numberOfFragments;
        }

        public String field() {
            return field;
        }

        public int fragmentCharSize() {
            return fragmentCharSize;
        }

        public int numberOfFragments() {
            return numberOfFragments;
        }
    }
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.util.Nullable;
import org.elasticsearch.util.Unicode;
import org.elasticsearch.util.json.JsonBuilder;
@ -36,6 +37,7 @@ import java.util.Iterator;
import java.util.Map;
import static org.elasticsearch.search.SearchShardTarget.*;
import static org.elasticsearch.search.highlight.HighlightField.*;
import static org.elasticsearch.search.internal.InternalSearchHitField.*;
import static org.elasticsearch.util.json.Jackson.*;
import static org.elasticsearch.util.lucene.Lucene.*;
@ -45,13 +47,17 @@ import static org.elasticsearch.util.lucene.Lucene.*;
*/
public class InternalSearchHit implements SearchHit {
private transient int docId;
private String id;
private String type;
private byte[] source;
private Map<String, SearchHitField> fields;
private Map<String, SearchHitField> fields = ImmutableMap.of();
private Map<String, HighlightField> highlightFields = ImmutableMap.of();
private Explanation explanation;
@ -61,13 +67,18 @@ public class InternalSearchHit implements SearchHit {
}
public InternalSearchHit(String id, String type, byte[] source, Map<String, SearchHitField> fields) {
public InternalSearchHit(int docId, String id, String type, byte[] source, Map<String, SearchHitField> fields) {
this.docId = docId;
this.id = id;
this.type = type;
this.source = source;
this.fields = fields;
}
public int docId() {
return this.docId;
}
@Override public String index() {
return shard.index();
}
@ -114,6 +125,14 @@ public class InternalSearchHit implements SearchHit {
this.fields = fields;
}
@Override public Map<String, HighlightField> highlightFields() {
return this.highlightFields;
}
public void highlightFields(Map<String, HighlightField> highlightFields) {
this.highlightFields = highlightFields;
}
@Override public Explanation explanation() {
return explanation;
}
@ -122,7 +141,7 @@ public class InternalSearchHit implements SearchHit {
this.explanation = explanation;
}
public SearchShardTarget shard() {
@Override public SearchShardTarget shard() {
return shard;
}
@ -130,10 +149,6 @@ public class InternalSearchHit implements SearchHit {
this.shard = target;
}
@Override public SearchShardTarget target() {
return null;
}
@Override public void toJson(JsonBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("_index", shard.index());
@ -145,9 +160,9 @@ public class InternalSearchHit implements SearchHit {
builder.raw(", \"_source\" : ");
builder.raw(source());
}
if (fields() != null && !fields().isEmpty()) {
if (fields != null && !fields.isEmpty()) {
builder.startObject("fields");
for (SearchHitField field : fields().values()) {
for (SearchHitField field : fields.values()) {
if (field.values().isEmpty()) {
continue;
}
@ -164,6 +179,22 @@ public class InternalSearchHit implements SearchHit {
}
builder.endObject();
}
if (highlightFields != null && !highlightFields.isEmpty()) {
builder.startObject("highlight");
for (HighlightField field : highlightFields.values()) {
builder.field(field.name());
if (field.fragments() == null) {
builder.nullValue();
} else {
builder.startArray();
for (String fragment : field.fragments()) {
builder.value(fragment);
}
builder.endArray();
}
}
builder.endObject();
}
if (explanation() != null) {
builder.field("_explanation");
buildExplanation(builder, explanation());
@ -239,6 +270,37 @@ public class InternalSearchHit implements SearchHit {
}
fields = builder.build();
}
size = in.readInt();
if (size == 0) {
highlightFields = ImmutableMap.of();
} else if (size == 1) {
HighlightField field = readHighlightField(in);
highlightFields = ImmutableMap.of(field.name(), field);
} else if (size == 2) {
HighlightField field1 = readHighlightField(in);
HighlightField field2 = readHighlightField(in);
highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2);
} else if (size == 3) {
HighlightField field1 = readHighlightField(in);
HighlightField field2 = readHighlightField(in);
HighlightField field3 = readHighlightField(in);
highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3);
} else if (size == 4) {
HighlightField field1 = readHighlightField(in);
HighlightField field2 = readHighlightField(in);
HighlightField field3 = readHighlightField(in);
HighlightField field4 = readHighlightField(in);
highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3, field4.name(), field4);
} else {
ImmutableMap.Builder<String, HighlightField> builder = ImmutableMap.builder();
for (int i = 0; i < size; i++) {
HighlightField field = readHighlightField(in);
builder.put(field.name(), field);
}
highlightFields = builder.build();
}
if (in.readBoolean()) {
shard = readSearchShardTarget(in);
}
@ -267,6 +329,14 @@ public class InternalSearchHit implements SearchHit {
hitField.writeTo(out);
}
}
if (highlightFields == null) {
out.writeInt(0);
} else {
out.writeInt(highlightFields.size());
for (HighlightField highlightField : highlightFields.values()) {
highlightField.writeTo(out);
}
}
if (shard == null) {
out.writeBoolean(false);
} else {

View File

@ -33,7 +33,9 @@ import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facets.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.util.TimeValue;
import org.elasticsearch.util.lease.Releasable;
@ -89,6 +91,8 @@ public class SearchContext implements Releasable {
private SearchContextFacets facets;
private SearchContextHighlight highlight;
private boolean queryRewritten;
@ -165,8 +169,12 @@ public class SearchContext implements Releasable {
return this;
}
public Engine.Searcher engineSearcher() {
return this.engineSearcher;
/**
 * Returns the highlighting configuration for this search request, or <tt>null</tt>
 * if highlighting was not requested.
 */
public SearchContextHighlight highlight() {
return highlight;
}

/**
 * Sets the highlighting configuration parsed from the search source
 * (fields, tags, ordering) to be applied during the fetch phase.
 */
public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}
public ContextIndexSearcher searcher() {

View File

@ -24,6 +24,7 @@ import com.google.inject.Inject;
import org.apache.lucene.search.*;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.facets.FacetsPhase;
import org.elasticsearch.search.internal.SearchContext;
@ -54,6 +55,9 @@ public class QueryPhase implements SearchPhase {
}
/**
 * Pre-processes the search context before the query phase executes: validates that
 * a query was parsed, applies the request-level boost, and lets the facets phase
 * perform its own pre-processing.
 */
@Override public void preProcess(SearchContext context) {
// Fail fast with a parse exception rather than an NPE below when the source had no query
if (context.query() == null) {
throw new SearchParseException(context, "No query specified in search request");
}
// Fold the request-level boost into the top-level query boost (multiplicative)
context.query().setBoost(context.query().getBoost() * context.queryBoost());
facetsPhase.preProcess(context);
}

View File

@ -78,6 +78,24 @@ public abstract class JsonBuilder<T extends JsonBuilder> {
return builder;
}
/**
 * Writes a named JSON array field containing the given string values.
 *
 * @param name   the field name for the array
 * @param values the string elements to write, in order
 * @return this builder, for chaining
 */
public T array(String name, String... values) throws IOException {
startArray(name);
for (int i = 0; i < values.length; i++) {
value(values[i]);
}
endArray();
return builder;
}
/**
 * Writes a named JSON array field containing the given values, each serialized
 * according to its runtime type.
 *
 * @param name   the field name for the array
 * @param values the elements to write, in order
 * @return this builder, for chaining
 */
public T array(String name, Object... values) throws IOException {
startArray(name);
for (int i = 0; i < values.length; i++) {
value(values[i]);
}
endArray();
return builder;
}
public T startArray(String name) throws IOException {
field(name);
startArray();
@ -227,6 +245,11 @@ public abstract class JsonBuilder<T extends JsonBuilder> {
return builder;
}
/**
 * Writes a JSON <tt>null</tt> literal as the current value.
 *
 * @return this builder, for chaining
 */
public T nullValue() throws IOException {
generator.writeNull();
return builder;
}
public T raw(String json) throws IOException {
generator.writeRaw(json);
return builder;

View File

@ -28,15 +28,20 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.integration.AbstractServersTests;
import org.elasticsearch.util.Unicode;
import org.elasticsearch.util.json.JsonBuilder;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.action.search.SearchType.*;
import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.index.query.json.JsonQueryBuilders.*;
import static org.elasticsearch.search.builder.SearchSourceBuilder.*;
import static org.elasticsearch.util.TimeValue.*;
import static org.elasticsearch.util.json.JsonBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
@ -75,6 +80,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.from(0).size(60).explain(true);
SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(DFS_QUERY_THEN_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60));
@ -102,6 +108,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.from(0).size(60).explain(true).sort("age", false);
SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(DFS_QUERY_THEN_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60));
for (int i = 0; i < 60; i++) {
@ -126,6 +133,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.from(0).size(60).explain(true);
SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(QUERY_THEN_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60));
for (int i = 0; i < 60; i++) {
@ -150,6 +158,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.from(0).size(60).explain(true).sort("age", false);
SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(QUERY_THEN_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60));
for (int i = 0; i < 60; i++) {
@ -174,6 +183,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.from(0).size(20).explain(true);
SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(QUERY_AND_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60)); // 20 per shard
for (int i = 0; i < 60; i++) {
@ -199,6 +209,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.from(0).size(20).explain(true);
SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(DFS_QUERY_AND_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60)); // 20 per shard
for (int i = 0; i < 60; i++) {
@ -225,6 +236,7 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
.facets(facets().facet("all", termQuery("multi", "test"), true).facet("test1", termQuery("name", "test1")));
SearchResponse searchResponse = client.search(searchRequest("test").source(sourceBuilder)).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.facets().countFacet("test1").count(), equalTo(1l));
@ -248,15 +260,21 @@ public class TransportTwoServersSearchTests extends AbstractServersTests {
}
private void index(Client client, String id, String nameValue, int age) {
private void index(Client client, String id, String nameValue, int age) throws IOException {
client.index(Requests.indexRequest("test").type("type1").id(id).source(source(id, nameValue, age))).actionGet();
}
private String source(String id, String nameValue, int age) {
private JsonBuilder source(String id, String nameValue, int age) throws IOException {
StringBuilder multi = new StringBuilder().append(nameValue);
for (int i = 0; i < age; i++) {
multi.append(" ").append(nameValue);
}
return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + (nameValue + id) + "\", age : " + age + ", multi : \"" + multi.toString() + "\", _boost : " + (age * 10) + " } }";
return binaryJsonBuilder().startObject()
.field("id", id)
.field("name", nameValue + id)
.field("age", age)
.field("multi", multi.toString())
.field("_boost", age * 10)
.endObject();
}
}

View File

@ -0,0 +1,129 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.integration.search.highlight;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.integration.AbstractServersTests;
import org.elasticsearch.util.json.JsonBuilder;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.action.search.SearchType.*;
import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.index.query.json.JsonQueryBuilders.*;
import static org.elasticsearch.search.builder.SearchSourceBuilder.*;
import static org.elasticsearch.util.TimeValue.*;
import static org.elasticsearch.util.json.JsonBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (shay.banon)
*/
/**
 * Integration test verifying that search hits carry highlighted fragments when
 * highlighting is requested on the <tt>_all</tt> field across a two-server cluster.
 */
public class HighlightSearchTests extends AbstractServersTests {

// Client bound to "server1"; all index/search calls in the test go through it
private Client client;

@BeforeClass public void createServers() throws Exception {
startServer("server1");
startServer("server2");
client = getClient();
client.admin().indices().create(createIndexRequest("test")).actionGet();
logger.info("Update mapping (_all to store and have term vectors)");
// Highlighting on _all requires it to be stored with positions/offsets term vectors
client.admin().indices().putMapping(putMappingRequest("test").mappingSource(mapping())).actionGet();
// Index 100 docs; the "multi" field repeats the name value (i + 1) times, so higher
// ids score/boost higher (via _boost) and sort first in the score-ordered search below
for (int i = 0; i < 100; i++) {
index(client("server1"), Integer.toString(i), "test", i);
}
// Make all indexed docs visible to search before the tests run
client.admin().indices().refresh(refreshRequest("test")).actionGet();
}

@AfterClass public void closeServers() {
client.close();
closeAllServers();
}

protected Client getClient() {
return client("server1");
}

@Test public void testSimpleHighlighting() throws Exception {
// Request highlighting on _all with custom pre/post tags, ordered by score
SearchSourceBuilder source = searchSource()
.query(termQuery("multi", "test"))
.from(0).size(60).explain(true)
.highlight(highlight().field("_all").order("score").preTags("<xxx>").postTags("</xxx>"));

SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(QUERY_THEN_FETCH).scroll(timeValueMinutes(10))).actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(60));
// Hits come back highest-boost first, so ids descend from 99
for (int i = 0; i < 60; i++) {
SearchHit hit = searchResponse.hits().hits()[i];
// System.out.println(hit.target() + ": " + hit.explanation());
assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(100 - i - 1)));
// System.out.println(hit.shard() + ": " + hit.highlightFields());
// Exactly the one requested field (_all) is highlighted, with at least one fragment
assertThat(hit.highlightFields().size(), equalTo(1));
assertThat(hit.highlightFields().get("_all").fragments().length, greaterThan(0));
}

// Second page via scroll: remaining 40 hits, still in descending-id order.
// NOTE(review): highlight fragments are not re-asserted on the scrolled page.
searchResponse = client.searchScroll(searchScrollRequest(searchResponse.scrollId())).actionGet();

assertThat(searchResponse.hits().totalHits(), equalTo(100l));
assertThat(searchResponse.hits().hits().length, equalTo(40));
for (int i = 0; i < 40; i++) {
SearchHit hit = searchResponse.hits().hits()[i];
assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(100 - 60 - 1 - i)));
}
}

// Indexes a single doc of type "type1" built by source(...) below
private void index(Client client, String id, String nameValue, int age) throws IOException {
client.index(Requests.indexRequest("test").type("type1").id(id).source(source(id, nameValue, age))).actionGet();
}

// Mapping that stores _all with positions/offsets term vectors so it can be highlighted
// ("allField" is the mapping key for the _all field configuration in this codebase)
public JsonBuilder mapping() throws IOException {
return binaryJsonBuilder().startObject().startObject("type1")
.startObject("allField").field("store", "yes").field("termVector", "with_positions_offsets").endObject()
.endObject().endObject();
}

// Builds a doc whose "multi" field repeats nameValue (age + 1) times and whose
// _boost grows with age, making higher ids rank first in score-ordered searches
private JsonBuilder source(String id, String nameValue, int age) throws IOException {
StringBuilder multi = new StringBuilder().append(nameValue);
for (int i = 0; i < age; i++) {
multi.append(" ").append(nameValue);
}
return binaryJsonBuilder().startObject()
.field("id", id)
.field("name", nameValue + id)
.field("age", age)
.field("multi", multi.toString())
.field("_boost", age * 10)
.endObject();
}
}

View File

@ -0,0 +1,9 @@
cluster:
routing:
schedule: 100ms
index:
numberOfShards: 3
numberOfReplicas: 0
routing:
# Use simple hashing since we want an even distribution and our ids are simple incrementing numbers
hash.type: simple