Added highlighter to percolate api.

The highlighter in the percolate api generates highlight snippets from the document being percolated. If highlighting is enabled, highlight snippets are generated for each matching query.
All highlight options that are supported via the search api are also supported in the percolate api, since the percolate api embeds the same highlighter infrastructure as the search api.
The `size` option is required if highlighting is specified in the percolate api; other than that, the `highlight` request part can simply be placed in the percolate api request body.
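
For reference, a minimal client-side sketch of the new option (hypothetical: preparePercolate(), setIndices(), setDocumentType(), setSize() and setSource() are assumed builder method names here; only setHighlightBuilder() is introduced by this commit):

// Sketch: percolate a document and request highlight snippets per matching query.
PercolateResponse response = client.preparePercolate()
        .setIndices("my-index")
        .setDocumentType("my-type")
        .setSize(10) // required: the percolate api rejects highlighting without a size
        .setHighlightBuilder(new HighlightBuilder()
                .field("message")
                .preTags("<em>")
                .postTags("</em>"))
        .setSource("{\"doc\": {\"message\": \"some text to percolate and highlight\"}}")
        .execute().actionGet();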

Closes #3574
Martijn van Groningen 2013-08-26 16:28:27 +02:00
parent df3922a22a
commit 3ca0239668
17 changed files with 1954 additions and 642 deletions

View File

@@ -41,6 +41,7 @@ import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.threadpool.ThreadPool;
@@ -169,7 +170,7 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
if (request.querySource().length() == 0) {
valid = true;
} else {
SearchContext.setCurrent(new SearchContext(0,
SearchContext.setCurrent(new DefaultSearchContext(0,
new ShardSearchRequest().types(request.types()),
null, indexShard.searcher(), indexService, indexShard,
scriptService, cacheRecycler));

View File

@@ -41,6 +41,7 @@ import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.QueryPhaseExecutionException;
@@ -151,7 +152,7 @@ public class TransportCountAction extends TransportBroadcastOperationAction<Coun
IndexShard indexShard = indexService.shardSafe(request.shardId());
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
SearchContext context = new SearchContext(0,
SearchContext context = new DefaultSearchContext(0,
new ShardSearchRequest().types(request.types()).filteringAliases(request.filteringAliases()),
shardTarget, indexShard.searcher(), indexService, indexShard,
scriptService, cacheRecycler);

View File

@@ -43,6 +43,7 @@ import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.rescore.RescoreSearchContext;
@@ -98,7 +99,7 @@ public class TransportExplainAction extends TransportShardSingleOperationAction<
return new ExplainResponse(false);
}
SearchContext context = new SearchContext(
SearchContext context = new DefaultSearchContext(
0,
new ShardSearchRequest().types(new String[]{request.type()})
.filteringAliases(request.filteringAlias()),

View File

@@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import java.util.Map;
@@ -178,6 +179,11 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder<Pe
return this;
}
public PercolateRequestBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
sourceBuilder().setHighlightBuilder(highlightBuilder);
return this;
}
private PercolateSourceBuilder sourceBuilder() {
if (sourceBuilder == null) {
sourceBuilder = new PercolateSourceBuilder();

View File

@@ -31,11 +31,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.percolator.PercolatorService;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.search.highlight.HighlightField;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.*;
/**
*
@@ -116,17 +115,33 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
boolean justIds = "ids".equals(params.param("percolate_format"));
if (justIds) {
for (PercolateResponse.Match match : matches) {
builder.value(match.id());
builder.value(match.getId());
}
} else {
for (PercolateResponse.Match match : matches) {
builder.startObject();
builder.field(Fields._INDEX, match.getIndex());
builder.field(Fields._ID, match.getId());
float score = match.score();
float score = match.getScore();
if (score != PercolatorService.NO_SCORE) {
builder.field(Fields._SCORE, match.getScore());
}
if (match.getHighlightFields() != null) {
builder.startObject(Fields.HIGHLIGHT);
for (HighlightField field : match.getHighlightFields().values()) {
builder.field(field.name());
if (field.fragments() == null) {
builder.nullValue();
} else {
builder.startArray();
for (Text fragment : field.fragments()) {
builder.value(fragment);
}
builder.endArray();
}
}
builder.endObject();
}
builder.endObject();
}
}
@@ -166,6 +181,14 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
private Text index;
private Text id;
private float score;
private Map<String, HighlightField> hl;
public Match(Text index, Text id, float score, Map<String, HighlightField> hl) {
this.id = id;
this.score = score;
this.index = index;
this.hl = hl;
}
public Match(Text index, Text id, float score) {
this.id = id;
@@ -176,28 +199,20 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
Match() {
}
public Text index() {
public Text getIndex() {
return index;
}
public Text id() {
public Text getId() {
return id;
}
public float score() {
public float getScore() {
return score;
}
public Text getIndex() {
return index();
}
public Text getId() {
return id();
}
public float getScore() {
return score();
public Map<String, HighlightField> getHighlightFields() {
return hl;
}
@Override
@@ -205,6 +220,13 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
id = in.readText();
index = in.readText();
score = in.readFloat();
int size = in.readVInt();
if (size > 0) {
hl = new HashMap<String, HighlightField>(size);
for (int j = 0; j < size; j++) {
hl.put(in.readString(), HighlightField.readHighlightField(in));
}
}
}
@Override
@@ -212,6 +234,15 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
out.writeText(id);
out.writeText(index);
out.writeFloat(score);
if (hl != null) {
out.writeVInt(hl.size());
for (Map.Entry<String, HighlightField> entry : hl.entrySet()) {
out.writeString(entry.getKey());
entry.getValue().writeTo(out);
}
} else {
out.writeVInt(0);
}
}
}
@@ -222,6 +253,7 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
static final XContentBuilderString _ID = new XContentBuilderString("_id");
static final XContentBuilderString _SCORE = new XContentBuilderString("_score");
static final XContentBuilderString HIGHLIGHT = new XContentBuilderString("highlight");
}
}
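
On the consuming side, the new getters expose one highlight map per match. A sketch of reading them back (assumes the response iterates over its matches, as the loops above suggest; Arrays is java.util.Arrays):

// Sketch: print each matched query with its highlight snippets.
for (PercolateResponse.Match match : response) {
    System.out.println(match.getIndex() + "/" + match.getId() + " score=" + match.getScore());
    if (match.getHighlightFields() != null) { // null when highlighting was not requested
        for (HighlightField field : match.getHighlightFields().values()) {
            System.out.println("  " + field.name() + ": " + Arrays.toString(field.fragments()));
        }
    }
}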

View File

@@ -23,9 +23,14 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.percolator.PercolatorService;
import org.elasticsearch.percolator.PercolateContext;
import org.elasticsearch.search.highlight.HighlightField;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*/
@@ -36,13 +41,24 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
private long count;
private float[] scores;
private BytesRef[] matches;
private List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
private byte percolatorTypeId;
private int requestedSize;
public PercolateShardResponse() {
PercolateShardResponse() {
}
public PercolateShardResponse(BytesRef[] matches, long count, float[] scores, PercolatorService.PercolateContext context, String index, int shardId) {
public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, float[] scores, PercolateContext context, String index, int shardId) {
super(index, shardId);
this.matches = matches;
this.hls = hls;
this.count = count;
this.scores = scores;
this.percolatorTypeId = context.percolatorTypeId;
this.requestedSize = context.size;
}
public PercolateShardResponse(BytesRef[] matches, long count, float[] scores, PercolateContext context, String index, int shardId) {
super(index, shardId);
this.matches = matches;
this.count = count;
@@ -51,7 +67,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
this.requestedSize = context.size;
}
public PercolateShardResponse(BytesRef[] matches, long count, PercolatorService.PercolateContext context, String index, int shardId) {
public PercolateShardResponse(BytesRef[] matches, long count, PercolateContext context, String index, int shardId) {
super(index, shardId);
this.matches = matches;
this.scores = new float[0];
@@ -60,7 +76,17 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
this.requestedSize = context.size;
}
public PercolateShardResponse(long count, PercolatorService.PercolateContext context, String index, int shardId) {
public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, PercolateContext context, String index, int shardId) {
super(index, shardId);
this.matches = matches;
this.hls = hls;
this.scores = new float[0];
this.count = count;
this.percolatorTypeId = context.percolatorTypeId;
this.requestedSize = context.size;
}
public PercolateShardResponse(long count, PercolateContext context, String index, int shardId) {
super(index, shardId);
this.count = count;
this.matches = EMPTY;
@@ -69,7 +95,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
this.requestedSize = context.size;
}
public PercolateShardResponse(PercolatorService.PercolateContext context, String index, int shardId) {
public PercolateShardResponse(PercolateContext context, String index, int shardId) {
super(index, shardId);
this.matches = EMPTY;
this.scores = new float[0];
@@ -92,6 +118,10 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
return requestedSize;
}
public List<Map<String, HighlightField>> hls() {
return hls;
}
public byte percolatorTypeId() {
return percolatorTypeId;
}
@@ -103,6 +133,8 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
percolatorTypeId = in.readByte();
requestedSize = in.readVInt();
count = in.readVLong();
matches = new BytesRef[in.readVInt()];
for (int i = 0; i < matches.length; i++) {
@@ -112,13 +144,22 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
for (int i = 0; i < scores.length; i++) {
scores[i] = in.readFloat();
}
percolatorTypeId = in.readByte();
requestedSize = in.readVInt();
int size = in.readVInt();
for (int i = 0; i < size; i++) {
int mSize = in.readVInt();
Map<String, HighlightField> fields = new HashMap<String, HighlightField>();
for (int j = 0; j < mSize; j++) {
fields.put(in.readString(), HighlightField.readHighlightField(in));
}
hls.add(fields);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeByte(percolatorTypeId);
out.writeVInt(requestedSize);
out.writeVLong(count);
out.writeVInt(matches.length);
for (BytesRef match : matches) {
@@ -128,7 +169,13 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
for (float score : scores) {
out.writeFloat(score);
}
out.writeByte(percolatorTypeId);
out.writeVLong(requestedSize);
out.writeVInt(hls.size());
for (Map<String, HighlightField> hl : hls) {
out.writeVInt(hl.size());
for (Map.Entry<String, HighlightField> entry : hl.entrySet()) {
out.writeString(entry.getKey());
entry.getValue().writeTo(out);
}
}
}
}
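
The wire format above is a count-prefixed list of (field name, highlight field) pairs per match, read back in the same order. A self-contained sketch of that pattern using plain java.io streams in place of StreamInput/StreamOutput, with String standing in for HighlightField:

import java.io.*;
import java.util.*;

class HighlightWireSketch {
    // Write the count first so the reader can pre-size its map.
    static void write(DataOutputStream out, Map<String, String> hl) throws IOException {
        out.writeInt(hl.size());
        for (Map.Entry<String, String> entry : hl.entrySet()) {
            out.writeUTF(entry.getKey());   // field name
            out.writeUTF(entry.getValue()); // highlight fragment
        }
    }

    // Mirror of write(): read the count, then each (key, value) pair.
    static Map<String, String> read(DataInputStream in) throws IOException {
        int size = in.readInt();
        Map<String, String> hl = new HashMap<String, String>(size);
        for (int i = 0; i < size; i++) {
            hl.put(in.readUTF(), in.readUTF());
        }
        return hl;
    }
}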

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.elasticsearch.search.highlight.HighlightBuilder;
import java.io.IOException;
import java.util.HashMap;
@@ -41,6 +42,7 @@ public class PercolateSourceBuilder implements ToXContent {
private Integer size;
private Boolean sort;
private Boolean score;
private HighlightBuilder highlightBuilder;
public DocBuilder percolateDocument() {
if (docBuilder == null) {
@@ -91,6 +93,11 @@ public class PercolateSourceBuilder implements ToXContent {
return this;
}
public PercolateSourceBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
this.highlightBuilder = highlightBuilder;
return this;
}
public BytesReference buildAsBytes(XContentType contentType) throws SearchSourceBuilderException {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
@@ -124,6 +131,9 @@ public class PercolateSourceBuilder implements ToXContent {
if (score != null) {
builder.field("score", score);
}
if (highlightBuilder != null) {
highlightBuilder.toXContent(builder, params);
}
builder.endObject();
return builder;
}

View File

@@ -196,10 +196,10 @@ public class TransportPercolateAction extends TransportBroadcastOperationAction<
protected PercolateShardResponse shardOperation(PercolateShardRequest request) throws ElasticSearchException {
try {
return percolatorService.percolate(request);
} catch (Throwable t) {
logger.trace("[{}][{}] failed to percolate", t, request.index(), request.shardId());
} catch (Throwable e) {
logger.trace("[{}][{}] failed to percolate", e, request.index(), request.shardId());
ShardId shardId = new ShardId(request.index(), request.shardId());
throw new PercolateException(shardId, "failed to percolate", t);
throw new PercolateException(shardId, "failed to percolate", e);
}
}

View File

@@ -21,8 +21,8 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.elasticsearch.common.bytes.BytesReference;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.bytes.BytesReference;
import java.util.List;
@@ -47,7 +47,7 @@ public class ParsedDocument {
private final Analyzer analyzer;
private final BytesReference source;
private BytesReference source;
private boolean mappingsModified;
@@ -111,6 +111,10 @@
return this.source;
}
public void setSource(BytesReference source) {
this.source = source;
}
public ParsedDocument parent(String parent) {
this.parent = parent;
return this;

View File

@@ -0,0 +1,656 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.elasticsearch.percolator;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fieldvisitor.JustSourceFieldsVisitor;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedFilter;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.*;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
/**
*/
public class PercolateContext extends SearchContext {
public boolean limit;
public int size;
public boolean score;
public boolean sort;
public byte percolatorTypeId;
private final PercolateShardRequest request;
private final SearchShardTarget searchShardTarget;
private final IndexService indexService;
private final IndexFieldDataService fieldDataService;
private final IndexShard indexShard;
private final ConcurrentMap<HashedBytesRef, Query> percolateQueries;
private String[] types;
private Engine.Searcher docEngineSearcher;
private SearchContextHighlight highlight;
private SearchLookup searchLookup;
private ParsedQuery parsedQuery;
private Query query;
private boolean queryRewritten;
private Query percolateQuery;
private FetchSubPhase.HitContext hitContext;
public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard, IndexService indexService) {
this.request = request;
this.indexShard = indexShard;
this.indexService = indexService;
this.fieldDataService = indexService.fieldData();
this.searchShardTarget = searchShardTarget;
this.percolateQueries = indexShard.percolateRegistry().percolateQueries();
this.types = new String[]{request.documentType()};
}
public void initialize(final MemoryIndex memoryIndex, ParsedDocument parsedDocument) {
final IndexSearcher docSearcher = memoryIndex.createSearcher();
final IndexReader topLevelReader = docSearcher.getIndexReader();
AtomicReaderContext readerContext = topLevelReader.leaves().get(0);
docEngineSearcher = new Engine.Searcher() {
@Override
public IndexReader reader() {
return topLevelReader;
}
@Override
public IndexSearcher searcher() {
return docSearcher;
}
@Override
public boolean release() throws ElasticSearchException {
try {
docSearcher.getIndexReader().close();
memoryIndex.reset();
} catch (IOException e) {
throw new ElasticSearchException("failed to close percolator in-memory index", e);
}
return true;
}
};
lookup().setNextReader(readerContext);
lookup().setNextDocId(0);
lookup().source().setNextSource(parsedDocument.source());
Map<String, SearchHitField> fields = new HashMap<String, SearchHitField>();
for (IndexableField field : parsedDocument.rootDoc().getFields()) {
fields.put(field.name(), new InternalSearchHitField(field.name(), ImmutableList.of()));
}
hitContext = new FetchSubPhase.HitContext();
hitContext.reset(new InternalSearchHit(0, "unknown", new StringText(request.documentType()), fields), readerContext, 0, topLevelReader, 0, new JustSourceFieldsVisitor());
}
public IndexSearcher docSearcher() {
return docEngineSearcher.searcher();
}
public IndexShard indexShard() {
return indexShard;
}
public IndexService indexService() {
return indexService;
}
public ConcurrentMap<HashedBytesRef, Query> percolateQueries() {
return percolateQueries;
}
public Query percolateQuery() {
return percolateQuery;
}
public void percolateQuery(Query percolateQuery) {
this.percolateQuery = percolateQuery;
}
public FetchSubPhase.HitContext hitContext() {
return hitContext;
}
@Override
public SearchContextHighlight highlight() {
return highlight;
}
@Override
public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}
@Override
public SearchShardTarget shardTarget() {
return searchShardTarget;
}
@Override
public SearchLookup lookup() {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService(), fieldData(), types);
}
return searchLookup;
}
@Override
public boolean release() throws ElasticSearchException {
if (docEngineSearcher != null) {
IndexReader indexReader = docEngineSearcher.reader();
fieldDataService.clear(indexReader);
indexService.cache().clear(indexReader);
return docEngineSearcher.release();
} else {
return false;
}
}
@Override
public MapperService mapperService() {
return indexService.mapperService();
}
@Override
public SearchContext parsedQuery(ParsedQuery query) {
parsedQuery = query;
this.query = query.query();
queryRewritten = false;
return this;
}
@Override
public ParsedQuery parsedQuery() {
return parsedQuery;
}
@Override
public Query query() {
return query;
}
@Override
public boolean queryRewritten() {
return queryRewritten;
}
@Override
public SearchContext updateRewriteQuery(Query rewriteQuery) {
queryRewritten = true;
query = rewriteQuery;
return this;
}
@Override
public String[] types() {
return types;
}
public void types(String[] types) {
this.types = types;
searchLookup = new SearchLookup(mapperService(), fieldData(), types);
}
@Override
public IndexFieldDataService fieldData() {
return fieldDataService;
}
// Unused:
@Override
public void preProcess() {
throw new UnsupportedOperationException();
}
@Override
public Filter searchFilter(String[] types) {
throw new UnsupportedOperationException();
}
@Override
public long id() {
throw new UnsupportedOperationException();
}
@Override
public ShardSearchRequest request() {
throw new UnsupportedOperationException();
}
@Override
public SearchType searchType() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext searchType(SearchType searchType) {
throw new UnsupportedOperationException();
}
@Override
public int numberOfShards() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasTypes() {
throw new UnsupportedOperationException();
}
@Override
public float queryBoost() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext queryBoost(float queryBoost) {
throw new UnsupportedOperationException();
}
@Override
public long nowInMillis() {
throw new UnsupportedOperationException();
}
@Override
public Scroll scroll() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext scroll(Scroll scroll) {
throw new UnsupportedOperationException();
}
@Override
public SearchContextFacets facets() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext facets(SearchContextFacets facets) {
throw new UnsupportedOperationException();
}
@Override
public SuggestionSearchContext suggest() {
throw new UnsupportedOperationException();
}
@Override
public void suggest(SuggestionSearchContext suggest) {
throw new UnsupportedOperationException();
}
@Override
public RescoreSearchContext rescore() {
throw new UnsupportedOperationException();
}
@Override
public void rescore(RescoreSearchContext rescore) {
throw new UnsupportedOperationException();
}
@Override
public boolean hasScriptFields() {
throw new UnsupportedOperationException();
}
@Override
public ScriptFieldsContext scriptFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasPartialFields() {
throw new UnsupportedOperationException();
}
@Override
public PartialFieldsContext partialFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean sourceRequested() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFetchSourceContext() {
throw new UnsupportedOperationException();
}
@Override
public FetchSourceContext fetchSourceContext() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
throw new UnsupportedOperationException();
}
@Override
public ContextIndexSearcher searcher() {
throw new UnsupportedOperationException();
}
@Override
public AnalysisService analysisService() {
throw new UnsupportedOperationException();
}
@Override
public IndexQueryParserService queryParserService() {
throw new UnsupportedOperationException();
}
@Override
public SimilarityService similarityService() {
throw new UnsupportedOperationException();
}
@Override
public ScriptService scriptService() {
throw new UnsupportedOperationException();
}
@Override
public CacheRecycler cacheRecycler() {
throw new UnsupportedOperationException();
}
@Override
public FilterCache filterCache() {
throw new UnsupportedOperationException();
}
@Override
public DocSetCache docSetCache() {
throw new UnsupportedOperationException();
}
@Override
public IdCache idCache() {
throw new UnsupportedOperationException();
}
@Override
public long timeoutInMillis() {
throw new UnsupportedOperationException();
}
@Override
public void timeoutInMillis(long timeoutInMillis) {
throw new UnsupportedOperationException();
}
@Override
public SearchContext minimumScore(float minimumScore) {
throw new UnsupportedOperationException();
}
@Override
public Float minimumScore() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext sort(Sort sort) {
throw new UnsupportedOperationException();
}
@Override
public Sort sort() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext trackScores(boolean trackScores) {
throw new UnsupportedOperationException();
}
@Override
public boolean trackScores() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext parsedFilter(ParsedFilter filter) {
throw new UnsupportedOperationException();
}
@Override
public ParsedFilter parsedFilter() {
throw new UnsupportedOperationException();
}
@Override
public Filter aliasFilter() {
throw new UnsupportedOperationException();
}
@Override
public int from() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext from(int from) {
throw new UnsupportedOperationException();
}
@Override
public int size() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext size(int size) {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFieldNames() {
throw new UnsupportedOperationException();
}
@Override
public List<String> fieldNames() {
throw new UnsupportedOperationException();
}
@Override
public void emptyFieldNames() {
throw new UnsupportedOperationException();
}
@Override
public boolean explain() {
throw new UnsupportedOperationException();
}
@Override
public void explain(boolean explain) {
throw new UnsupportedOperationException();
}
@Override
public List<String> groupStats() {
throw new UnsupportedOperationException();
}
@Override
public void groupStats(List<String> groupStats) {
throw new UnsupportedOperationException();
}
@Override
public boolean version() {
throw new UnsupportedOperationException();
}
@Override
public void version(boolean version) {
throw new UnsupportedOperationException();
}
@Override
public int[] docIdsToLoad() {
throw new UnsupportedOperationException();
}
@Override
public int docIdsToLoadFrom() {
throw new UnsupportedOperationException();
}
@Override
public int docIdsToLoadSize() {
throw new UnsupportedOperationException();
}
@Override
public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
throw new UnsupportedOperationException();
}
@Override
public void accessed(long accessTime) {
throw new UnsupportedOperationException();
}
@Override
public long lastAccessTime() {
throw new UnsupportedOperationException();
}
@Override
public long keepAlive() {
throw new UnsupportedOperationException();
}
@Override
public void keepAlive(long keepAlive) {
throw new UnsupportedOperationException();
}
@Override
public DfsSearchResult dfsResult() {
throw new UnsupportedOperationException();
}
@Override
public QuerySearchResult queryResult() {
throw new UnsupportedOperationException();
}
@Override
public FetchSearchResult fetchResult() {
throw new UnsupportedOperationException();
}
@Override
public void addRewrite(Rewrite rewrite) {
throw new UnsupportedOperationException();
}
@Override
public List<Rewrite> rewrites() {
throw new UnsupportedOperationException();
}
@Override
public ScanContext scanContext() {
throw new UnsupportedOperationException();
}
@Override
public MapperService.SmartNameFieldMappers smartFieldMappers(String name) {
throw new UnsupportedOperationException();
}
@Override
public FieldMappers smartNameFieldMappers(String name) {
throw new UnsupportedOperationException();
}
@Override
public FieldMapper smartNameFieldMapper(String name) {
throw new UnsupportedOperationException();
}
@Override
public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
throw new UnsupportedOperationException();
}
}
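
The heart of initialize() above is Lucene's MemoryIndex: the percolated document lives in a single-document in-memory index that every registered query is run against. A standalone sketch (field name and text are made up; the boolean constructor flag enables storing offsets, presumably why the service below now constructs ExtendedMemoryIndex with true):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.Version;

class MemoryIndexSketch {
    public static void main(String[] args) {
        MemoryIndex memoryIndex = new MemoryIndex(true); // true: store offsets
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_43);
        memoryIndex.addField("message", "the quick brown fox", analyzer);
        // A non-zero score means the candidate query matches the one document.
        float score = memoryIndex.search(new TermQuery(new Term("message", "fox")));
        System.out.println("matches: " + (score > 0.0f));
    }
}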

View File

@@ -18,6 +18,7 @@
package org.elasticsearch.percolator;
import com.google.common.collect.ImmutableMap;
import gnu.trove.map.hash.TByteObjectHashMap;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.AtomicReaderContext;
@@ -34,10 +35,12 @@ import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.percolate.PercolateShardResponse;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
@@ -48,14 +51,14 @@ import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
@@ -63,17 +66,20 @@ import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import static org.elasticsearch.index.mapper.SourceToParse.source;
import static org.elasticsearch.percolator.QueryCollector.*;
@@ -88,18 +94,25 @@ public class PercolatorService extends AbstractComponent {
private final IndicesService indicesService;
private final TByteObjectHashMap<PercolatorType> percolatorTypes;
private final ClusterService clusterService;
private final HighlightPhase highlightPhase;
@Inject
public PercolatorService(Settings settings, IndicesService indicesService) {
public PercolatorService(Settings settings, IndicesService indicesService, HighlightPhase highlightPhase, ClusterService clusterService) {
super(settings);
this.indicesService = indicesService;
this.clusterService = clusterService;
this.highlightPhase = highlightPhase;
final long maxReuseBytes = settings.getAsBytesSize("indices.memory.memory_index.size_per_thread", new ByteSizeValue(1, ByteSizeUnit.MB)).bytes();
cache = new CloseableThreadLocal<MemoryIndex>() {
@Override
protected MemoryIndex initialValue() {
return new ExtendedMemoryIndex(false, maxReuseBytes);
return new ExtendedMemoryIndex(true, maxReuseBytes);
}
};
percolatorTypes = new TByteObjectHashMap<PercolatorType>(6);
percolatorTypes.put(countPercolator.id(), countPercolator);
percolatorTypes.put(queryCountPercolator.id(), queryCountPercolator);
@@ -124,12 +137,13 @@ public class PercolatorService extends AbstractComponent {
long startTime = System.nanoTime();
try {
final PercolateContext context = new PercolateContext();
context.percolateQueries = indexShard.percolateRegistry().percolateQueries();
context.indexShard = indexShard;
context.percolateIndexService = percolateIndexService;
ParsedDocument parsedDocument = parsePercolate(percolateIndexService, request, context);
if (context.percolateQueries.isEmpty()) {
SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
final PercolateContext context = new PercolateContext(
request, searchShardTarget, indexShard, percolateIndexService
);
ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context);
if (context.percolateQueries().isEmpty()) {
return new PercolateShardResponse(context, request.index(), request.shardId());
}
@@ -139,7 +153,7 @@ public class PercolatorService extends AbstractComponent {
throw new ElasticSearchIllegalArgumentException("Nothing to percolate");
}
if (context.query == null && (context.score || context.sort)) {
if (context.percolateQuery() == null && (context.score || context.sort)) {
throw new ElasticSearchIllegalArgumentException("Can't sort or score if query isn't specified");
}
@@ -147,6 +161,10 @@ public class PercolatorService extends AbstractComponent {
throw new ElasticSearchIllegalArgumentException("Can't sort if size isn't specified");
}
if (context.highlight() != null && !context.limit) {
throw new ElasticSearchIllegalArgumentException("Can't highlight if size isn't specified");
}
if (context.size < 0) {
context.size = 0;
}
@@ -177,11 +195,11 @@ public class PercolatorService extends AbstractComponent {
PercolatorType action;
if (request.onlyCount()) {
action = context.query != null ? queryCountPercolator : countPercolator;
action = context.percolateQuery() != null ? queryCountPercolator : countPercolator;
} else {
if (context.sort) {
action = topMatchingPercolator;
} else if (context.query != null) {
} else if (context.percolateQuery() != null) {
action = context.score ? scoringPercolator : queryPercolator;
} else {
action = matchPercolator;
@@ -189,39 +207,33 @@ public class PercolatorService extends AbstractComponent {
}
context.percolatorTypeId = action.id();
context.docSearcher = memoryIndex.createSearcher();
context.fieldData = percolateIndexService.fieldData();
IndexCache indexCache = percolateIndexService.cache();
try {
return action.doPercolate(request, context);
} finally {
// explicitly clear the reader, since we can only register a callback on SegmentReader
indexCache.clear(context.docSearcher.getIndexReader());
context.fieldData.clear(context.docSearcher.getIndexReader());
}
context.initialize(memoryIndex, parsedDocument);
return action.doPercolate(request, context);
} finally {
memoryIndex.reset();
context.release();
}
} finally {
shardPercolateService.postPercolate(System.nanoTime() - startTime);
}
}
private ParsedDocument parsePercolate(IndexService documentIndexService, PercolateShardRequest request, PercolateContext context) throws ElasticSearchException {
private ParsedDocument parseRequest(IndexService documentIndexService, PercolateShardRequest request, PercolateContext context) throws ElasticSearchException {
BytesReference source = request.source();
if (source == null || source.length() == 0) {
return null;
}
Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
ParsedDocument doc = null;
XContentParser parser = null;
// Some queries (e.g. the function_score query with decay functions) rely on SearchContext being set:
SearchContext searchContext = new SearchContext(0,
new ShardSearchRequest().types(new String[0]),
null, context.indexShard.searcher(), context.percolateIndexService, context.indexShard,
null, null);
SearchContext.setCurrent(searchContext);
// Some queries (e.g. the function_score query with decay functions) rely on a SearchContext being set:
// We switch types because this context needs to be in the context of the percolate queries in the shard and
// not the in-memory percolate doc
String[] previousTypes = context.types();
context.types(new String[]{Constants.TYPE_NAME});
SearchContext.setCurrent(context);
try {
parser = XContentFactory.xContent(source).createParser(source);
String currentFieldName = null;
@@ -241,17 +253,20 @@ public class PercolatorService extends AbstractComponent {
doc = docMapper.parse(source(parser).type(request.documentType()).flyweight(true));
}
} else if (token == XContentParser.Token.START_OBJECT) {
SearchParseElement element = hlElements.get(currentFieldName);
if ("query".equals(currentFieldName)) {
if (context.query != null) {
if (context.percolateQuery() != null) {
throw new ElasticSearchParseException("Either specify query or filter, not both");
}
context.query = documentIndexService.queryParserService().parse(parser).query();
context.percolateQuery(documentIndexService.queryParserService().parse(parser).query());
} else if ("filter".equals(currentFieldName)) {
if (context.query != null) {
if (context.percolateQuery() != null) {
throw new ElasticSearchParseException("Either specify query or filter, not both");
}
Filter filter = documentIndexService.queryParserService().parseInnerFilter(parser).filter();
context.query = new XConstantScoreQuery(filter);
context.percolateQuery(new XConstantScoreQuery(filter));
} else if (element != null) {
element.parse(parser, context);
}
} else if (token == null) {
break;
@@ -269,10 +284,39 @@ public class PercolatorService extends AbstractComponent {
}
}
}
} catch (IOException e) {
// We need to get the actual source from the request body for highlighting, so parse the request body again
// and only get the doc source.
if (context.highlight() != null) {
parser.close();
currentFieldName = null;
parser = XContentFactory.xContent(source).createParser(source);
token = parser.nextToken();
assert token == XContentParser.Token.START_OBJECT;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("doc".equals(currentFieldName)) {
BytesStreamOutput bStream = new BytesStreamOutput();
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.SMILE, bStream);
builder.copyCurrentStructure(parser);
builder.close();
doc.setSource(bStream.bytes());
break;
} else {
parser.skipChildren();
}
} else if (token == null) {
break;
}
}
}
} catch (Throwable e) {
throw new ElasticSearchParseException("failed to parse request", e);
} finally {
searchContext.release();
context.types(previousTypes);
SearchContext.removeCurrent();
if (parser != null) {
parser.close();
@@ -290,7 +334,7 @@ public class PercolatorService extends AbstractComponent {
MapperService mapperService = documentIndexService.mapperService();
DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(type);
doc = docMapper.parse(source(parser).type(type).flyweight(true));
} catch (IOException e) {
} catch (Throwable e) {
throw new ElasticSearchParseException("failed to parse request", e);
} finally {
if (parser != null) {
@@ -340,10 +384,10 @@ public class PercolatorService extends AbstractComponent {
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
long count = 0;
Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries.entrySet()) {
for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
collector.reset();
try {
context.docSearcher.search(entry.getValue(), collector);
context.docSearcher().search(entry.getValue(), collector);
} catch (IOException e) {
logger.warn("[" + entry.getKey() + "] failed to execute query", e);
}
@@ -372,12 +416,12 @@ public class PercolatorService extends AbstractComponent {
@Override
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
long count = 0;
Engine.Searcher percolatorSearcher = context.indexShard.searcher();
Engine.Searcher percolatorSearcher = context.indexShard().searcher();
try {
Count countCollector = count(logger, context);
queryBasedPercolating(percolatorSearcher, context, countCollector);
count = countCollector.counter();
} catch (IOException e) {
} catch (Throwable e) {
logger.warn("failed to execute", e);
} finally {
percolatorSearcher.release();
@@ -411,7 +455,8 @@ public class PercolatorService extends AbstractComponent {
for (int i = 0; i < response.matches().length; i++) {
float score = response.scores().length == 0 ? NO_SCORE : response.scores()[i];
Text match = new BytesText(new BytesArray(response.matches()[i]));
finalMatches.add(new PercolateResponse.Match(index, match, score));
Map<String, HighlightField> hl = response.hls().isEmpty() ? null : response.hls().get(i);
finalMatches.add(new PercolateResponse.Match(index, match, score, hl));
if (requestedSize != 0 && finalMatches.size() == requestedSize) {
break outer;
}
@@ -424,24 +469,35 @@ public class PercolatorService extends AbstractComponent {
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
long count = 0;
List<BytesRef> matches = new ArrayList<BytesRef>();
List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries.entrySet()) {
for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(entry.getValue(), ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
try {
context.docSearcher.search(entry.getValue(), collector);
} catch (IOException e) {
context.docSearcher().search(entry.getValue(), collector);
} catch (Throwable e) {
logger.warn("[" + entry.getKey() + "] failed to execute query", e);
}
if (collector.exists()) {
if (!context.limit || count < context.size) {
matches.add(entry.getKey().bytes);
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
count++;
}
}
return new PercolateShardResponse(matches.toArray(new BytesRef[0]), count, context, request.index(), request.shardId());
BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
return new PercolateShardResponse(finalMatches, hls, count, context, request.index(), request.shardId());
}
};
@@ -459,16 +515,19 @@ public class PercolatorService extends AbstractComponent {
@Override
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
Engine.Searcher percolatorSearcher = context.indexShard.searcher();
Engine.Searcher percolatorSearcher = context.indexShard().searcher();
try {
Match match = match(logger, context);
Match match = match(logger, context, highlightPhase);
queryBasedPercolating(percolatorSearcher, context, match);
List<BytesRef> matches = match.matches();
List<Map<String, HighlightField>> hls = match.hls();
long count = match.counter();
return new PercolateShardResponse(matches.toArray(new BytesRef[0]), count, context, request.index(), request.shardId());
} catch (IOException e) {
BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
return new PercolateShardResponse(finalMatches, hls, count, context, request.index(), request.shardId());
} catch (Throwable e) {
logger.debug("failed to execute", e);
throw new PercolateException(context.indexShard.shardId(), "failed to execute", e);
throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
} finally {
percolatorSearcher.release();
}
@@ -489,17 +548,20 @@ public class PercolatorService extends AbstractComponent {
@Override
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
Engine.Searcher percolatorSearcher = context.indexShard.searcher();
Engine.Searcher percolatorSearcher = context.indexShard().searcher();
try {
MatchAndScore matchAndScore = matchAndScore(logger, context);
MatchAndScore matchAndScore = matchAndScore(logger, context, highlightPhase);
queryBasedPercolating(percolatorSearcher, context, matchAndScore);
BytesRef[] matches = matchAndScore.matches().toArray(new BytesRef[0]);
List<BytesRef> matches = matchAndScore.matches();
List<Map<String, HighlightField>> hls = matchAndScore.hls();
float[] scores = matchAndScore.scores().toArray();
long count = matchAndScore.counter();
return new PercolateShardResponse(matches, count, scores, context, request.index(), request.shardId());
} catch (IOException e) {
BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
return new PercolateShardResponse(finalMatches, hls, count, scores, context, request.index(), request.shardId());
} catch (Throwable e) {
logger.debug("failed to execute", e);
throw new PercolateException(context.indexShard.shardId(), "failed to execute", e);
throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
} finally {
percolatorSearcher.release();
}
@@ -539,7 +601,12 @@ public class PercolatorService extends AbstractComponent {
for (int i = 0; i < response.matches().length; i++) {
float score = response.scores().length == 0 ? Float.NaN : response.scores()[i];
Text match = new BytesText(new BytesArray(response.matches()[i]));
finalMatches.add(new PercolateResponse.Match(index, match, score));
if (!response.hls().isEmpty()) {
Map<String, HighlightField> hl = response.hls().get(i);
finalMatches.add(new PercolateResponse.Match(index, match, score, hl));
} else {
finalMatches.add(new PercolateResponse.Match(index, match, score));
}
}
} else {
int[] slots = new int[shardResults.size()];
@@ -575,7 +642,12 @@ public class PercolatorService extends AbstractComponent {
Text index = new StringText(shardResponse.getIndex());
Text match = new BytesText(new BytesArray(shardResponse.matches()[itemIndex]));
float score = shardResponse.scores()[itemIndex];
finalMatches.add(new PercolateResponse.Match(index, match, score));
if (!shardResponse.hls().isEmpty()) {
Map<String, HighlightField> hl = shardResponse.hls().get(itemIndex);
finalMatches.add(new PercolateResponse.Match(index, match, score, hl));
} else {
finalMatches.add(new PercolateResponse.Match(index, match, score));
}
if (finalMatches.size() == requestedSize) {
break;
}
@@ -586,7 +658,7 @@ public class PercolatorService extends AbstractComponent {
@Override
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
Engine.Searcher percolatorSearcher = context.indexShard.searcher();
Engine.Searcher percolatorSearcher = context.indexShard().searcher();
try {
MatchAndSort matchAndSort = QueryCollector.matchAndSort(logger, context);
queryBasedPercolating(percolatorSearcher, context, matchAndSort);
@@ -594,24 +666,40 @@ public class PercolatorService extends AbstractComponent {
long count = topDocs.totalHits;
List<BytesRef> matches = new ArrayList<BytesRef>(topDocs.scoreDocs.length);
float[] scores = new float[topDocs.scoreDocs.length];
List<Map<String, HighlightField>> hls = null;
if (context.highlight() != null) {
hls = new ArrayList<Map<String, HighlightField>>(topDocs.scoreDocs.length);
}
IndexFieldData idFieldData = context.fieldData.getForField(
IndexFieldData idFieldData = context.fieldData().getForField(
new FieldMapper.Names(IdFieldMapper.NAME),
new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes"))
);
int i = 0;
final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
int segmentIdx = ReaderUtil.subIndex(scoreDoc.doc, percolatorSearcher.reader().leaves());
AtomicReaderContext atomicReaderContext = percolatorSearcher.reader().leaves().get(segmentIdx);
BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues();
BytesRef id = values.getValue(scoreDoc.doc - atomicReaderContext.docBase);
matches.add(values.makeSafe(id));
spare.hash = values.getValueHashed(scoreDoc.doc - atomicReaderContext.docBase, spare.bytes);
matches.add(values.makeSafe(spare.bytes));
if (hls != null) {
Query query = context.percolateQueries().get(spare);
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
highlightPhase.hitExecute(context, context.hitContext());
hls.add(i, context.hitContext().hit().getHighlightFields());
}
scores[i++] = scoreDoc.score;
}
return new PercolateShardResponse(matches.toArray(new BytesRef[matches.size()]), count, scores, context, request.index(), request.shardId());
} catch (Exception e) {
if (hls != null) {
return new PercolateShardResponse(matches.toArray(new BytesRef[matches.size()]), hls, count, scores, context, request.index(), request.shardId());
} else {
return new PercolateShardResponse(matches.toArray(new BytesRef[matches.size()]), count, scores, context, request.index(), request.shardId());
}
} catch (Throwable e) {
logger.debug("failed to execute", e);
throw new PercolateException(context.indexShard.shardId(), "failed to execute", e);
throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
} finally {
percolatorSearcher.release();
}
@@ -620,29 +708,12 @@ public class PercolatorService extends AbstractComponent {
};
private static void queryBasedPercolating(Engine.Searcher percolatorSearcher, PercolateContext context, Collector collector) throws IOException {
Filter percolatorTypeFilter = context.percolateIndexService.mapperService().documentMapper(Constants.TYPE_NAME).typeFilter();
percolatorTypeFilter = context.percolateIndexService.cache().filter().cache(percolatorTypeFilter);
FilteredQuery query = new FilteredQuery(context.query, percolatorTypeFilter);
Filter percolatorTypeFilter = context.indexService().mapperService().documentMapper(Constants.TYPE_NAME).typeFilter();
percolatorTypeFilter = context.indexService().cache().filter().cache(percolatorTypeFilter);
FilteredQuery query = new FilteredQuery(context.percolateQuery(), percolatorTypeFilter);
percolatorSearcher.searcher().search(query, collector);
}
public class PercolateContext {
public boolean limit;
public int size;
public boolean score;
public boolean sort;
public byte percolatorTypeId;
Query query;
ConcurrentMap<HashedBytesRef, Query> percolateQueries;
IndexSearcher docSearcher;
IndexShard indexShard;
IndexFieldDataService fieldData;
IndexService percolateIndexService;
}
public final static class ReduceResult {
private final long count;
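
Pulling the doPercolate() pieces together: match each registered query against the in-memory document and, when highlighting is requested, produce a snippet for that query before moving on. The same loop sketched with plain Lucene classes (Highlighter/QueryScorer stand in for the embedded HighlightPhase; registeredQueries is a made-up stand-in for the shard's percolate query registry):

import java.util.*;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.util.Version;

class PerQueryHighlightSketch {
    static List<String> percolate(Map<String, Query> registeredQueries, String text) throws Exception {
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_43);
        MemoryIndex memoryIndex = new MemoryIndex(true);
        memoryIndex.addField("message", text, analyzer);
        List<String> snippets = new ArrayList<String>();
        for (Map.Entry<String, Query> entry : registeredQueries.entrySet()) {
            if (memoryIndex.search(entry.getValue()) > 0.0f) { // the query matched
                Highlighter highlighter = new Highlighter(new QueryScorer(entry.getValue()));
                // getBestFragment() returns null if the query matched another field.
                String snippet = highlighter.getBestFragment(analyzer, "message", text);
                snippets.add(entry.getKey() + ": " + snippet);
            }
        }
        return snippets;
    }
}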

View File

@@ -1,5 +1,25 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import com.google.common.collect.ImmutableMap;
import gnu.trove.list.array.TFloatArrayList;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.*;
@@ -13,10 +33,14 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.search.highlight.HighlightPhase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
/**
@@ -33,11 +57,11 @@ abstract class QueryCollector extends Collector {
BytesValues values;
QueryCollector(ESLogger logger, PercolatorService.PercolateContext context) {
QueryCollector(ESLogger logger, PercolateContext context) {
this.logger = logger;
this.queries = context.percolateQueries;
this.searcher = context.docSearcher;
this.idFieldData = context.fieldData.getForField(
this.queries = context.percolateQueries();
this.searcher = context.docSearcher();
this.idFieldData = context.fieldData().getForField(
new FieldMapper.Names(IdFieldMapper.NAME),
new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes"))
);
@@ -59,33 +83,39 @@ abstract class QueryCollector extends Collector {
}
static Match match(ESLogger logger, PercolatorService.PercolateContext context) {
return new Match(logger, context);
static Match match(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
return new Match(logger, context, highlightPhase);
}
static Count count(ESLogger logger, PercolatorService.PercolateContext context) {
static Count count(ESLogger logger, PercolateContext context) {
return new Count(logger, context);
}
static MatchAndScore matchAndScore(ESLogger logger, PercolatorService.PercolateContext context) {
return new MatchAndScore(logger, context);
static MatchAndScore matchAndScore(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
return new MatchAndScore(logger, context, highlightPhase);
}
static MatchAndSort matchAndSort(ESLogger logger, PercolatorService.PercolateContext context) {
static MatchAndSort matchAndSort(ESLogger logger, PercolateContext context) {
return new MatchAndSort(logger, context);
}
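The four factories map onto the request flags carried by the context (score, sort, limit/size). A hedged sketch of the dispatch a caller such as PercolatorService might perform; the actual selection logic lives outside this hunk, and the flag names follow the old field-based context above:

// Pick a collector for one percolate request (sketch, not the commit's code).
final QueryCollector collector;
if (context.sort) {
    // top-N matches ordered by score
    collector = QueryCollector.matchAndSort(logger, context);
} else if (context.score) {
    // all matches plus their scores (and highlights, if requested)
    collector = QueryCollector.matchAndScore(logger, context, highlightPhase);
} else {
    // plain matches (and highlights, if requested)
    collector = QueryCollector.match(logger, context, highlightPhase);
}
// Count is the fourth option, used when only the number of matches is needed.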
final static class Match extends QueryCollector {
private final List<BytesRef> matches = new ArrayList<BytesRef>();
private final boolean limit;
private final int size;
private long counter = 0;
final PercolateContext context;
final HighlightPhase highlightPhase;
Match(ESLogger logger, PercolatorService.PercolateContext context) {
final List<BytesRef> matches = new ArrayList<BytesRef>();
final List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
final boolean limit;
final int size;
long counter = 0;
Match(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
super(logger, context);
this.limit = context.limit;
this.size = context.size;
this.context = context;
this.highlightPhase = highlightPhase;
}
@Override
@@ -99,10 +129,19 @@ abstract class QueryCollector extends Collector {
// run the query
try {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
searcher.search(query, collector);
if (collector.exists()) {
if (!limit || counter < size) {
matches.add(values.makeSafe(spare.bytes));
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
counter++;
}
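Taken together, this hunk turns Match.collect(int doc) into a three-step loop per registered query: resolve the query for the current document, install it as the context's parsed query so the highlighter highlights against that query (clearing hit state cached for the previous one), then record the match and its highlight fields. A hedged reconstruction of the whole method, where resolveQuery is a hypothetical stand-in for the id-to-query lookup that the hunk elides:

@Override
public void collect(int doc) throws IOException {
    // Hypothetical stand-in for the elided lookup: map the segment doc id to
    // the registered Query via the id field data and the `queries` map.
    Query query = resolveQuery(doc);
    if (query == null) {
        return; // not a registered percolator query
    }
    try {
        collector.reset(); // "exists" collector, reused for every candidate query
        if (context.highlight() != null) {
            // Make the candidate query visible to the highlighter and drop
            // any hit state cached for the previously tested query.
            context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
            context.hitContext().cache().clear();
        }
        searcher.search(query, collector);
        if (collector.exists()) {
            if (!limit || counter < size) {
                matches.add(values.makeSafe(spare.bytes));
                if (context.highlight() != null) {
                    highlightPhase.hitExecute(context, context.hitContext());
                    hls.add(context.hitContext().hit().getHighlightFields());
                }
            }
            counter++;
        }
    } catch (IOException e) {
        // logging elided; the real method reports the failing query id
    }
}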
@@ -119,13 +158,16 @@ abstract class QueryCollector extends Collector {
return matches;
}
List<Map<String, HighlightField>> hls() {
return hls;
}
}
final static class MatchAndSort extends QueryCollector {
private final TopScoreDocCollector topDocsCollector;
MatchAndSort(ESLogger logger, PercolatorService.PercolateContext context) {
MatchAndSort(ESLogger logger, PercolateContext context) {
super(logger, context);
// TODO: Use TopFieldCollector.create(...) for ascending and descending scoring?
topDocsCollector = TopScoreDocCollector.create(context.size, false);
@@ -170,19 +212,25 @@ abstract class QueryCollector extends Collector {
final static class MatchAndScore extends QueryCollector {
private final List<BytesRef> matches = new ArrayList<BytesRef>();
final PercolateContext context;
final HighlightPhase highlightPhase;
final List<BytesRef> matches = new ArrayList<BytesRef>();
final List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
// TODO: Use thread local in order to cache the scores lists?
private final TFloatArrayList scores = new TFloatArrayList();
private final boolean limit;
private final int size;
private long counter = 0;
final TFloatArrayList scores = new TFloatArrayList();
final boolean limit;
final int size;
long counter = 0;
private Scorer scorer;
MatchAndScore(ESLogger logger, PercolatorService.PercolateContext context) {
MatchAndScore(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
super(logger, context);
this.limit = context.limit;
this.size = context.size;
this.context = context;
this.highlightPhase = highlightPhase;
}
@Override
@@ -196,11 +244,19 @@ abstract class QueryCollector extends Collector {
// run the query
try {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
searcher.search(query, collector);
if (collector.exists()) {
if (!limit || counter < size) {
matches.add(values.makeSafe(spare.bytes));
scores.add(scorer.score());
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
counter++;
}
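MatchAndScore maintains three index-aligned lists: matches, scores, and (when highlighting is on) hls. A hedged sketch of how a consumer could zip them back together when building the shard response; buildMatch is a hypothetical helper, not part of the commit:

// The i-th entries of matches, scores and hls describe the same matched query.
for (int i = 0; i < matches.size(); i++) {
    BytesRef queryId = matches.get(i);
    float score = scores.get(i);
    Map<String, HighlightField> highlight = hls.isEmpty() ? null : hls.get(i);
    buildMatch(queryId, score, highlight); // hypothetical response assembly
}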
@@ -225,13 +281,17 @@ abstract class QueryCollector extends Collector {
TFloatArrayList scores() {
return scores;
}
List<Map<String, HighlightField>> hls() {
return hls;
}
}
final static class Count extends QueryCollector {
private long counter = 0;
Count(ESLogger logger, PercolatorService.PercolateContext context) {
Count(ESLogger logger, PercolateContext context) {
super(logger, context);
}

View File

@@ -52,6 +52,7 @@ import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.*;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
@@ -480,7 +481,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
Engine.Searcher engineSearcher = searcher == null ? indexShard.searcher() : searcher;
SearchContext context = new SearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, cacheRecycler);
SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, cacheRecycler);
SearchContext.setCurrent(context);
try {
context.scroll(request.scroll());
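Note the ordering here: the DefaultSearchContext is installed as the thread-local current context before any of the request source is parsed, so parsers and fetch sub-phases can reach it via SearchContext.current(). A hedged sketch of the surrounding pattern; setCurrent/removeCurrent follow the thread-local declared in SearchContext further down, and the failure handling is illustrative, not this method's exact body:

SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(),
        request, shardTarget, engineSearcher, indexService, indexShard,
        scriptService, cacheRecycler);
SearchContext.setCurrent(context);
boolean success = false;
try {
    context.scroll(request.scroll());
    // ... parse the request source, run the search phases ...
    success = true;
} finally {
    if (!success) {
        context.release(); // frees the context and engine searchers
    }
    SearchContext.removeCurrent(); // always clear the thread-local
}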

View File

@@ -0,0 +1,648 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedFilter;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.ArrayList;
import java.util.List;
/**
*
*/
public class DefaultSearchContext extends SearchContext {
private final long id;
private final ShardSearchRequest request;
private final SearchShardTarget shardTarget;
private SearchType searchType;
private final Engine.Searcher engineSearcher;
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final IndexShard indexShard;
private final IndexService indexService;
private final ContextIndexSearcher searcher;
private final DfsSearchResult dfsResult;
private final QuerySearchResult queryResult;
private final FetchSearchResult fetchResult;
// lazy initialized only if needed
private ScanContext scanContext;
private float queryBoost = 1.0f;
// timeout in millis
private long timeoutInMillis = -1;
private List<String> groupStats;
private Scroll scroll;
private boolean explain;
private boolean version = false; // by default, we don't return versions
private List<String> fieldNames;
private ScriptFieldsContext scriptFields;
private PartialFieldsContext partialFields;
private FetchSourceContext fetchSourceContext;
private int from = -1;
private int size = -1;
private Sort sort;
private Float minimumScore;
private boolean trackScores = false; // when sorting, track scores as well...
private ParsedQuery originalQuery;
private Query query;
private ParsedFilter filter;
private Filter aliasFilter;
private int[] docIdsToLoad;
private int docsIdsToLoadFrom;
private int docsIdsToLoadSize;
private SearchContextFacets facets;
private SearchContextHighlight highlight;
private SuggestionSearchContext suggest;
private RescoreSearchContext rescore;
private SearchLookup searchLookup;
private boolean queryRewritten;
private volatile long keepAlive;
private volatile long lastAccessTime;
private List<SearchContext.Rewrite> rewrites = null;
public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
ScriptService scriptService, CacheRecycler cacheRecycler) {
this.id = id;
this.request = request;
this.searchType = request.searchType();
this.shardTarget = shardTarget;
this.engineSearcher = engineSearcher;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.dfsResult = new DfsSearchResult(id, shardTarget);
this.queryResult = new QuerySearchResult(id, shardTarget);
this.fetchResult = new FetchSearchResult(id, shardTarget);
this.indexShard = indexShard;
this.indexService = indexService;
this.searcher = new ContextIndexSearcher(this, engineSearcher);
// initialize the filtering alias based on the provided filters
aliasFilter = indexService.aliasesService().aliasFilter(request.filteringAliases());
}
@Override
public boolean release() throws ElasticSearchException {
if (scanContext != null) {
scanContext.clear();
}
// clear any per-context state held by query rewrites
if (rewrites != null) {
for (SearchContext.Rewrite rewrite : rewrites) {
rewrite.contextClear();
}
}
searcher.release();
engineSearcher.release();
return true;
}
/**
* Should be called before executing the main query and after all other parameters have been set.
*/
public void preProcess() {
if (query() == null) {
parsedQuery(ParsedQuery.parsedMatchAllQuery());
}
if (queryBoost() != 1.0f) {
parsedQuery(new ParsedQuery(new FunctionScoreQuery(query(), new BoostScoreFunction(queryBoost)), parsedQuery()));
}
Filter searchFilter = searchFilter(types());
if (searchFilter != null) {
if (Queries.isConstantMatchAllQuery(query())) {
Query q = new XConstantScoreQuery(searchFilter);
q.setBoost(query().getBoost());
parsedQuery(new ParsedQuery(q, parsedQuery()));
} else {
parsedQuery(new ParsedQuery(new XFilteredQuery(query(), searchFilter), parsedQuery()));
}
}
}
public Filter searchFilter(String[] types) {
Filter filter = mapperService().searchFilter(types);
if (filter == null) {
return aliasFilter;
} else {
filter = filterCache().cache(filter);
if (aliasFilter != null) {
return new AndFilter(ImmutableList.of(filter, aliasFilter));
}
return filter;
}
}
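preProcess() and searchFilter(...) jointly determine the final executed query: the type and alias filters are AND-ed (with the type filter cached), and a match-all query collapses into a constant-score query over the filter rather than a filtered query. A worked sketch under assumed inputs; userQuery, "my-type" and aliasFilter are placeholders, and the classes all come from the imports above:

// Combine the (cached) type filter with the alias filter.
Filter typeFilter = filterCache().cache(mapperService().searchFilter(new String[]{"my-type"}));
Filter combined = new AndFilter(ImmutableList.of(typeFilter, aliasFilter));

// match-all fast path: scoring is constant, so skip the query entirely...
Query constantScore = new XConstantScoreQuery(combined);
// ...general case: keep the user's scoring and wrap the filter around it.
Query filtered = new XFilteredQuery(userQuery, combined);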
public long id() {
return this.id;
}
public ShardSearchRequest request() {
return this.request;
}
public SearchType searchType() {
return this.searchType;
}
public SearchContext searchType(SearchType searchType) {
this.searchType = searchType;
return this;
}
public SearchShardTarget shardTarget() {
return this.shardTarget;
}
public int numberOfShards() {
return request.numberOfShards();
}
public boolean hasTypes() {
return request.types() != null && request.types().length > 0;
}
public String[] types() {
return request.types();
}
public float queryBoost() {
return queryBoost;
}
public SearchContext queryBoost(float queryBoost) {
this.queryBoost = queryBoost;
return this;
}
public long nowInMillis() {
return request.nowInMillis();
}
public Scroll scroll() {
return this.scroll;
}
public SearchContext scroll(Scroll scroll) {
this.scroll = scroll;
return this;
}
public SearchContextFacets facets() {
return facets;
}
public SearchContext facets(SearchContextFacets facets) {
this.facets = facets;
return this;
}
public SearchContextHighlight highlight() {
return highlight;
}
public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}
public SuggestionSearchContext suggest() {
return suggest;
}
public void suggest(SuggestionSearchContext suggest) {
this.suggest = suggest;
}
public RescoreSearchContext rescore() {
return this.rescore;
}
public void rescore(RescoreSearchContext rescore) {
this.rescore = rescore;
}
public boolean hasScriptFields() {
return scriptFields != null;
}
public ScriptFieldsContext scriptFields() {
if (scriptFields == null) {
scriptFields = new ScriptFieldsContext();
}
return this.scriptFields;
}
public boolean hasPartialFields() {
return partialFields != null;
}
public PartialFieldsContext partialFields() {
if (partialFields == null) {
partialFields = new PartialFieldsContext();
}
return this.partialFields;
}
/**
 * A shortcut to check whether a fetchSourceContext exists and requests the source.
 *
 * @return true if a fetch source context is set and the source is requested
 */
public boolean sourceRequested() {
return fetchSourceContext != null && fetchSourceContext.fetchSource();
}
public boolean hasFetchSourceContext() {
return fetchSourceContext != null;
}
public FetchSourceContext fetchSourceContext() {
return this.fetchSourceContext;
}
public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
this.fetchSourceContext = fetchSourceContext;
return this;
}
public ContextIndexSearcher searcher() {
return this.searcher;
}
public IndexShard indexShard() {
return this.indexShard;
}
public MapperService mapperService() {
return indexService.mapperService();
}
public AnalysisService analysisService() {
return indexService.analysisService();
}
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
public SimilarityService similarityService() {
return indexService.similarityService();
}
public ScriptService scriptService() {
return scriptService;
}
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
public FilterCache filterCache() {
return indexService.cache().filter();
}
public DocSetCache docSetCache() {
return indexService.cache().docSet();
}
public IndexFieldDataService fieldData() {
return indexService.fieldData();
}
public IdCache idCache() {
return indexService.cache().idCache();
}
public long timeoutInMillis() {
return timeoutInMillis;
}
public void timeoutInMillis(long timeoutInMillis) {
this.timeoutInMillis = timeoutInMillis;
}
public SearchContext minimumScore(float minimumScore) {
this.minimumScore = minimumScore;
return this;
}
public Float minimumScore() {
return this.minimumScore;
}
public SearchContext sort(Sort sort) {
this.sort = sort;
return this;
}
public Sort sort() {
return this.sort;
}
public SearchContext trackScores(boolean trackScores) {
this.trackScores = trackScores;
return this;
}
public boolean trackScores() {
return this.trackScores;
}
public SearchContext parsedFilter(ParsedFilter filter) {
this.filter = filter;
return this;
}
public ParsedFilter parsedFilter() {
return this.filter;
}
public Filter aliasFilter() {
return aliasFilter;
}
public SearchContext parsedQuery(ParsedQuery query) {
queryRewritten = false;
this.originalQuery = query;
this.query = query.query();
return this;
}
public ParsedQuery parsedQuery() {
return this.originalQuery;
}
/**
* The query to execute, might be rewritten.
*/
public Query query() {
return this.query;
}
/**
* Has the query been rewritten already?
*/
public boolean queryRewritten() {
return queryRewritten;
}
/**
* Rewrites the query and updates it. Only happens once.
*/
public SearchContext updateRewriteQuery(Query rewriteQuery) {
query = rewriteQuery;
queryRewritten = true;
return this;
}
public int from() {
return from;
}
public SearchContext from(int from) {
this.from = from;
return this;
}
public int size() {
return size;
}
public SearchContext size(int size) {
this.size = size;
return this;
}
public boolean hasFieldNames() {
return fieldNames != null;
}
public List<String> fieldNames() {
if (fieldNames == null) {
fieldNames = Lists.newArrayList();
}
return fieldNames;
}
public void emptyFieldNames() {
this.fieldNames = ImmutableList.of();
}
public boolean explain() {
return explain;
}
public void explain(boolean explain) {
this.explain = explain;
}
@Nullable
public List<String> groupStats() {
return this.groupStats;
}
public void groupStats(List<String> groupStats) {
this.groupStats = groupStats;
}
public boolean version() {
return version;
}
public void version(boolean version) {
this.version = version;
}
public int[] docIdsToLoad() {
return docIdsToLoad;
}
public int docIdsToLoadFrom() {
return docsIdsToLoadFrom;
}
public int docIdsToLoadSize() {
return docsIdsToLoadSize;
}
public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
this.docIdsToLoad = docIdsToLoad;
this.docsIdsToLoadFrom = docsIdsToLoadFrom;
this.docsIdsToLoadSize = docsIdsToLoadSize;
return this;
}
public void accessed(long accessTime) {
this.lastAccessTime = accessTime;
}
public long lastAccessTime() {
return this.lastAccessTime;
}
public long keepAlive() {
return this.keepAlive;
}
public void keepAlive(long keepAlive) {
this.keepAlive = keepAlive;
}
public SearchLookup lookup() {
// TODO: The types should take into account the parsing context in QueryParserContext...
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService(), fieldData(), request.types());
}
return searchLookup;
}
public DfsSearchResult dfsResult() {
return dfsResult;
}
public QuerySearchResult queryResult() {
return queryResult;
}
public FetchSearchResult fetchResult() {
return fetchResult;
}
public void addRewrite(Rewrite rewrite) {
if (this.rewrites == null) {
this.rewrites = new ArrayList<Rewrite>();
}
this.rewrites.add(rewrite);
}
public List<Rewrite> rewrites() {
return this.rewrites;
}
public ScanContext scanContext() {
if (scanContext == null) {
scanContext = new ScanContext();
}
return scanContext;
}
public MapperService.SmartNameFieldMappers smartFieldMappers(String name) {
return mapperService().smartName(name, request.types());
}
public FieldMappers smartNameFieldMappers(String name) {
return mapperService().smartNameFieldMappers(name, request.types());
}
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, request.types());
}
public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
return mapperService().smartNameObjectMapper(name, request.types());
}
}

View File

@@ -1,45 +1,34 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.elasticsearch.search.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
@@ -48,7 +37,6 @@ import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedFilter;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
@@ -67,13 +55,11 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.ArrayList;
import java.util.List;
/**
*
*/
public class SearchContext implements Releasable {
public abstract class SearchContext implements Releasable {
private static ThreadLocal<SearchContext> current = new ThreadLocal<SearchContext>();
@@ -100,576 +86,204 @@
void contextClear();
}
private final long id;
private final ShardSearchRequest request;
private final SearchShardTarget shardTarget;
private SearchType searchType;
private final Engine.Searcher engineSearcher;
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final IndexShard indexShard;
private final IndexService indexService;
private final ContextIndexSearcher searcher;
private final DfsSearchResult dfsResult;
private final QuerySearchResult queryResult;
private final FetchSearchResult fetchResult;
// lazy initialized only if needed
private ScanContext scanContext;
private float queryBoost = 1.0f;
// timeout in millis
private long timeoutInMillis = -1;
private List<String> groupStats;
private Scroll scroll;
private boolean explain;
private boolean version = false; // by default, we don't return versions
private List<String> fieldNames;
private ScriptFieldsContext scriptFields;
private PartialFieldsContext partialFields;
private FetchSourceContext fetchSourceContext;
private int from = -1;
private int size = -1;
private Sort sort;
private Float minimumScore;
private boolean trackScores = false; // when sorting, track scores as well...
private ParsedQuery originalQuery;
private Query query;
private ParsedFilter filter;
private Filter aliasFilter;
private int[] docIdsToLoad;
private int docsIdsToLoadFrom;
private int docsIdsToLoadSize;
private SearchContextFacets facets;
private SearchContextHighlight highlight;
private SuggestionSearchContext suggest;
private RescoreSearchContext rescore;
private SearchLookup searchLookup;
private boolean queryRewritten;
private volatile long keepAlive;
private volatile long lastAccessTime;
private List<Rewrite> rewrites = null;
public SearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
ScriptService scriptService, CacheRecycler cacheRecycler) {
this.id = id;
this.request = request;
this.searchType = request.searchType();
this.shardTarget = shardTarget;
this.engineSearcher = engineSearcher;
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
this.dfsResult = new DfsSearchResult(id, shardTarget);
this.queryResult = new QuerySearchResult(id, shardTarget);
this.fetchResult = new FetchSearchResult(id, shardTarget);
this.indexShard = indexShard;
this.indexService = indexService;
this.searcher = new ContextIndexSearcher(this, engineSearcher);
// initialize the filtering alias based on the provided filters
aliasFilter = indexService.aliasesService().aliasFilter(request.filteringAliases());
}
@Override
public boolean release() throws ElasticSearchException {
if (scanContext != null) {
scanContext.clear();
}
// clear any per-context state held by query rewrites
if (rewrites != null) {
for (Rewrite rewrite : rewrites) {
rewrite.contextClear();
}
}
searcher.release();
engineSearcher.release();
return true;
}
/**
* Should be called before executing the main query and after all other parameters have been set.
*/
public void preProcess() {
if (query() == null) {
parsedQuery(ParsedQuery.parsedMatchAllQuery());
}
if (queryBoost() != 1.0f) {
parsedQuery(new ParsedQuery(new FunctionScoreQuery(query(), new BoostScoreFunction(queryBoost)), parsedQuery()));
}
Filter searchFilter = searchFilter(types());
if (searchFilter != null) {
if (Queries.isConstantMatchAllQuery(query())) {
Query q = new XConstantScoreQuery(searchFilter);
q.setBoost(query().getBoost());
parsedQuery(new ParsedQuery(q, parsedQuery()));
} else {
parsedQuery(new ParsedQuery(new XFilteredQuery(query(), searchFilter), parsedQuery()));
}
}
}
public abstract void preProcess();
public Filter searchFilter(String[] types) {
Filter filter = mapperService().searchFilter(types);
if (filter == null) {
return aliasFilter;
} else {
filter = filterCache().cache(filter);
if (aliasFilter != null) {
return new AndFilter(ImmutableList.of(filter, aliasFilter));
}
return filter;
}
}
public abstract Filter searchFilter(String[] types);
public long id() {
return this.id;
}
public abstract long id();
public ShardSearchRequest request() {
return this.request;
}
public abstract ShardSearchRequest request();
public SearchType searchType() {
return this.searchType;
}
public abstract SearchType searchType();
public SearchContext searchType(SearchType searchType) {
this.searchType = searchType;
return this;
}
public abstract SearchContext searchType(SearchType searchType);
public SearchShardTarget shardTarget() {
return this.shardTarget;
}
public abstract SearchShardTarget shardTarget();
public int numberOfShards() {
return request.numberOfShards();
}
public abstract int numberOfShards();
public boolean hasTypes() {
return request.types() != null && request.types().length > 0;
}
public abstract boolean hasTypes();
public String[] types() {
return request.types();
}
public abstract String[] types();
public float queryBoost() {
return queryBoost;
}
public abstract float queryBoost();
public SearchContext queryBoost(float queryBoost) {
this.queryBoost = queryBoost;
return this;
}
public abstract SearchContext queryBoost(float queryBoost);
public long nowInMillis() {
return request.nowInMillis();
}
public abstract long nowInMillis();
public Scroll scroll() {
return this.scroll;
}
public abstract Scroll scroll();
public SearchContext scroll(Scroll scroll) {
this.scroll = scroll;
return this;
}
public abstract SearchContext scroll(Scroll scroll);
public SearchContextFacets facets() {
return facets;
}
public abstract SearchContextFacets facets();
public SearchContext facets(SearchContextFacets facets) {
this.facets = facets;
return this;
}
public abstract SearchContext facets(SearchContextFacets facets);
public SearchContextHighlight highlight() {
return highlight;
}
public abstract SearchContextHighlight highlight();
public void highlight(SearchContextHighlight highlight) {
this.highlight = highlight;
}
public abstract void highlight(SearchContextHighlight highlight);
public SuggestionSearchContext suggest() {
return suggest;
}
public abstract SuggestionSearchContext suggest();
public void suggest(SuggestionSearchContext suggest) {
this.suggest = suggest;
}
public abstract void suggest(SuggestionSearchContext suggest);
public RescoreSearchContext rescore() {
return this.rescore;
}
public abstract RescoreSearchContext rescore();
public void rescore(RescoreSearchContext rescore) {
this.rescore = rescore;
}
public abstract void rescore(RescoreSearchContext rescore);
public boolean hasScriptFields() {
return scriptFields != null;
}
public abstract boolean hasScriptFields();
public ScriptFieldsContext scriptFields() {
if (scriptFields == null) {
scriptFields = new ScriptFieldsContext();
}
return this.scriptFields;
}
public abstract ScriptFieldsContext scriptFields();
public boolean hasPartialFields() {
return partialFields != null;
}
public abstract boolean hasPartialFields();
public PartialFieldsContext partialFields() {
if (partialFields == null) {
partialFields = new PartialFieldsContext();
}
return this.partialFields;
}
public abstract PartialFieldsContext partialFields();
/**
 * A shortcut to check whether a fetchSourceContext exists and requests the source.
 *
 * @return true if a fetch source context is set and the source is requested
 */
public boolean sourceRequested() {
return fetchSourceContext != null && fetchSourceContext.fetchSource();
}
public abstract boolean sourceRequested();
public boolean hasFetchSourceContext() {
return fetchSourceContext != null;
}
public abstract boolean hasFetchSourceContext();
public FetchSourceContext fetchSourceContext() {
return this.fetchSourceContext;
}
public abstract FetchSourceContext fetchSourceContext();
public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) {
this.fetchSourceContext = fetchSourceContext;
return this;
}
public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);
public ContextIndexSearcher searcher() {
return this.searcher;
}
public abstract ContextIndexSearcher searcher();
public IndexShard indexShard() {
return this.indexShard;
}
public abstract IndexShard indexShard();
public MapperService mapperService() {
return indexService.mapperService();
}
public abstract MapperService mapperService();
public AnalysisService analysisService() {
return indexService.analysisService();
}
public abstract AnalysisService analysisService();
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
public abstract IndexQueryParserService queryParserService();
public SimilarityService similarityService() {
return indexService.similarityService();
}
public abstract SimilarityService similarityService();
public ScriptService scriptService() {
return scriptService;
}
public abstract ScriptService scriptService();
public CacheRecycler cacheRecycler() {
return cacheRecycler;
}
public abstract CacheRecycler cacheRecycler();
public FilterCache filterCache() {
return indexService.cache().filter();
}
public abstract FilterCache filterCache();
public DocSetCache docSetCache() {
return indexService.cache().docSet();
}
public abstract DocSetCache docSetCache();
public IndexFieldDataService fieldData() {
return indexService.fieldData();
}
public abstract IndexFieldDataService fieldData();
public IdCache idCache() {
return indexService.cache().idCache();
}
public abstract IdCache idCache();
public long timeoutInMillis() {
return timeoutInMillis;
}
public abstract long timeoutInMillis();
public void timeoutInMillis(long timeoutInMillis) {
this.timeoutInMillis = timeoutInMillis;
}
public abstract void timeoutInMillis(long timeoutInMillis);
public SearchContext minimumScore(float minimumScore) {
this.minimumScore = minimumScore;
return this;
}
public abstract SearchContext minimumScore(float minimumScore);
public Float minimumScore() {
return this.minimumScore;
}
public abstract Float minimumScore();
public SearchContext sort(Sort sort) {
this.sort = sort;
return this;
}
public abstract SearchContext sort(Sort sort);
public Sort sort() {
return this.sort;
}
public abstract Sort sort();
public SearchContext trackScores(boolean trackScores) {
this.trackScores = trackScores;
return this;
}
public abstract SearchContext trackScores(boolean trackScores);
public boolean trackScores() {
return this.trackScores;
}
public abstract boolean trackScores();
public SearchContext parsedFilter(ParsedFilter filter) {
this.filter = filter;
return this;
}
public abstract SearchContext parsedFilter(ParsedFilter filter);
public ParsedFilter parsedFilter() {
return this.filter;
}
public abstract ParsedFilter parsedFilter();
public Filter aliasFilter() {
return aliasFilter;
}
public abstract Filter aliasFilter();
public SearchContext parsedQuery(ParsedQuery query) {
queryRewritten = false;
this.originalQuery = query;
this.query = query.query();
return this;
}
public abstract SearchContext parsedQuery(ParsedQuery query);
public ParsedQuery parsedQuery() {
return this.originalQuery;
}
public abstract ParsedQuery parsedQuery();
/**
* The query to execute, might be rewritten.
*/
public Query query() {
return this.query;
}
public abstract Query query();
/**
* Has the query been rewritten already?
*/
public boolean queryRewritten() {
return queryRewritten;
}
public abstract boolean queryRewritten();
/**
* Rewrites the query and updates it. Only happens once.
*/
public SearchContext updateRewriteQuery(Query rewriteQuery) {
query = rewriteQuery;
queryRewritten = true;
return this;
}
public abstract SearchContext updateRewriteQuery(Query rewriteQuery);
public int from() {
return from;
}
public abstract int from();
public SearchContext from(int from) {
this.from = from;
return this;
}
public abstract SearchContext from(int from);
public int size() {
return size;
}
public abstract int size();
public SearchContext size(int size) {
this.size = size;
return this;
}
public abstract SearchContext size(int size);
public boolean hasFieldNames() {
return fieldNames != null;
}
public abstract boolean hasFieldNames();
public List<String> fieldNames() {
if (fieldNames == null) {
fieldNames = Lists.newArrayList();
}
return fieldNames;
}
public abstract List<String> fieldNames();
public void emptyFieldNames() {
this.fieldNames = ImmutableList.of();
}
public abstract void emptyFieldNames();
public boolean explain() {
return explain;
}
public abstract boolean explain();
public void explain(boolean explain) {
this.explain = explain;
}
public abstract void explain(boolean explain);
@Nullable
public List<String> groupStats() {
return this.groupStats;
}
public abstract List<String> groupStats();
public void groupStats(List<String> groupStats) {
this.groupStats = groupStats;
}
public abstract void groupStats(List<String> groupStats);
public boolean version() {
return version;
}
public abstract boolean version();
public void version(boolean version) {
this.version = version;
}
public abstract void version(boolean version);
public int[] docIdsToLoad() {
return docIdsToLoad;
}
public abstract int[] docIdsToLoad();
public int docIdsToLoadFrom() {
return docsIdsToLoadFrom;
}
public abstract int docIdsToLoadFrom();
public int docIdsToLoadSize() {
return docsIdsToLoadSize;
}
public abstract int docIdsToLoadSize();
public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
this.docIdsToLoad = docIdsToLoad;
this.docsIdsToLoadFrom = docsIdsToLoadFrom;
this.docsIdsToLoadSize = docsIdsToLoadSize;
return this;
}
public abstract SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize);
public void accessed(long accessTime) {
this.lastAccessTime = accessTime;
}
public abstract void accessed(long accessTime);
public long lastAccessTime() {
return this.lastAccessTime;
}
public abstract long lastAccessTime();
public long keepAlive() {
return this.keepAlive;
}
public abstract long keepAlive();
public void keepAlive(long keepAlive) {
this.keepAlive = keepAlive;
}
public abstract void keepAlive(long keepAlive);
public SearchLookup lookup() {
// TODO: The types should take into account the parsing context in QueryParserContext...
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService(), fieldData(), request.types());
}
return searchLookup;
}
public abstract SearchLookup lookup();
public DfsSearchResult dfsResult() {
return dfsResult;
}
public abstract DfsSearchResult dfsResult();
public QuerySearchResult queryResult() {
return queryResult;
}
public abstract QuerySearchResult queryResult();
public FetchSearchResult fetchResult() {
return fetchResult;
}
public abstract FetchSearchResult fetchResult();
public void addRewrite(Rewrite rewrite) {
if (this.rewrites == null) {
this.rewrites = new ArrayList<Rewrite>();
}
this.rewrites.add(rewrite);
}
public abstract void addRewrite(Rewrite rewrite);
public List<Rewrite> rewrites() {
return this.rewrites;
}
public abstract List<Rewrite> rewrites();
public ScanContext scanContext() {
if (scanContext == null) {
scanContext = new ScanContext();
}
return scanContext;
}
public abstract ScanContext scanContext();
public MapperService.SmartNameFieldMappers smartFieldMappers(String name) {
return mapperService().smartName(name, request.types());
}
public abstract MapperService.SmartNameFieldMappers smartFieldMappers(String name);
public FieldMappers smartNameFieldMappers(String name) {
return mapperService().smartNameFieldMappers(name, request.types());
}
public abstract FieldMappers smartNameFieldMappers(String name);
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, request.types());
}
public abstract FieldMapper smartNameFieldMapper(String name);
public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
return mapperService().smartNameObjectMapper(name, request.types());
}
public abstract MapperService.SmartNameObjectMapper smartNameObjectMapper(String name);
}
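The net effect of this file is that SearchContext becomes a pure contract: DefaultSearchContext (above) keeps the behaviour that used to live here, and other callers, the percolator in particular, can now supply a context that implements only the slice they need. A minimal, hedged illustration of that idea (not the commit's actual PercolateContext; placed alongside SearchContext so it reuses the imports above):

// Supports just what HighlightPhase touches: the highlight settings and the
// per-query parsedQuery swap. Declared abstract so the remaining SearchContext
// methods need not be stubbed out here; a real subclass would implement or
// explicitly reject them.
abstract class HighlightOnlyContext extends SearchContext {
    private SearchContextHighlight highlight;
    private ParsedQuery parsedQuery;
    private Query query;

    @Override
    public SearchContextHighlight highlight() {
        return highlight;
    }

    @Override
    public void highlight(SearchContextHighlight highlight) {
        this.highlight = highlight;
    }

    @Override
    public SearchContext parsedQuery(ParsedQuery parsedQuery) {
        this.parsedQuery = parsedQuery;
        this.query = parsedQuery.query();
        return this;
    }

    @Override
    public ParsedQuery parsedQuery() {
        return parsedQuery;
    }

    @Override
    public Query query() {
        return query;
    }
}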

View File

@@ -262,7 +262,7 @@ public class RecoveryPercolatorTests extends AbstractNodesTests {
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", 100).endObject().endObject())
.execute().actionGet();
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(response.getMatches()[0].id().string(), equalTo("100"));
assertThat(response.getMatches()[0].getId().string(), equalTo("100"));
}
@Test

View File

@@ -28,6 +28,7 @@ import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.percolate.PercolateSourceBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IgnoreIndices;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ImmutableSettings;
@@ -39,6 +40,8 @@ import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.factor.FactorBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.test.integration.AbstractSharedClusterTest;
import org.junit.Test;
@@ -1240,10 +1243,10 @@ public class SimplePercolatorTests extends AbstractSharedClusterTest {
.execute().actionGet();
assertNoFailures(response);
assertThat(response.getCount(), equalTo(2l));
assertThat(response.getMatches()[0].id().string(), equalTo("2"));
assertThat(response.getMatches()[0].score(), equalTo(2f));
assertThat(response.getMatches()[1].id().string(), equalTo("1"));
assertThat(response.getMatches()[1].score(), equalTo(1f));
assertThat(response.getMatches()[0].getId().string(), equalTo("2"));
assertThat(response.getMatches()[0].getScore(), equalTo(2f));
assertThat(response.getMatches()[1].getId().string(), equalTo("1"));
assertThat(response.getMatches()[1].getScore(), equalTo(1f));
response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type")
.setSort(true)
@@ -1295,6 +1298,163 @@
assertThat(response.getCount(), equalTo(0l));
}
@Test
public void testPercolatorWithHighlighting() throws Exception {
Client client = cluster().nodeClient();
client.admin().indices().prepareCreate("test")
.setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 2))
.execute().actionGet();
ensureGreen();
if (randomBoolean()) {
client.admin().indices().preparePutMapping("test").setType("type")
.setSource(
jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field1").field("type", "string").field("term_vector", "with_positions_offsets").endObject()
.endObject()
.endObject().endObject()
)
.execute().actionGet();
}
logger.info("--> register a queries");
client.prepareIndex("test", "_percolator", "1")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "brown fox")).endObject())
.execute().actionGet();
client.prepareIndex("test", "_percolator", "2")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "lazy dog")).endObject())
.execute().actionGet();
client.prepareIndex("test", "_percolator", "3")
.setSource(jsonBuilder().startObject().field("query", termQuery("field1", "jumps")).endObject())
.execute().actionGet();
client.prepareIndex("test", "_percolator", "4")
.setSource(jsonBuilder().startObject().field("query", termQuery("field1", "dog")).endObject())
.execute().actionGet();
client.prepareIndex("test", "_percolator", "5")
.setSource(jsonBuilder().startObject().field("query", termQuery("field1", "fox")).endObject())
.execute().actionGet();
logger.info("--> Percolate doc with field1=The quick brown fox jumps over the lazy dog");
PercolateResponse response = client.preparePercolate()
.setIndices("test").setDocumentType("type")
.setSize(5)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))
.setHighlightBuilder(new HighlightBuilder().field("field1"))
.execute().actionGet();
assertNoFailures(response);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
PercolateResponse.Match[] matches = response.getMatches();
Arrays.sort(matches, new Comparator<PercolateResponse.Match>() {
@Override
public int compare(PercolateResponse.Match a, PercolateResponse.Match b) {
return a.getId().compareTo(b.getId());
}
});
assertThat(matches[0].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps over the lazy dog"));
assertThat(matches[1].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the <em>lazy</em> <em>dog</em>"));
assertThat(matches[2].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em> over the lazy dog"));
assertThat(matches[3].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy <em>dog</em>"));
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));
// Anything with percolate query isn't realtime
client.admin().indices().prepareRefresh("test").execute().actionGet();
logger.info("--> Query percolate doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
.setIndices("test").setDocumentType("type")
.setSize(5)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))
.setHighlightBuilder(new HighlightBuilder().field("field1"))
.setPercolateQuery(matchAllQuery())
.execute().actionGet();
assertNoFailures(response);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
matches = response.getMatches();
Arrays.sort(matches, new Comparator<PercolateResponse.Match>() {
@Override
public int compare(PercolateResponse.Match a, PercolateResponse.Match b) {
return a.getId().compareTo(b.getId());
}
});
assertThat(matches[0].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps over the lazy dog"));
assertThat(matches[1].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the <em>lazy</em> <em>dog</em>"));
assertThat(matches[2].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em> over the lazy dog"));
assertThat(matches[3].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy <em>dog</em>"));
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));
logger.info("--> Query percolate with score for doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
.setIndices("test").setDocumentType("type")
.setSize(5)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))
.setHighlightBuilder(new HighlightBuilder().field("field1"))
.setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new FactorBuilder().boostFactor(5.5f)))
.setScore(true)
.execute().actionGet();
assertNoFailures(response);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
matches = response.getMatches();
Arrays.sort(matches, new Comparator<PercolateResponse.Match>() {
@Override
public int compare(PercolateResponse.Match a, PercolateResponse.Match b) {
return a.getId().compareTo(b.getId());
}
});
assertThat(matches[0].getScore(), equalTo(5.5f));
assertThat(matches[0].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps over the lazy dog"));
assertThat(matches[1].getScore(), equalTo(5.5f));
assertThat(matches[1].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the <em>lazy</em> <em>dog</em>"));
assertThat(matches[2].getScore(), equalTo(5.5f));
assertThat(matches[2].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em> over the lazy dog"));
assertThat(matches[3].getScore(), equalTo(5.5f));
assertThat(matches[3].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy <em>dog</em>"));
assertThat(matches[4].getScore(), equalTo(5.5f));
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));
logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog");
response = client.preparePercolate()
.setIndices("test").setDocumentType("type")
.setSize(5)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))
.setHighlightBuilder(new HighlightBuilder().field("field1"))
.setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new FactorBuilder().boostFactor(5.5f)))
.setSort(true)
.execute().actionGet();
assertNoFailures(response);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
matches = response.getMatches();
Arrays.sort(matches, new Comparator<PercolateResponse.Match>() {
@Override
public int compare(PercolateResponse.Match a, PercolateResponse.Match b) {
return a.getId().compareTo(b.getId());
}
});
assertThat(matches[0].getScore(), equalTo(5.5f));
assertThat(matches[0].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps over the lazy dog"));
assertThat(matches[1].getScore(), equalTo(5.5f));
assertThat(matches[1].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the <em>lazy</em> <em>dog</em>"));
assertThat(matches[2].getScore(), equalTo(5.5f));
assertThat(matches[2].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em> over the lazy dog"));
assertThat(matches[3].getScore(), equalTo(5.5f));
assertThat(matches[3].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy <em>dog</em>"));
assertThat(matches[4].getScore(), equalTo(5.5f));
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));
}
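Condensed from the assertions above, the consumption pattern on the client side: each Match can now carry highlight fields keyed by field name, exactly like a search hit (hedged usage sketch):

for (PercolateResponse.Match match : response.getMatches()) {
    Map<String, HighlightField> fields = match.getHighlightFields();
    HighlightField field1 = fields.get("field1");
    if (field1 != null) {
        // e.g. "The quick <em>brown</em> <em>fox</em> jumps over the lazy dog"
        String snippet = field1.fragments()[0].string();
        logger.info("query [{}] highlighted: [{}]", match.getId().string(), snippet);
    }
}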
public static String[] convertFromTextArray(PercolateResponse.Match[] matches, String index) {
if (matches.length == 0) {
return Strings.EMPTY_ARRAY;
@@ -1302,7 +1462,7 @@ public class SimplePercolatorTests extends AbstractSharedClusterTest {
String[] strings = new String[matches.length];
for (int i = 0; i < matches.length; i++) {
assert index.equals(matches[i].getIndex().string());
strings[i] = matches[i].id().string();
strings[i] = matches[i].getId().string();
}
return strings;
}