percolator: renamed `percolator` query to `percolate` query

This commit is contained in:
Martijn van Groningen 2016-04-19 15:59:09 +02:00
parent a672ea7ccc
commit 81449fc912
24 changed files with 162 additions and 174 deletions

View File

@ -45,7 +45,7 @@ import java.util.Map;
public class PercolateResponse extends BroadcastResponse implements Iterable<PercolateResponse.Match>, ToXContent { public class PercolateResponse extends BroadcastResponse implements Iterable<PercolateResponse.Match>, ToXContent {
public static final Match[] EMPTY = new Match[0]; public static final Match[] EMPTY = new Match[0];
// PercolatorQuery emits this score if no 'query' is defined in the percolate request // PercolateQuery emits this score if no 'query' is defined in the percolate request
public final static float NO_SCORE = 0.0f; public final static float NO_SCORE = 0.0f;
private long tookInMillis; private long tookInMillis;

View File

@ -42,7 +42,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.PercolatorQueryBuilder; import org.elasticsearch.index.query.PercolateQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
@ -57,7 +57,6 @@ import org.elasticsearch.transport.TransportService;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.List; import java.util.List;
public class TransportPercolateAction extends HandledTransportAction<PercolateRequest, PercolateResponse> { public class TransportPercolateAction extends HandledTransportAction<PercolateRequest, PercolateResponse> {
@ -199,22 +198,22 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
searchSource.field("size", 0); searchSource.field("size", 0);
} }
PercolatorQueryBuilder percolatorQueryBuilder = PercolateQueryBuilder percolateQueryBuilder =
new PercolatorQueryBuilder("query", percolateRequest.documentType(), documentSource); new PercolateQueryBuilder("query", percolateRequest.documentType(), documentSource);
if (querySource != null) { if (querySource != null) {
try (XContentParser parser = XContentHelper.createParser(querySource)) { try (XContentParser parser = XContentHelper.createParser(querySource)) {
QueryParseContext queryParseContext = new QueryParseContext(queryRegistry, parser, parseFieldMatcher); QueryParseContext queryParseContext = new QueryParseContext(queryRegistry, parser, parseFieldMatcher);
QueryBuilder<?> queryBuilder = queryParseContext.parseInnerQueryBuilder(); QueryBuilder<?> queryBuilder = queryParseContext.parseInnerQueryBuilder();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must(queryBuilder); boolQueryBuilder.must(queryBuilder);
boolQueryBuilder.filter(percolatorQueryBuilder); boolQueryBuilder.filter(percolateQueryBuilder);
searchSource.field("query", boolQueryBuilder); searchSource.field("query", boolQueryBuilder);
} }
} else { } else {
// wrapping in a constant score query with boost 0 for bwc reason. // wrapping in a constant score query with boost 0 for bwc reason.
// percolator api didn't emit scores before and never included scores // percolator api didn't emit scores before and never included scores
// for how well percolator queries matched with the document being percolated // for how well percolator queries matched with the document being percolated
searchSource.field("query", new ConstantScoreQueryBuilder(percolatorQueryBuilder).boost(0f)); searchSource.field("query", new ConstantScoreQueryBuilder(percolateQueryBuilder).boost(0f));
} }
searchSource.endObject(); searchSource.endObject();

View File

@ -31,7 +31,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.PercolatorQuery; import org.elasticsearch.index.query.PercolateQuery;
import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.highlight.HighlightPhase; import org.elasticsearch.search.highlight.HighlightPhase;
@ -44,8 +44,8 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
// Highlighting in the case of the percolator query is a bit different, because the PercolatorQuery itself doesn't get highlighted, // Highlighting in the case of the percolate query is a bit different, because the PercolateQuery itself doesn't get highlighted,
// but the source of the PercolatorQuery gets highlighted by each hit with type '.percolator' (percolator queries). // but the source of the PercolateQuery gets highlighted by each hit with type '.percolator' (percolator queries).
public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { public class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
private final HighlightPhase highlightPhase; private final HighlightPhase highlightPhase;
@ -62,20 +62,20 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
@Override @Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) { public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
PercolatorQuery percolatorQuery = locatePercolatorQuery(context.query()); PercolateQuery percolateQuery = locatePercolatorQuery(context.query());
if (percolatorQuery == null) { if (percolateQuery == null) {
// shouldn't happen as we checked for the existence of a percolator query in hitsExecutionNeeded(...) // shouldn't happen as we checked for the existence of a percolator query in hitsExecutionNeeded(...)
throw new IllegalStateException("couldn't locate percolator query"); throw new IllegalStateException("couldn't locate percolator query");
} }
List<LeafReaderContext> ctxs = context.searcher().getIndexReader().leaves(); List<LeafReaderContext> ctxs = context.searcher().getIndexReader().leaves();
PercolatorQueryCache queriesRegistry = context.percolatorQueryCache(); PercolatorQueryCache queriesRegistry = context.percolatorQueryCache();
IndexSearcher percolatorIndexSearcher = percolatorQuery.getPercolatorIndexSearcher(); IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0); LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0);
FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
SubSearchContext subSearchContext = SubSearchContext subSearchContext =
createSubSearchContext(context, percolatorLeafReaderContext, percolatorQuery.getDocumentSource()); createSubSearchContext(context, percolatorLeafReaderContext, percolateQuery.getDocumentSource());
for (InternalSearchHit hit : hits) { for (InternalSearchHit hit : hits) {
LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs));
@ -84,7 +84,7 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
if (query != null) { if (query != null) {
subSearchContext.parsedQuery(new ParsedQuery(query)); subSearchContext.parsedQuery(new ParsedQuery(query));
hitContext.reset( hitContext.reset(
new InternalSearchHit(0, "unknown", new Text(percolatorQuery.getDocumentType()), Collections.emptyMap()), new InternalSearchHit(0, "unknown", new Text(percolateQuery.getDocumentType()), Collections.emptyMap()),
percolatorLeafReaderContext, 0, percolatorIndexSearcher percolatorLeafReaderContext, 0, percolatorIndexSearcher
); );
hitContext.cache().clear(); hitContext.cache().clear();
@ -108,12 +108,12 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
public void hitExecute(SearchContext context, HitContext hitContext) { public void hitExecute(SearchContext context, HitContext hitContext) {
} }
static PercolatorQuery locatePercolatorQuery(Query query) { static PercolateQuery locatePercolatorQuery(Query query) {
if (query instanceof PercolatorQuery) { if (query instanceof PercolateQuery) {
return (PercolatorQuery) query; return (PercolateQuery) query;
} else if (query instanceof BooleanQuery) { } else if (query instanceof BooleanQuery) {
for (BooleanClause clause : ((BooleanQuery) query).clauses()) { for (BooleanClause clause : ((BooleanQuery) query).clauses()) {
PercolatorQuery result = locatePercolatorQuery(clause.getQuery()); PercolateQuery result = locatePercolatorQuery(clause.getQuery());
if (result != null) { if (result != null) {
return result; return result;
} }

View File

@ -55,7 +55,7 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.query.PercolatorQuery; import org.elasticsearch.index.query.PercolateQuery;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
@ -73,7 +73,7 @@ import static org.elasticsearch.index.percolator.PercolatorFieldMapper.Percolato
import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery; import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery;
public final class PercolatorQueryCache extends AbstractIndexComponent public final class PercolatorQueryCache extends AbstractIndexComponent
implements Closeable, LeafReader.CoreClosedListener, PercolatorQuery.QueryRegistry { implements Closeable, LeafReader.CoreClosedListener, PercolateQuery.QueryRegistry {
public final static Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = public final static Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING =
Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope);

View File

@ -23,14 +23,11 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
@ -45,7 +42,7 @@ import java.util.Set;
import static org.apache.lucene.search.BooleanClause.Occur.FILTER; import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
public final class PercolatorQuery extends Query implements Accountable { public final class PercolateQuery extends Query implements Accountable {
// cost of matching the query against the document, arbitrary as it would be really complex to estimate // cost of matching the query against the document, arbitrary as it would be really complex to estimate
public static final float MATCH_COST = 1000; public static final float MATCH_COST = 1000;
@ -93,7 +90,7 @@ public final class PercolatorQuery extends Query implements Accountable {
this.percolateTypeQuery = Objects.requireNonNull(percolateTypeQuery); this.percolateTypeQuery = Objects.requireNonNull(percolateTypeQuery);
} }
public PercolatorQuery build() { public PercolateQuery build() {
if (percolateTypeQuery != null && queriesMetaDataQuery != null) { if (percolateTypeQuery != null && queriesMetaDataQuery != null) {
throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata"); throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata");
} }
@ -107,7 +104,7 @@ public final class PercolatorQuery extends Query implements Accountable {
builder.add(queriesMetaDataQuery, FILTER); builder.add(queriesMetaDataQuery, FILTER);
} }
return new PercolatorQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher); return new PercolateQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher);
} }
} }
@ -118,7 +115,7 @@ public final class PercolatorQuery extends Query implements Accountable {
private final Query percolatorQueriesQuery; private final Query percolatorQueriesQuery;
private final IndexSearcher percolatorIndexSearcher; private final IndexSearcher percolatorIndexSearcher;
private PercolatorQuery(String documentType, QueryRegistry queryRegistry, BytesReference documentSource, private PercolateQuery(String documentType, QueryRegistry queryRegistry, BytesReference documentSource,
Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) { Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) {
this.documentType = documentType; this.documentType = documentType;
this.documentSource = documentSource; this.documentSource = documentSource;
@ -131,7 +128,7 @@ public final class PercolatorQuery extends Query implements Accountable {
public Query rewrite(IndexReader reader) throws IOException { public Query rewrite(IndexReader reader) throws IOException {
Query rewritten = percolatorQueriesQuery.rewrite(reader); Query rewritten = percolatorQueriesQuery.rewrite(reader);
if (rewritten != percolatorQueriesQuery) { if (rewritten != percolatorQueriesQuery) {
return new PercolatorQuery(documentType, queryRegistry, documentSource, rewritten, percolatorIndexSearcher); return new PercolateQuery(documentType, queryRegistry, documentSource, rewritten, percolatorIndexSearcher);
} else { } else {
return this; return this;
} }
@ -157,14 +154,14 @@ public final class PercolatorQuery extends Query implements Accountable {
QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext);
Query query = percolatorQueries.getQuery(docId); Query query = percolatorQueries.getQuery(docId);
Explanation detail = percolatorIndexSearcher.explain(query, 0); Explanation detail = percolatorIndexSearcher.explain(query, 0);
return Explanation.match(scorer.score(), "PercolatorQuery", detail); return Explanation.match(scorer.score(), "PercolateQuery", detail);
} else { } else {
return Explanation.match(scorer.score(), "PercolatorQuery"); return Explanation.match(scorer.score(), "PercolateQuery");
} }
} }
} }
} }
return Explanation.noMatch("PercolatorQuery"); return Explanation.noMatch("PercolateQuery");
} }
@Override @Override
@ -247,7 +244,7 @@ public final class PercolatorQuery extends Query implements Accountable {
if (o == null || getClass() != o.getClass()) return false; if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false; if (!super.equals(o)) return false;
PercolatorQuery that = (PercolatorQuery) o; PercolateQuery that = (PercolateQuery) o;
if (!documentType.equals(that.documentType)) return false; if (!documentType.equals(that.documentType)) return false;
return documentSource.equals(that.documentSource); return documentSource.equals(that.documentSource);
@ -264,7 +261,7 @@ public final class PercolatorQuery extends Query implements Accountable {
@Override @Override
public String toString(String s) { public String toString(String s) {
return "PercolatorQuery{document_type={" + documentType + "},document_source={" + documentSource.toUtf8() + return "PercolateQuery{document_type={" + documentType + "},document_source={" + documentSource.toUtf8() +
"},inner={" + percolatorQueriesQuery.toString(s) + "}}"; "},inner={" + percolatorQueriesQuery.toString(s) + "}}";
} }

View File

@ -67,9 +67,9 @@ import java.util.Objects;
import static org.elasticsearch.index.mapper.SourceToParse.source; import static org.elasticsearch.index.mapper.SourceToParse.source;
public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQueryBuilder> { public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
public static final String NAME = "percolator"; public static final String NAME = "percolate";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
static final ParseField DOCUMENT_FIELD = new ParseField("document"); static final ParseField DOCUMENT_FIELD = new ParseField("document");
@ -93,7 +93,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
private final String indexedDocumentPreference; private final String indexedDocumentPreference;
private final Long indexedDocumentVersion; private final Long indexedDocumentVersion;
public PercolatorQueryBuilder(String field, String documentType, BytesReference document) { public PercolateQueryBuilder(String field, String documentType, BytesReference document) {
if (field == null) { if (field == null) {
throw new IllegalArgumentException("[field] is a required argument"); throw new IllegalArgumentException("[field] is a required argument");
} }
@ -114,7 +114,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
indexedDocumentVersion = null; indexedDocumentVersion = null;
} }
public PercolatorQueryBuilder(String field, String documentType, String indexedDocumentIndex, String indexedDocumentType, public PercolateQueryBuilder(String field, String documentType, String indexedDocumentIndex, String indexedDocumentType,
String indexedDocumentId, String indexedDocumentRouting, String indexedDocumentPreference, String indexedDocumentId, String indexedDocumentRouting, String indexedDocumentPreference,
Long indexedDocumentVersion) { Long indexedDocumentVersion) {
if (field == null) { if (field == null) {
@ -146,7 +146,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
/** /**
* Read from a stream. * Read from a stream.
*/ */
public PercolatorQueryBuilder(StreamInput in) throws IOException { public PercolateQueryBuilder(StreamInput in) throws IOException {
super(in); super(in);
field = in.readString(); field = in.readString();
documentType = in.readString(); documentType = in.readString();
@ -222,7 +222,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
builder.endObject(); builder.endObject();
} }
public static PercolatorQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { public static PercolateQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser(); XContentParser parser = parseContext.parser();
float boost = AbstractQueryBuilder.DEFAULT_BOOST; float boost = AbstractQueryBuilder.DEFAULT_BOOST;
@ -253,7 +253,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
source = builder.bytes(); source = builder.bytes();
} }
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME + throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support [" + token + "]"); "] query does not support [" + token + "]");
} }
} else if (token.isValue()) { } else if (token.isValue()) {
@ -278,28 +278,28 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text(); queryName = parser.text();
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME + throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]"); "] query does not support [" + currentFieldName + "]");
} }
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME + throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
"] query does not support [" + token + "]"); "] query does not support [" + token + "]");
} }
} }
if (documentType == null) { if (documentType == null) {
throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query is missing required [" + throw new IllegalArgumentException("[" + PercolateQueryBuilder.NAME + "] query is missing required [" +
DOCUMENT_TYPE_FIELD.getPreferredName() + "] parameter"); DOCUMENT_TYPE_FIELD.getPreferredName() + "] parameter");
} }
PercolatorQueryBuilder queryBuilder; PercolateQueryBuilder queryBuilder;
if (source != null) { if (source != null) {
queryBuilder = new PercolatorQueryBuilder(field, documentType, source); queryBuilder = new PercolateQueryBuilder(field, documentType, source);
} else if (indexedDocumentId != null) { } else if (indexedDocumentId != null) {
queryBuilder = new PercolatorQueryBuilder(field, documentType, indexedDocumentIndex, indexedDocumentType, queryBuilder = new PercolateQueryBuilder(field, documentType, indexedDocumentIndex, indexedDocumentType,
indexedDocumentId, indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); indexedDocumentId, indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion);
} else { } else {
throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query, nothing to percolate"); throw new IllegalArgumentException("[" + PercolateQueryBuilder.NAME + "] query, nothing to percolate");
} }
queryBuilder.queryName(queryName); queryBuilder.queryName(queryName);
queryBuilder.boost(boost); queryBuilder.boost(boost);
@ -307,7 +307,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
} }
@Override @Override
protected boolean doEquals(PercolatorQueryBuilder other) { protected boolean doEquals(PercolateQueryBuilder other) {
return Objects.equals(field, other.field) return Objects.equals(field, other.field)
&& Objects.equals(documentType, other.documentType) && Objects.equals(documentType, other.documentType)
&& Objects.equals(document, other.document) && Objects.equals(document, other.document)
@ -345,7 +345,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
"indexed document [{}/{}/{}] couldn't be found", indexedDocumentIndex, indexedDocumentType, indexedDocumentId "indexed document [{}/{}/{}] couldn't be found", indexedDocumentIndex, indexedDocumentType, indexedDocumentId
); );
} }
return new PercolatorQueryBuilder(field, documentType, getResponse.getSourceAsBytesRef()); return new PercolateQueryBuilder(field, documentType, getResponse.getSourceAsBytesRef());
} }
@Override @Override
@ -396,7 +396,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
throw new QueryShardException(context, "no percolator query registry"); throw new QueryShardException(context, "no percolator query registry");
} }
PercolatorQuery.Builder builder = new PercolatorQuery.Builder( PercolateQuery.Builder builder = new PercolateQuery.Builder(
documentType, registry, document, docSearcher documentType, registry, document, docSearcher
); );
Settings indexSettings = registry.getIndexSettings().getSettings(); Settings indexSettings = registry.getIndexSettings().getSettings();

View File

@ -27,7 +27,6 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.support.InnerHitBuilder;
import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
@ -840,21 +839,21 @@ public abstract class QueryBuilders {
return new ExistsQueryBuilder(name); return new ExistsQueryBuilder(name);
} }
public static PercolatorQueryBuilder percolatorQuery(String queryField, String documentType, BytesReference document) { public static PercolateQueryBuilder percolateQuery(String queryField, String documentType, BytesReference document) {
return new PercolatorQueryBuilder(queryField, documentType, document); return new PercolateQueryBuilder(queryField, documentType, document);
} }
public static PercolatorQueryBuilder percolatorQuery(String queryField, String documentType, String indexedDocumentIndex, public static PercolateQueryBuilder percolateQuery(String queryField, String documentType, String indexedDocumentIndex,
String indexedDocumentType, String indexedDocumentId) { String indexedDocumentType, String indexedDocumentId) {
return new PercolatorQueryBuilder(queryField, documentType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, return new PercolateQueryBuilder(queryField, documentType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId,
null, null, null); null, null, null);
} }
public static PercolatorQueryBuilder percolatorQuery(String queryField, String documentType, String indexedDocumentIndex, public static PercolateQueryBuilder percolateQuery(String queryField, String documentType, String indexedDocumentIndex,
String indexedDocumentType, String indexedDocumentId, String indexedDocumentType, String indexedDocumentId,
String indexedDocumentRouting, String indexedDocumentPreference, String indexedDocumentRouting, String indexedDocumentPreference,
Long indexedDocumentVersion) { Long indexedDocumentVersion) {
return new PercolatorQueryBuilder(queryField, documentType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, return new PercolateQueryBuilder(queryField, documentType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId,
indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion);
} }

View File

@ -60,7 +60,7 @@ import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.NestedQueryBuilder;
import org.elasticsearch.index.query.ParentIdQueryBuilder; import org.elasticsearch.index.query.ParentIdQueryBuilder;
import org.elasticsearch.index.query.PercolatorQueryBuilder; import org.elasticsearch.index.query.PercolateQueryBuilder;
import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryParser;
@ -698,7 +698,7 @@ public class SearchModule extends AbstractModule {
registerQuery(ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent, ExistsQueryBuilder.QUERY_NAME_FIELD); registerQuery(ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent, ExistsQueryBuilder.QUERY_NAME_FIELD);
registerQuery(MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent, MatchNoneQueryBuilder.QUERY_NAME_FIELD); registerQuery(MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent, MatchNoneQueryBuilder.QUERY_NAME_FIELD);
registerQuery(ParentIdQueryBuilder::new, ParentIdQueryBuilder::fromXContent, ParentIdQueryBuilder.QUERY_NAME_FIELD); registerQuery(ParentIdQueryBuilder::new, ParentIdQueryBuilder::fromXContent, ParentIdQueryBuilder.QUERY_NAME_FIELD);
registerQuery(PercolatorQueryBuilder::new, PercolatorQueryBuilder::fromXContent, PercolatorQueryBuilder.QUERY_NAME_FIELD); registerQuery(PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent, PercolateQueryBuilder.QUERY_NAME_FIELD);
if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
registerQuery(GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent, GeoShapeQueryBuilder.QUERY_NAME_FIELD); registerQuery(GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent, GeoShapeQueryBuilder.QUERY_NAME_FIELD);
} }

View File

@ -25,7 +25,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchAllDocsQuery;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.index.query.PercolatorQuery; import org.elasticsearch.index.query.PercolateQuery;
import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.sameInstance;
public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
public void testHitsExecutionNeeded() { public void testHitsExecutionNeeded() {
PercolatorQuery percolatorQuery = new PercolatorQuery.Builder("", ctx -> null, new BytesArray("{}"), PercolateQuery percolateQuery = new PercolateQuery.Builder("", ctx -> null, new BytesArray("{}"),
Mockito.mock(IndexSearcher.class)) Mockito.mock(IndexSearcher.class))
.build(); .build();
@ -55,12 +55,12 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
() -> subFetchPhase.hitsExecute(searchContext, null)); () -> subFetchPhase.hitsExecute(searchContext, null));
assertThat(exception.getMessage(), equalTo("couldn't locate percolator query")); assertThat(exception.getMessage(), equalTo("couldn't locate percolator query"));
Mockito.when(searchContext.query()).thenReturn(percolatorQuery); Mockito.when(searchContext.query()).thenReturn(percolateQuery);
assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(true)); assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(true));
} }
public void testLocatePercolatorQuery() { public void testLocatePercolatorQuery() {
PercolatorQuery percolatorQuery = new PercolatorQuery.Builder("", ctx -> null, new BytesArray("{}"), PercolateQuery percolateQuery = new PercolateQuery.Builder("", ctx -> null, new BytesArray("{}"),
Mockito.mock(IndexSearcher.class)) Mockito.mock(IndexSearcher.class))
.build(); .build();
@ -68,18 +68,18 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
BooleanQuery.Builder bq = new BooleanQuery.Builder(); BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), nullValue()); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), nullValue());
bq.add(percolatorQuery, BooleanClause.Occur.FILTER); bq.add(percolateQuery, BooleanClause.Occur.FILTER);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), sameInstance(percolatorQuery)); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()), sameInstance(percolateQuery));
ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new MatchAllDocsQuery()); ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(new MatchAllDocsQuery());
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery), nullValue()); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery), nullValue());
constantScoreQuery = new ConstantScoreQuery(percolatorQuery); constantScoreQuery = new ConstantScoreQuery(percolateQuery);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery), sameInstance(percolatorQuery)); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(constantScoreQuery), sameInstance(percolateQuery));
BoostQuery boostQuery = new BoostQuery(new MatchAllDocsQuery(), 1f); BoostQuery boostQuery = new BoostQuery(new MatchAllDocsQuery(), 1f);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), nullValue()); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), nullValue());
boostQuery = new BoostQuery(percolatorQuery, 1f); boostQuery = new BoostQuery(percolateQuery, 1f);
assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), sameInstance(percolatorQuery)); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), sameInstance(percolateQuery));
} }
} }

View File

@ -45,9 +45,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -65,7 +63,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.PercolatorQuery; import org.elasticsearch.index.query.PercolateQuery;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
@ -84,8 +82,6 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery;
@ -224,7 +220,7 @@ public class PercolatorQueryCacheTests extends ESTestCase {
PercolatorQueryCacheStats stats = cache.getStats(shardId); PercolatorQueryCacheStats stats = cache.getStats(shardId);
assertThat(stats.getNumQueries(), equalTo(9L)); assertThat(stats.getNumQueries(), equalTo(9L));
PercolatorQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); PercolateQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0));
assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0"))));
assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "1")))); assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "1"))));
assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "2")))); assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "2"))));
@ -270,7 +266,7 @@ public class PercolatorQueryCacheTests extends ESTestCase {
listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader)));
assertThat(cache.getStats(shardId).getNumQueries(), equalTo(3L)); assertThat(cache.getStats(shardId).getNumQueries(), equalTo(3L));
PercolatorQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); PercolateQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0));
assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0"))));
leaf = cache.getQueries(indexReader.leaves().get(1)); leaf = cache.getQueries(indexReader.leaves().get(1));

View File

@ -47,10 +47,10 @@ import java.util.Set;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
public class PercolatorQueryBuilderTests extends AbstractQueryTestCase<PercolatorQueryBuilder> { public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQueryBuilder> {
private static final Set<String> SHUFFLE_PROTECTED_FIELDS = private static final Set<String> SHUFFLE_PROTECTED_FIELDS =
Collections.singleton(PercolatorQueryBuilder.DOCUMENT_FIELD.getPreferredName()); Collections.singleton(PercolateQueryBuilder.DOCUMENT_FIELD.getPreferredName());
private static String queryField; private static String queryField;
private static String docType; private static String docType;
@ -79,11 +79,11 @@ public class PercolatorQueryBuilderTests extends AbstractQueryTestCase<Percolato
} }
@Override @Override
protected PercolatorQueryBuilder doCreateTestQueryBuilder() { protected PercolateQueryBuilder doCreateTestQueryBuilder() {
return doCreateTestQueryBuilder(randomBoolean()); return doCreateTestQueryBuilder(randomBoolean());
} }
private PercolatorQueryBuilder doCreateTestQueryBuilder(boolean indexedDocument) { private PercolateQueryBuilder doCreateTestQueryBuilder(boolean indexedDocument) {
documentSource = randomSource(); documentSource = randomSource();
if (indexedDocument) { if (indexedDocument) {
indexedDocumentIndex = randomAsciiOfLength(4); indexedDocumentIndex = randomAsciiOfLength(4);
@ -92,10 +92,10 @@ public class PercolatorQueryBuilderTests extends AbstractQueryTestCase<Percolato
indexedDocumentRouting = randomAsciiOfLength(4); indexedDocumentRouting = randomAsciiOfLength(4);
indexedDocumentPreference = randomAsciiOfLength(4); indexedDocumentPreference = randomAsciiOfLength(4);
indexedDocumentVersion = (long) randomIntBetween(0, Integer.MAX_VALUE); indexedDocumentVersion = (long) randomIntBetween(0, Integer.MAX_VALUE);
return new PercolatorQueryBuilder(queryField, docType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, return new PercolateQueryBuilder(queryField, docType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId,
indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion);
} else { } else {
return new PercolatorQueryBuilder(queryField, docType, documentSource); return new PercolateQueryBuilder(queryField, docType, documentSource);
} }
} }
@ -130,26 +130,26 @@ public class PercolatorQueryBuilderTests extends AbstractQueryTestCase<Percolato
} }
@Override @Override
protected void doAssertLuceneQuery(PercolatorQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(PercolateQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, Matchers.instanceOf(PercolatorQuery.class)); assertThat(query, Matchers.instanceOf(PercolateQuery.class));
PercolatorQuery percolatorQuery = (PercolatorQuery) query; PercolateQuery percolateQuery = (PercolateQuery) query;
assertThat(percolatorQuery.getDocumentType(), Matchers.equalTo(queryBuilder.getDocumentType())); assertThat(percolateQuery.getDocumentType(), Matchers.equalTo(queryBuilder.getDocumentType()));
assertThat(percolatorQuery.getDocumentSource(), Matchers.equalTo(documentSource)); assertThat(percolateQuery.getDocumentSource(), Matchers.equalTo(documentSource));
} }
@Override @Override
public void testMustRewrite() throws IOException { public void testMustRewrite() throws IOException {
PercolatorQueryBuilder pqb = doCreateTestQueryBuilder(true); PercolateQueryBuilder pqb = doCreateTestQueryBuilder(true);
IllegalStateException e = expectThrows(IllegalStateException.class, () -> pqb.toQuery(createShardContext())); IllegalStateException e = expectThrows(IllegalStateException.class, () -> pqb.toQuery(createShardContext()));
assertThat(e.getMessage(), equalTo("query builder must be rewritten first")); assertThat(e.getMessage(), equalTo("query builder must be rewritten first"));
QueryBuilder<?> rewrite = pqb.rewrite(createShardContext()); QueryBuilder<?> rewrite = pqb.rewrite(createShardContext());
PercolatorQueryBuilder geoShapeQueryBuilder = new PercolatorQueryBuilder(pqb.getField(), pqb.getDocumentType(), documentSource); PercolateQueryBuilder geoShapeQueryBuilder = new PercolateQueryBuilder(pqb.getField(), pqb.getDocumentType(), documentSource);
assertEquals(geoShapeQueryBuilder, rewrite); assertEquals(geoShapeQueryBuilder, rewrite);
} }
public void testIndexedDocumentDoesNotExist() throws IOException { public void testIndexedDocumentDoesNotExist() throws IOException {
indexedDocumentExists = false; indexedDocumentExists = false;
PercolatorQueryBuilder pqb = doCreateTestQueryBuilder(true); PercolateQueryBuilder pqb = doCreateTestQueryBuilder(true);
ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> pqb.rewrite(createShardContext())); ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> pqb.rewrite(createShardContext()));
String expectedString = "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentType + "/" + String expectedString = "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentType + "/" +
indexedDocumentId + "] couldn't be found"; indexedDocumentId + "] couldn't be found";
@ -185,46 +185,46 @@ public class PercolatorQueryBuilderTests extends AbstractQueryTestCase<Percolato
public void testRequiredParameters() { public void testRequiredParameters() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
QueryBuilders.percolatorQuery(null, null, new BytesArray("{}")); QueryBuilders.percolateQuery(null, null, new BytesArray("{}"));
}); });
assertThat(e.getMessage(), equalTo("[field] is a required argument")); assertThat(e.getMessage(), equalTo("[field] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolatorQuery("_field", null, new BytesArray("{}"))); e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolateQuery("_field", null, new BytesArray("{}")));
assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); assertThat(e.getMessage(), equalTo("[document_type] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolatorQuery("_field", "_document_type", null)); e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolateQuery("_field", "_document_type", null));
assertThat(e.getMessage(), equalTo("[document] is a required argument")); assertThat(e.getMessage(), equalTo("[document] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> { e = expectThrows(IllegalArgumentException.class, () -> {
QueryBuilders.percolatorQuery(null, null, "_index", "_type", "_id", null, null, null); QueryBuilders.percolateQuery(null, null, "_index", "_type", "_id", null, null, null);
}); });
assertThat(e.getMessage(), equalTo("[field] is a required argument")); assertThat(e.getMessage(), equalTo("[field] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> { e = expectThrows(IllegalArgumentException.class, () -> {
QueryBuilders.percolatorQuery("_field", null, "_index", "_type", "_id", null, null, null); QueryBuilders.percolateQuery("_field", null, "_index", "_type", "_id", null, null, null);
}); });
assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); assertThat(e.getMessage(), equalTo("[document_type] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> { e = expectThrows(IllegalArgumentException.class, () -> {
QueryBuilders.percolatorQuery("_field", "_document_type", null, "_type", "_id", null, null, null); QueryBuilders.percolateQuery("_field", "_document_type", null, "_type", "_id", null, null, null);
}); });
assertThat(e.getMessage(), equalTo("[index] is a required argument")); assertThat(e.getMessage(), equalTo("[index] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> { e = expectThrows(IllegalArgumentException.class, () -> {
QueryBuilders.percolatorQuery("_field", "_document_type", "_index", null, "_id", null, null, null); QueryBuilders.percolateQuery("_field", "_document_type", "_index", null, "_id", null, null, null);
}); });
assertThat(e.getMessage(), equalTo("[type] is a required argument")); assertThat(e.getMessage(), equalTo("[type] is a required argument"));
e = expectThrows(IllegalArgumentException.class, () -> { e = expectThrows(IllegalArgumentException.class, () -> {
QueryBuilders.percolatorQuery("_field", "_document_type", "_index", "_type", null, null, null, null); QueryBuilders.percolateQuery("_field", "_document_type", "_index", "_type", null, null, null, null);
}); });
assertThat(e.getMessage(), equalTo("[id] is a required argument")); assertThat(e.getMessage(), equalTo("[id] is a required argument"));
} }
public void testFromJsonNoDocumentType() throws IOException { public void testFromJsonNoDocumentType() throws IOException {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parseQuery("{\"percolator\" : { \"document\": {}}")); () -> parseQuery("{\"percolate\" : { \"document\": {}}"));
assertThat(e.getMessage(), equalTo("[percolator] query is missing required [document_type] parameter")); assertThat(e.getMessage(), equalTo("[percolate] query is missing required [document_type] parameter"));
} }
private static BytesReference randomSource() { private static BytesReference randomSource() {

View File

@ -54,7 +54,6 @@ import org.apache.lucene.store.Directory;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.ExtractQueryTermsService; import org.elasticsearch.index.percolator.ExtractQueryTermsService;
import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.percolator.PercolatorFieldMapper;
@ -70,7 +69,7 @@ import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
public class PercolatorQueryTests extends ESTestCase { public class PercolateQueryTests extends ESTestCase {
public final static String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; public final static String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms";
public final static String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; public final static String UNKNOWN_QUERY_FIELD_NAME = "unknown_query";
@ -85,7 +84,7 @@ public class PercolatorQueryTests extends ESTestCase {
private Directory directory; private Directory directory;
private IndexWriter indexWriter; private IndexWriter indexWriter;
private Map<String, Query> queries; private Map<String, Query> queries;
private PercolatorQuery.QueryRegistry queryRegistry; private PercolateQuery.QueryRegistry queryRegistry;
private DirectoryReader directoryReader; private DirectoryReader directoryReader;
@Before @Before
@ -144,7 +143,7 @@ public class PercolatorQueryTests extends ESTestCase {
memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher(); IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolatorQuery.Builder builder = new PercolatorQuery.Builder( PercolateQuery.Builder builder = new PercolateQuery.Builder(
"docType", "docType",
queryRegistry, queryRegistry,
new BytesArray("{}"), new BytesArray("{}"),
@ -218,7 +217,7 @@ public class PercolatorQueryTests extends ESTestCase {
memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher(); IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolatorQuery.Builder builder = new PercolatorQuery.Builder( PercolateQuery.Builder builder = new PercolateQuery.Builder(
"docType", "docType",
queryRegistry, queryRegistry,
new BytesArray("{}"), new BytesArray("{}"),
@ -335,7 +334,7 @@ public class PercolatorQueryTests extends ESTestCase {
private void duelRun(MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException { private void duelRun(MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException {
IndexSearcher percolateSearcher = memoryIndex.createSearcher(); IndexSearcher percolateSearcher = memoryIndex.createSearcher();
PercolatorQuery.Builder builder1 = new PercolatorQuery.Builder( PercolateQuery.Builder builder1 = new PercolateQuery.Builder(
"docType", "docType",
queryRegistry, queryRegistry,
new BytesArray("{}"), new BytesArray("{}"),
@ -345,7 +344,7 @@ public class PercolatorQueryTests extends ESTestCase {
builder1.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); builder1.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME);
TopDocs topDocs1 = shardSearcher.search(builder1.build(), 10); TopDocs topDocs1 = shardSearcher.search(builder1.build(), 10);
PercolatorQuery.Builder builder2 = new PercolatorQuery.Builder( PercolateQuery.Builder builder2 = new PercolateQuery.Builder(
"docType", "docType",
queryRegistry, queryRegistry,
new BytesArray("{}"), new BytesArray("{}"),

View File

@ -164,7 +164,7 @@ public class SearchModuleTests extends ModuleTestCase {
"nested", "nested",
"parentId", "parentId",
"parent_id", "parent_id",
"percolator", "percolate",
"prefix", "prefix",
"queryString", "queryString",
"query_string", "query_string",

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
@ -35,7 +34,7 @@ import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.index.query.QueryBuilders.percolatorQuery; import static org.elasticsearch.index.query.QueryBuilders.percolateQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery;
@ -69,7 +68,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
BytesReference source = jsonBuilder().startObject().endObject().bytes(); BytesReference source = jsonBuilder().startObject().endObject().bytes();
logger.info("percolating empty doc"); logger.info("percolating empty doc");
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", source)) .setQuery(percolateQuery("query", "type", source))
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -77,7 +76,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); source = jsonBuilder().startObject().field("field1", "value").endObject().bytes();
logger.info("percolating doc with 1 field"); logger.info("percolating doc with 1 field");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", source)) .setQuery(percolateQuery("query", "type", source))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 2); assertHitCount(response, 2);
@ -87,7 +86,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes();
logger.info("percolating doc with 2 fields"); logger.info("percolating doc with 2 fields");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", source)) .setQuery(percolateQuery("query", "type", source))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 3); assertHitCount(response, 3);
@ -121,14 +120,14 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
logger.info("percolating empty doc"); logger.info("percolating empty doc");
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", "test", "type", "1")) .setQuery(percolateQuery("query", "type", "test", "type", "1"))
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
logger.info("percolating doc with 1 field"); logger.info("percolating doc with 1 field");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", "test", "type", "2")) .setQuery(percolateQuery("query", "type", "test", "type", "2"))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 2); assertHitCount(response, 2);
@ -137,7 +136,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
logger.info("percolating doc with 2 fields"); logger.info("percolating doc with 2 fields");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", "test", "type", "3")) .setQuery(percolateQuery("query", "type", "test", "type", "3"))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 3); assertHitCount(response, 3);
@ -204,7 +203,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
.field("field2", "the quick brown fox falls down into the well") .field("field2", "the quick brown fox falls down into the well")
.endObject().bytes(); .endObject().bytes();
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", source)) .setQuery(percolateQuery("query", "type", source))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 4); assertHitCount(response, 4);
@ -251,7 +250,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
.field("field1", "The quick brown fox jumps over the lazy dog") .field("field1", "The quick brown fox jumps over the lazy dog")
.endObject().bytes(); .endObject().bytes();
SearchResponse searchResponse = client().prepareSearch() SearchResponse searchResponse = client().prepareSearch()
.setQuery(percolatorQuery("query", "type", document)) .setQuery(percolateQuery("query", "type", document))
.highlighter(new HighlightBuilder().field("field1")) .highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
@ -285,7 +284,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
SearchResponse response = client().prepareSearch().setQuery( SearchResponse response = client().prepareSearch().setQuery(
QueryBuilders.percolatorQuery("query", "type", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}")) percolateQuery("query", "type", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}"))
).get(); ).get();
assertHitCount(response, 1); assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getId(), equalTo("2"));
@ -347,7 +346,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes(); BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes();
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(percolatorQuery(queryFieldName, "doc_type", source)) .setQuery(percolateQuery(queryFieldName, "doc_type", source))
.setIndices("test1") .setIndices("test1")
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
@ -356,7 +355,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
assertThat(response.getHits().getAt(0).index(), equalTo("test1")); assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(percolatorQuery("object_field." + queryFieldName, "doc_type", source)) .setQuery(percolateQuery("object_field." + queryFieldName, "doc_type", source))
.setIndices("test2") .setIndices("test2")
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);

View File

@ -1,22 +1,27 @@
[[java-query-percolator-query]] [[java-query-percolate-query]]
==== Percolator query ==== Percolate query
See: See:
* {ref}/query-dsl-percolator-query.html[Percolator Query] * {ref}/query-dsl-percolate-query.html[Percolate Query]
[source,java] [source,java]
-------------------------------------------------- --------------------------------------------------
// create an index with a percolator field with the name 'query':
client.admin().indices().prepareCreate("myIndexName")
.addMapping("query", "query", "type=percolator")
.get();
//This is the query we're registering in the percolator //This is the query we're registering in the percolator
QueryBuilder qb = termQuery("content", "amazing"); QueryBuilder qb = termQuery("content", "amazing");
//Index the query = register it in the percolator //Index the query = register it in the percolator
client.prepareIndex("myIndexName", ".percolator", "myDesignatedQueryName") client.prepareIndex("myIndexName", "query", "myDesignatedQueryName")
.setSource(jsonBuilder() .setSource(jsonBuilder()
.startObject() .startObject()
.field("query", qb) // Register the query .field("query", qb) // Register the query
.endObject()) .endObject())
.setRefresh(true) // Needed when the query shall be available immediately .setRefresh(true) // Needed when the query shall be available immediately
.execute().actionGet(); .get();
-------------------------------------------------- --------------------------------------------------
This indexes the above term query under the name This indexes the above term query under the name
@ -35,7 +40,7 @@ docBuilder.endObject(); //End of the doc field
docBuilder.endObject(); //End of the JSON root object docBuilder.endObject(); //End of the JSON root object
// Percolate, by executing the percolator query in the query dsl: // Percolate, by executing the percolator query in the query dsl:
SearchResponse response = client().prepareSearch("myIndexName") SearchResponse response = client().prepareSearch("myIndexName")
.setQuery(QueryBuilders.percolatorQuery("myDocumentType", docBuilder.bytes())) .setQuery(QueryBuilders.percolateQuery("query", "myDocumentType", docBuilder.bytes()))
.get(); .get();
//Iterate over the results //Iterate over the results
for(SearchHit hit : response.getHits()) { for(SearchHit hit : response.getHits()) {

View File

@ -27,5 +27,5 @@ include::template-query.asciidoc[]
include::script-query.asciidoc[] include::script-query.asciidoc[]
include::percolator-query.asciidoc[] include::percolate-query.asciidoc[]

View File

@ -58,7 +58,6 @@ Currently available <<modules-threadpool,thread pools>>:
|`index` |`i` |Thread pool used for <<docs-index_,index>>/<<docs-delete,delete>> operations |`index` |`i` |Thread pool used for <<docs-index_,index>>/<<docs-delete,delete>> operations
|`management` |`ma` |Thread pool used for management of Elasticsearch (e.g. cluster management) |`management` |`ma` |Thread pool used for management of Elasticsearch (e.g. cluster management)
|`force_merge` |`fm` |Thread pool used for <<indices-forcemerge,force merge>> operations |`force_merge` |`fm` |Thread pool used for <<indices-forcemerge,force merge>> operations
|`percolate` |`p` |Thread pool used for <<search-percolate,percolator>> operations
|`refresh` |`r` |Thread pool used for <<indices-refresh,refresh>> operations |`refresh` |`r` |Thread pool used for <<indices-refresh,refresh>> operations
|`search` |`s` |Thread pool used for <<search-search,search>>/<<search-count,count>> operations |`search` |`s` |Thread pool used for <<search-search,search>>/<<search-count,count>> operations
|`snapshot` |`sn` |Thread pool used for <<modules-snapshots,snapshot>> operations |`snapshot` |`sn` |Thread pool used for <<modules-snapshots,snapshot>> operations

View File

@ -93,8 +93,7 @@ curl 'localhost:9200/testidx/test/_mtermvectors' -d '{
-------------------------------------------------- --------------------------------------------------
Additionally, just like for the <<docs-termvectors,termvectors>> Additionally, just like for the <<docs-termvectors,termvectors>>
API, term vectors could be generated for user provided documents. The syntax API, term vectors could be generated for user provided documents. The mapping used is
is similar to the <<search-percolate,percolator>> API. The mapping used is
determined by `_index` and `_type`. determined by `_index` and `_type`.
[source,js] [source,js]

View File

@ -284,8 +284,7 @@ curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvectors?pretty=true' -d '
-- --
Term vectors can also be generated for artificial documents, Term vectors can also be generated for artificial documents,
that is for documents not present in the index. The syntax is similar to the that is for documents not present in the index. For example, the following request would
<<search-percolate,percolator>> API. For example, the following request would
return the same results as in example 1. The mapping used is determined by the return the same results as in example 1. The mapping used is determined by the
`index` and `type`. `index` and `type`.

View File

@ -2,7 +2,7 @@
=== Percolator type === Percolator type
The `percolator` field type parses a json structure into a native query and The `percolator` field type parses a json structure into a native query and
stores that query, so that the <<query-dsl-percolator-query,percolator query>> stores that query, so that the <<query-dsl-percolate-query,percolate query>>
can use it to match provided documents. can use it to match provided documents.
Any field that contains a json object can be configured to be a percolator Any field that contains a json object can be configured to be a percolator
@ -73,7 +73,7 @@ fail.
[float] [float]
==== Important Notes ==== Important Notes
Because the percolator query is processing one document at a time, it doesn't support queries and filters that run Because the `percolate` query is processing one document at a time, it doesn't support queries and filters that run
against child documents such as `has_child` and `has_parent`. against child documents such as `has_child` and `has_parent`.
There are a number of queries that fetch data via a get call during query parsing. For example the `terms` query when There are a number of queries that fetch data via a get call during query parsing. For example the `terms` query when

View File

@ -4,7 +4,7 @@
==== Percolator is near-real time ==== Percolator is near-real time
Previously percolators were activated in real-time, i.e. as soon as they were Previously percolators were activated in real-time, i.e. as soon as they were
indexed. Now, changes to the percolator query are visible in near-real time, indexed. Now, changes to the `percolate` query are visible in near-real time,
as soon as the index has been refreshed. This change was required because, in as soon as the index has been refreshed. This change was required because, in
indices created from 5.0 onwards, the terms used in a percolator query are indices created from 5.0 onwards, the terms used in a percolator query are
automatically indexed to allow for more efficient query selection during automatically indexed to allow for more efficient query selection during
@ -13,7 +13,7 @@ percolation.
==== Percolate and multi percolator APIs ==== Percolate and multi percolator APIs
Percolator and multi percolate APIs have been deprecated and will be removed in the next major release. These APIs have Percolator and multi percolate APIs have been deprecated and will be removed in the next major release. These APIs have
been replaced by the `percolator` query that can be used in the search and multi search APIs. been replaced by the `percolate` query that can be used in the search and multi search APIs.
==== Percolator field mapping ==== Percolator field mapping
@ -26,10 +26,7 @@ but new indices no longer accept the `.percolator` type.
==== Percolate document mapping ==== Percolate document mapping
The `percolator` query can no longer accept documents that reference fields The `percolate` query no longer modifies the mappings. Before the percolate API
that don't already exist in the mapping. Before the percolate API allowed this.
The `percolator` query no longer modifies the mappings. Before the percolate API
could be used to dynamically introduce new fields to the mappings based on the could be used to dynamically introduce new fields to the mappings based on the
fields in the document being percolated. This no longer works, because these fields in the document being percolated. This no longer works, because these
unmapped fields are not persisted in the mapping. unmapped fields are not persisted in the mapping.
@ -44,9 +41,9 @@ document and are returned by search requests.
==== Percolating existing document ==== Percolating existing document
When percolating an existing document then also specifying a document as source in the When percolating an existing document then also specifying a document as source in the
`percolator` query is not allowed any more. Before the percolate API allowed and ignored `percolate` query is not allowed any more. Before the percolate API allowed and ignored
the existing document. the existing document.
==== Percolate Stats ==== Percolate Stats
Percolate stats have been replaced with percolator query cache stats in nodes stats and cluster stats APIs. Percolate stats have been replaced with `percolate` query cache stats in nodes stats and cluster stats APIs.

View File

@ -1,8 +1,8 @@
[[query-dsl-percolator-query]] [[query-dsl-percolate-query]]
=== Percolator Query === Percolate Query
The `percolator` query can be used to match queries The `percolate` query can be used to match queries
stored in an index. The `percolator` query itself stored in an index. The `percolate` query itself
contains the document that will be used as query contains the document that will be used as query
to match with the stored queries. to match with the stored queries.
@ -44,7 +44,7 @@ object that represents an actual Elasticsearch query. The
`query` field has been configured to use the `query` field has been configured to use the
<<percolator,percolator field type>>. This field type understands <<percolator,percolator field type>>. This field type understands
the query dsl and stored the query in such a way that it the query dsl and stored the query in such a way that it
can be used later on to match documents defined on the `percolator` query. can be used later on to match documents defined on the `percolate` query.
Register a query in the percolator: Register a query in the percolator:
@ -65,7 +65,7 @@ Match a document to the registered percolator queries:
-------------------------------------------------- --------------------------------------------------
curl -XGET 'localhost:9200/my-index/_search' -d '{ curl -XGET 'localhost:9200/my-index/_search' -d '{
"query" : { "query" : {
"percolator" : { "percolate" : {
"field" : "query", "field" : "query",
"document_type" : "doctype", "document_type" : "doctype",
"document" : { "document" : {
@ -110,7 +110,7 @@ The above request will yield the following response:
} }
-------------------------------------------------- --------------------------------------------------
<1> The percolate query with id `1` matches our document. <1> The query with id `1` matches our document.
[float] [float]
==== Parameters ==== Parameters
@ -123,7 +123,7 @@ The following parameters are required when percolating a document:
`document`:: The source of the document being percolated. `document`:: The source of the document being percolated.
Instead of specifying the source of the document being percolated, the source can also be retrieved from an already Instead of specifying the source of the document being percolated, the source can also be retrieved from an already
stored document. The `percolator` query will then internally execute a get request to fetch that document. stored document. The `percolate` query will then internally execute a get request to fetch that document.
In that case the `document` parameter can be substituted with the following parameters: In that case the `document` parameter can be substituted with the following parameters:
@ -138,7 +138,7 @@ In that case the `document` parameter can be substituted with the following para
[float] [float]
==== Percolating an Existing Document ==== Percolating an Existing Document
In order to percolate a newly indexed document, the `percolator` query can be used. Based on the response In order to percolate a newly indexed document, the `percolate` query can be used. Based on the response
from an index request, the `_id` and other meta information can be used to immediately percolate the newly added from an index request, the `_id` and other meta information can be used to immediately percolate the newly added
document. document.
@ -182,7 +182,7 @@ Percolating an existing document, using the index response as basis to build to
curl -XGET "http://localhost:9200/my-index/_search" -d' curl -XGET "http://localhost:9200/my-index/_search" -d'
{ {
"query" : { "query" : {
"percolator" : { "percolate" : {
"field": "query", "field": "query",
"document_type" : "doctype", "document_type" : "doctype",
"index" : "my-index", "index" : "my-index",
@ -201,10 +201,10 @@ case the search request would fail with a version conflict error.
The search response returned is identical to that in the previous example. The search response returned is identical to that in the previous example.
[float] [float]
==== Percolator and highlighting ==== Percolate query and highlighting
The percolator query is handled in a special way when it comes to highlighting. The percolator query's hits are used The `percolate` query is handled in a special way when it comes to highlighting. The query's hits are used
to highlight the document that is provided in the `percolator` query. Whereas with regular highlighting the query in to highlight the document that is provided in the `percolate` query. Whereas with regular highlighting the query in
the search request is used to highlight the hits. the search request is used to highlight the hits.
[float] [float]
@ -212,7 +212,7 @@ the search request is used to highlight the hits.
This example is based on the mapping of the first example. This example is based on the mapping of the first example.
Add a percolator query: Save a query:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
@ -226,7 +226,7 @@ curl -XPUT "http://localhost:9200/my-index/queries/1" -d'
}' }'
-------------------------------------------------- --------------------------------------------------
Add another percolator query: Save another query:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
@ -240,14 +240,14 @@ curl -XPUT "http://localhost:9200/my-index/queries/2" -d'
}' }'
-------------------------------------------------- --------------------------------------------------
Execute a search request with the `percolator` query and highlighting enabled: Execute a search request with the `percolate` query and highlighting enabled:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XGET "http://localhost:9200/my-index/_search" -d' curl -XGET "http://localhost:9200/my-index/_search" -d'
{ {
"query" : { "query" : {
"percolator" : { "percolate" : {
"field": "query", "field": "query",
"document_type" : "doctype", "document_type" : "doctype",
"document" : { "document" : {
@ -320,8 +320,8 @@ This will yield the following response.
} }
-------------------------------------------------- --------------------------------------------------
<1> Instead of the query in the search request highlighting the percolator hits, the percolator queries are highlighting Instead of the query in the search request highlighting the percolator hits, the percolator queries are highlighting
the document defined in the `percolator` query. the document defined in the `percolate` query.
[float] [float]
==== How it Works Under the Hood ==== How it Works Under the Hood

View File

@ -20,7 +20,7 @@ final query to execute.
This query allows a script to act as a filter. Also see the This query allows a script to act as a filter. Also see the
<<query-dsl-function-score-query,`function_score` query>>. <<query-dsl-function-score-query,`function_score` query>>.
<<query-dsl-percolator-query,`percolator` query>>:: <<query-dsl-percolate-query,`percolate` query>>::
This query finds queries that are stored as documents that match with This query finds queries that are stored as documents that match with
the specified document. the specified document.
@ -31,5 +31,5 @@ include::template-query.asciidoc[]
include::script-query.asciidoc[] include::script-query.asciidoc[]
include::percolator-query.asciidoc[] include::percolate-query.asciidoc[]

View File

@ -1,8 +1,8 @@
[[search-percolate]] [[search-percolate]]
== Percolator == Percolator
deprecated[5.0.0,Percolate and multi percolate APIs are deprecated and have been replaced by the new <<query-dsl-percolator-query,`percolator` query>>] deprecated[5.0.0,Percolate and multi percolate APIs are deprecated and have been replaced by the new <<query-dsl-percolate-query,`percolate` query>>]
added[5.0.0,Percolator query modifications only become visible after a refresh has occurred. Previously, they became visible immediately] added[5.0.0,Percolate query modifications only become visible after a refresh has occurred. Previously, they became visible immediately]
added[5.0.0,For indices created on or after version 5.0.0-alpha1 the percolator automatically indexes the query terms with the percolator queries. This allows the percolator to percolate documents more quickly. It is advisable to reindex any pre 5.0.0 indices to take advantage of this new optimization] added[5.0.0,For indices created on or after version 5.0.0-alpha1 the percolator automatically indexes the query terms with the percolator queries. This allows the percolator to percolate documents more quickly. It is advisable to reindex any pre 5.0.0 indices to take advantage of this new optimization]