From 40c22fc654f7b615baf877eb0c6b89bccc3f42cc Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 5 Apr 2016 11:53:28 +0200 Subject: [PATCH] percolator: removed .percolator type instead a field of type `percolator` should be configured before indexing percolator queries * Added an extra `field` parameter to the `percolator` query to indicate what percolator field should be used. This must be an existing field in the mapping of type `percolator`. * The `.percolator` type is now forbidden. (just like any type that starts with a `.`) This only applies for new indices created on 5.0 and later. Indices created on previous versions the .percolator type is still allowed to exist. The new `percolator` field type isn't active in such indices and the `PercolatorQueryCache` knows how to load queries from these legacy indices. The `PercolatorQueryBuilder` will not enforce that the `field` parameter is of type `percolator`. --- .../percolate/TransportPercolateAction.java | 16 +- .../index/mapper/MapperService.java | 47 +- .../percolator/ExtractQueryTermsService.java | 4 + .../percolator/PercolatorFieldMapper.java | 83 ++- .../PercolatorHighlightSubFetchPhase.java | 14 +- .../percolator/PercolatorQueryCache.java | 66 +- .../index/query/PercolatorQuery.java | 26 +- .../index/query/PercolatorQueryBuilder.java | 58 +- .../index/query/QueryBuilders.java | 14 +- .../PercolatorFieldMapperTests.java | 196 +++++- ...PercolatorHighlightSubFetchPhaseTests.java | 4 +- .../percolator/PercolatorQueryCacheTests.java | 49 +- .../query/PercolatorQueryBuilderTests.java | 132 ++-- .../index/query/PercolatorQueryTests.java | 16 +- .../indices/IndicesOptionsIntegrationIT.java | 10 +- .../percolator/MultiPercolatorIT.java | 127 ++-- .../percolator/PercolatorAggregationsIT.java | 32 +- .../PercolatorBackwardsCompatibilityIT.java | 2 +- .../percolator/PercolatorIT.java | 610 ++++++++++-------- .../percolator/PercolatorQuerySearchIT.java | 194 +++++- 
.../suggest/CompletionSuggestSearchIT.java | 4 +- .../ContextAndHeaderTransportIT.java | 9 +- docs/reference/mapping/types.asciidoc | 4 + .../mapping/types/percolator.asciidoc | 86 +++ .../migration/migrate_5_0/percolator.asciidoc | 11 +- .../query-dsl/percolator-query.asciidoc | 197 ++---- .../test/mpercolate/10_basic.yaml | 13 +- .../rest-api-spec/test/percolate/15_new.yaml | 8 +- .../test/percolate/16_existing_doc.yaml | 8 +- .../test/percolate/17_empty.yaml | 6 + .../percolate/18_highligh_with_query.yaml | 6 +- .../test/percolate/19_nested.yaml | 6 +- 32 files changed, 1336 insertions(+), 722 deletions(-) create mode 100644 docs/reference/mapping/types/percolator.asciidoc diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index c9bb0c73fdc..ec587a2b33b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -20,10 +20,13 @@ package org.elasticsearch.action.percolate; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -53,7 +56,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import 
java.util.ArrayList; import java.util.Arrays; +import java.util.List; public class TransportPercolateAction extends HandledTransportAction { @@ -194,7 +199,8 @@ public class TransportPercolateAction extends HandledTransportAction shardFailures = new ArrayList<>(searchResponse.getShardFailures().length); + for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { + shardFailures.add(new DefaultShardOperationFailedException(shardSearchFailure.index(), shardSearchFailure.shardId(), + shardSearchFailure.getCause())); + } + return new PercolateResponse( searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(), - Arrays.asList(searchResponse.getShardFailures()), matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations() + shardFailures, matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations() ); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index ebd8587f05b..eb471cd818e 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -23,7 +23,9 @@ import com.carrotsearch.hppc.ObjectHashSet; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; +import org.apache.lucene.document.FieldType; import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.Version; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; @@ -104,7 +106,6 @@ public class MapperService extends AbstractIndexComponent { private final boolean dynamic; private volatile String defaultMappingSource; - private volatile String 
defaultPercolatorMappingSource; private volatile Map mappers = emptyMap(); @@ -137,15 +138,6 @@ public class MapperService extends AbstractIndexComponent { this.mapperRegistry = mapperRegistry; this.dynamic = this.indexSettings.getValue(INDEX_MAPPER_DYNAMIC_SETTING); - defaultPercolatorMappingSource = "{\n" + - "\"_default_\":{\n" + - "\"properties\" : {\n" + - "\"query\" : {\n" + - "\"type\" : \"percolator\"\n" + - "}\n" + - "}\n" + - "}\n" + - "}"; if (index().getName().equals(ScriptService.SCRIPT_INDEX)){ defaultMappingSource = "{" + "\"_default_\": {" + @@ -160,7 +152,7 @@ public class MapperService extends AbstractIndexComponent { } if (logger.isTraceEnabled()) { - logger.trace("using dynamic[{}], default mapping source[{}], default percolator mapping source[{}]", dynamic, defaultMappingSource, defaultPercolatorMappingSource); + logger.trace("using dynamic[{}], default mapping source[{}]", dynamic, defaultMappingSource); } else if (logger.isDebugEnabled()) { logger.debug("using dynamic[{}]", dynamic); } @@ -288,6 +280,7 @@ public class MapperService extends AbstractIndexComponent { checkNestedFieldsLimit(fullPathObjectMappers); checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size()); checkDepthLimit(fullPathObjectMappers.keySet()); + checkPercolatorFieldLimit(fieldTypes); } Set parentTypes = this.parentTypes; @@ -337,7 +330,12 @@ public class MapperService extends AbstractIndexComponent { } private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { - return mapper.type().startsWith(".") && !PercolatorFieldMapper.TYPE_NAME.equals(mapper.type()); + boolean legacyIndex = getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha1); + if (legacyIndex) { + return mapper.type().startsWith(".") && !PercolatorFieldMapper.LEGACY_TYPE_NAME.equals(mapper.type()); + } else { + return mapper.type().startsWith("."); + } } private boolean assertSerialization(DocumentMapper mapper) { @@ -445,13 +443,26 @@ public class MapperService extends 
AbstractIndexComponent { } } - public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { - String defaultMappingSource; - if (PercolatorFieldMapper.TYPE_NAME.equals(mappingType)) { - defaultMappingSource = this.defaultPercolatorMappingSource; - } else { - defaultMappingSource = this.defaultMappingSource; + /** + * We only allow upto 1 percolator field per index. + * + * Reasoning here is that the PercolatorQueryCache only supports a single document having a percolator query. + * Also specifying multiple queries per document feels like an anti pattern + */ + private void checkPercolatorFieldLimit(Iterable fieldTypes) { + List percolatorFieldTypes = new ArrayList<>(); + for (MappedFieldType fieldType : fieldTypes) { + if (fieldType instanceof PercolatorFieldMapper.PercolatorFieldType) { + percolatorFieldTypes.add(fieldType.name()); + } } + if (percolatorFieldTypes.size() > 1) { + throw new IllegalArgumentException("Up to one percolator field type is allowed per index, " + + "found the following percolator fields [" + percolatorFieldTypes + "]"); + } + } + + public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { return documentParser.parse(mappingType, mappingSource, applyDefault ? 
defaultMappingSource : null); } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java b/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java index b56ebfb05ef..2f48a0c6439 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java @@ -57,6 +57,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; /** @@ -234,6 +235,9 @@ public final class ExtractQueryTermsService { * Creates a boolean query with a should clause for each term on all fields of the specified index reader. */ public static Query createQueryTermsQuery(IndexReader indexReader, String queryMetadataField, String unknownQueryField) throws IOException { + Objects.requireNonNull(queryMetadataField); + Objects.requireNonNull(unknownQueryField); + List extractedTerms = new ArrayList<>(); extractedTerms.add(new Term(unknownQueryField)); Fields fields = MultiFields.getFields(indexReader); diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index 7d1e3b08939..07dbba79e91 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.Mapper; @@ -48,53 +49,55 @@ import java.util.Map; public class PercolatorFieldMapper extends FieldMapper { - public static final String TYPE_NAME = ".percolator"; - public static final String NAME = "query"; + @Deprecated + public static final String LEGACY_TYPE_NAME = ".percolator"; public static final String CONTENT_TYPE = "percolator"; - public static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); + private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); private static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; private static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; - private static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; - - public static final String EXTRACTED_TERMS_FULL_FIELD_NAME = NAME + "." + EXTRACTED_TERMS_FIELD_NAME; - public static final String UNKNOWN_QUERY_FULL_FIELD_NAME = NAME + "." + UNKNOWN_QUERY_FIELD_NAME; - public static final String QUERY_BUILDER_FULL_FIELD_NAME = NAME + "." 
+ QUERY_BUILDER_FIELD_NAME; + static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; public static class Builder extends FieldMapper.Builder { private final QueryShardContext queryShardContext; - public Builder(QueryShardContext queryShardContext) { - super(NAME, FIELD_TYPE, FIELD_TYPE); + public Builder(String fieldName, QueryShardContext queryShardContext) { + super(fieldName, FIELD_TYPE, FIELD_TYPE); this.queryShardContext = queryShardContext; } @Override public PercolatorFieldMapper build(BuilderContext context) { - context.path().add(name); - KeywordFieldMapper extractedTermsField = createExtractQueryFieldBuilder(EXTRACTED_TERMS_FIELD_NAME).build(context); - KeywordFieldMapper unknownQueryField = createExtractQueryFieldBuilder(UNKNOWN_QUERY_FIELD_NAME).build(context); - BinaryFieldMapper queryBuilderField = createQueryBuilderFieldBuilder().build(context); + context.path().add(name()); + KeywordFieldMapper extractedTermsField = createExtractQueryFieldBuilder(EXTRACTED_TERMS_FIELD_NAME, context); + ((PercolatorFieldType) fieldType).queryTermsField = extractedTermsField.fieldType(); + KeywordFieldMapper unknownQueryField = createExtractQueryFieldBuilder(UNKNOWN_QUERY_FIELD_NAME, context); + ((PercolatorFieldType) fieldType).unknownQueryField = unknownQueryField.fieldType(); + BinaryFieldMapper queryBuilderField = createQueryBuilderFieldBuilder(context); + ((PercolatorFieldType) fieldType).queryBuilderField = queryBuilderField.fieldType(); context.path().remove(); - return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, unknownQueryField, queryBuilderField); + setupFieldType(context); + return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), + multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, + unknownQueryField, queryBuilderField); } - static 
KeywordFieldMapper.Builder createExtractQueryFieldBuilder(String name) { + static KeywordFieldMapper createExtractQueryFieldBuilder(String name, BuilderContext context) { KeywordFieldMapper.Builder queryMetaDataFieldBuilder = new KeywordFieldMapper.Builder(name); queryMetaDataFieldBuilder.docValues(false); queryMetaDataFieldBuilder.store(false); queryMetaDataFieldBuilder.indexOptions(IndexOptions.DOCS); - return queryMetaDataFieldBuilder; + return queryMetaDataFieldBuilder.build(context); } - static BinaryFieldMapper.Builder createQueryBuilderFieldBuilder() { + static BinaryFieldMapper createQueryBuilderFieldBuilder(BuilderContext context) { BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(QUERY_BUILDER_FIELD_NAME); builder.docValues(true); builder.indexOptions(IndexOptions.NONE); builder.store(false); builder.fieldType().setDocValuesType(DocValuesType.BINARY); - return builder; + return builder.build(context); } } @@ -102,21 +105,39 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - return new Builder(parserContext.queryShardContext()); + return new Builder(name, parserContext.queryShardContext()); } } - public static final class PercolatorFieldType extends MappedFieldType { + public static class PercolatorFieldType extends MappedFieldType { + + private MappedFieldType queryTermsField; + private MappedFieldType unknownQueryField; + private MappedFieldType queryBuilderField; public PercolatorFieldType() { - setName(NAME); setIndexOptions(IndexOptions.NONE); setDocValuesType(DocValuesType.NONE); setStored(false); } - public PercolatorFieldType(MappedFieldType ref) { + public PercolatorFieldType(PercolatorFieldType ref) { super(ref); + queryTermsField = ref.queryTermsField; + unknownQueryField = ref.unknownQueryField; + queryBuilderField = ref.queryBuilderField; + } + + public String getExtractedTermsField() { + return 
queryTermsField.name(); + } + + public String getUnknownQueryFieldName() { + return unknownQueryField.name(); + } + + public String getQueryBuilderFieldName() { + return queryBuilderField.name(); } @Override @@ -132,9 +153,9 @@ public class PercolatorFieldMapper extends FieldMapper { private final boolean mapUnmappedFieldAsString; private final QueryShardContext queryShardContext; - private final KeywordFieldMapper queryTermsField; - private final KeywordFieldMapper unknownQueryField; - private final BinaryFieldMapper queryBuilderField; + private KeywordFieldMapper queryTermsField; + private KeywordFieldMapper unknownQueryField; + private BinaryFieldMapper queryBuilderField; public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, @@ -151,6 +172,16 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public Mapper parse(ParseContext context) throws IOException { QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext); + DocumentMapper documentMapper = queryShardContext.getMapperService().documentMapper(context.type()); + for (FieldMapper fieldMapper : documentMapper.mappers()) { + if (fieldMapper instanceof PercolatorFieldMapper) { + PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType(); + if (context.doc().getField(fieldType.getQueryBuilderFieldName()) != null) { + throw new IllegalArgumentException("a document can only contain one percolator query"); + } + } + } + XContentParser parser = context.parser(); QueryBuilder queryBuilder = parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation()); // Fetching of terms, shapes and indexed scripts happen during this rewrite: diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java 
b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java index c1f9720b53b..71cf282d83f 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java @@ -30,8 +30,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.PercolatorQuery; import org.elasticsearch.search.SearchParseElement; @@ -80,20 +78,20 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { createSubSearchContext(context, percolatorLeafReaderContext, percolatorQuery.getDocumentSource()); for (InternalSearchHit hit : hits) { - if (PercolatorFieldMapper.TYPE_NAME.equals(hit.getType())) { - LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); - Query query = queriesRegistry.getQueries(ctx).getQuery(hit.docId() - ctx.docBase); + LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); + int segmentDocId = hit.docId() - ctx.docBase; + Query query = queriesRegistry.getQueries(ctx).getQuery(segmentDocId); + if (query != null) { subSearchContext.parsedQuery(new ParsedQuery(query)); hitContext.reset( - new InternalSearchHit(0, "unknown", new Text(percolatorQuery.getDocumentType()), Collections.emptyMap()), - percolatorLeafReaderContext, 0, percolatorIndexSearcher + new InternalSearchHit(0, "unknown", new Text(percolatorQuery.getDocumentType()), Collections.emptyMap()), + percolatorLeafReaderContext, 0, percolatorIndexSearcher ); hitContext.cache().clear(); highlightPhase.hitExecute(subSearchContext, hitContext); 
hit.highlightFields().putAll(hitContext.hit().getHighlightFields()); } } - } @Override diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java index 78d8af06827..2d6f65d4e0b 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java @@ -28,7 +28,10 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -46,8 +49,10 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexWarmer; import org.elasticsearch.index.IndexWarmer.TerminationHandle; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.Searcher; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.query.PercolatorQuery; @@ -63,6 +68,10 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.function.Supplier; +import static org.elasticsearch.index.percolator.PercolatorFieldMapper.LEGACY_TYPE_NAME; +import static org.elasticsearch.index.percolator.PercolatorFieldMapper.PercolatorFieldType; +import static 
org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery; + public final class PercolatorQueryCache extends AbstractIndexComponent implements Closeable, LeafReader.CoreClosedListener, PercolatorQuery.QueryRegistry { @@ -107,7 +116,7 @@ public final class PercolatorQueryCache extends AbstractIndexComponent executor.execute(() -> { try { final long start = System.nanoTime(); - QueriesLeaf queries = loadQueries(ctx, indexShard.indexSettings().getIndexVersionCreated()); + QueriesLeaf queries = loadQueries(ctx, indexShard); cache.put(ctx.reader().getCoreCacheKey(), queries); if (indexShard.warmerService().logger().isTraceEnabled()) { indexShard.warmerService().logger().trace( @@ -127,7 +136,9 @@ public final class PercolatorQueryCache extends AbstractIndexComponent }; } - QueriesLeaf loadQueries(LeafReaderContext context, Version indexVersionCreated) throws IOException { + QueriesLeaf loadQueries(LeafReaderContext context, IndexShard indexShard) throws IOException { + Version indexVersionCreated = indexShard.indexSettings().getIndexVersionCreated(); + MapperService mapperService = indexShard.mapperService(); LeafReader leafReader = context.reader(); ShardId shardId = ShardUtils.extractShardId(leafReader); if (shardId == null) { @@ -135,29 +146,48 @@ public final class PercolatorQueryCache extends AbstractIndexComponent } if (indexSettings.getIndex().equals(shardId.getIndex()) == false) { // percolator cache insanity - String message = "Trying to load queries for index " + shardId.getIndex() + " with cache of index " + indexSettings.getIndex(); + String message = "Trying to load queries for index " + shardId.getIndex() + " with cache of index " + + indexSettings.getIndex(); throw new IllegalStateException(message); } IntObjectHashMap queries = new IntObjectHashMap<>(); boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0_alpha1); - PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME), 
PostingsEnum.NONE); - if (postings != null) { - if (legacyLoading) { + if (legacyLoading) { + PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, LEGACY_TYPE_NAME), PostingsEnum.NONE); + if (postings != null) { LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor(); for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) { leafReader.document(docId, visitor); queries.put(docId, parseLegacyPercolatorDocument(docId, visitor.source)); visitor.source = null; // reset } - } else { - BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(PercolatorFieldMapper.QUERY_BUILDER_FULL_FIELD_NAME); - if (binaryDocValues != null) { - for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) { - BytesRef queryBuilder = binaryDocValues.get(docId); - if (queryBuilder.length > 0) { - queries.put(docId, parseQueryBuilder(docId, queryBuilder)); + } + } else { + // Each type can have one percolator field mapper, + // So for each type we check if there is a percolator field mapper + // and parse all the queries for the documents of that type. 
+ IndexSearcher indexSearcher = new IndexSearcher(leafReader); + for (DocumentMapper documentMapper : mapperService.docMappers(false)) { + Weight queryWeight = indexSearcher.createNormalizedWeight(documentMapper.typeFilter(), false); + for (FieldMapper fieldMapper : documentMapper.mappers()) { + if (fieldMapper instanceof PercolatorFieldMapper) { + PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType(); + BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName()); + if (binaryDocValues != null) { + // use the same leaf reader context the indexSearcher is using too: + Scorer scorer = queryWeight.scorer(leafReader.getContext()); + if (scorer != null) { + DocIdSetIterator iterator = scorer.iterator(); + for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { + BytesRef qbSource = binaryDocValues.get(docId); + if (qbSource.length > 0) { + queries.put(docId, parseQueryBuilder(docId, qbSource)); + } + } + } } + break; } } } @@ -166,11 +196,11 @@ public final class PercolatorQueryCache extends AbstractIndexComponent return new QueriesLeaf(shardId, queries); } - private Query parseQueryBuilder(int docId, BytesRef queryBuilder) { + private Query parseQueryBuilder(int docId, BytesRef qbSource) { XContent xContent = QUERY_BUILDER_CONTENT_TYPE.xContent(); - try (XContentParser sourceParser = xContent.createParser(queryBuilder.bytes, queryBuilder.offset, queryBuilder.length)) { + try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) { QueryShardContext context = queryShardContextSupplier.get(); - return PercolatorFieldMapper.parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); } catch (IOException e) { throw new PercolatorException(index(), "failed to parse query builder for document [" + docId + "]", e); } @@ -189,7 +219,7 @@ 
public final class PercolatorQueryCache extends AbstractIndexComponent } else if (token == XContentParser.Token.START_OBJECT) { if ("query".equals(currentFieldName)) { QueryShardContext context = queryShardContextSupplier.get(); - return PercolatorFieldMapper.parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); } else { sourceParser.skipChildren(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java b/core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java index a97009a91e0..70648663515 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/PercolatorQuery.java @@ -23,9 +23,11 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SimpleCollector; @@ -56,22 +58,19 @@ public final class PercolatorQuery extends Query implements Accountable { private final IndexSearcher percolatorIndexSearcher; private Query queriesMetaDataQuery; - private final Query percolateTypeQuery; + private Query percolateTypeQuery; /** * @param docType The type of the document being percolated * @param queryRegistry The registry holding all the percolator queries as Lucene queries. 
* @param documentSource The source of the document being percolated * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated - * @param percolateTypeQuery A query that identifies all document containing percolator queries */ - public Builder(String docType, QueryRegistry queryRegistry, BytesReference documentSource, IndexSearcher percolatorIndexSearcher, - Query percolateTypeQuery) { + public Builder(String docType, QueryRegistry queryRegistry, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { this.docType = Objects.requireNonNull(docType); this.documentSource = Objects.requireNonNull(documentSource); this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); this.queryRegistry = Objects.requireNonNull(queryRegistry); - this.percolateTypeQuery = Objects.requireNonNull(percolateTypeQuery); } /** @@ -87,12 +86,27 @@ public final class PercolatorQuery extends Query implements Accountable { ); } + /** + * @param percolateTypeQuery A query that identifies all document containing percolator queries + */ + public void setPercolateTypeQuery(Query percolateTypeQuery) { + this.percolateTypeQuery = Objects.requireNonNull(percolateTypeQuery); + } + public PercolatorQuery build() { + if (percolateTypeQuery != null && queriesMetaDataQuery != null) { + throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata"); + } + + // The query that selects which percolator queries will be evaluated by MemoryIndex: BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(percolateTypeQuery, FILTER); + if (percolateTypeQuery != null) { + builder.add(percolateTypeQuery, FILTER); + } if (queriesMetaDataQuery != null) { builder.add(queriesMetaDataQuery, FILTER); } + return new PercolatorQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher); } diff --git 
a/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java index 82286c1035e..a459131560f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/PercolatorQueryBuilder.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; @@ -72,6 +73,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder { - mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() - .field("query", termQuery("unmapped_field", "value")) + mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() + .field(fieldName, termQuery("unmapped_field", "value")) .endObject().bytes()); }); assertThat(exception.getCause(), instanceOf(QueryShardException.class)); @@ -77,13 +153,14 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testPercolatorFieldMapper_noQuery() throws Exception { - ParsedDocument doc = mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() + addQueryMapping(); + ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .endObject().bytes()); - 
assertThat(doc.rootDoc().getFields(PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME).length, equalTo(0)); + assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(0)); try { - mapperService.documentMapper(PercolatorFieldMapper.TYPE_NAME).parse("test", PercolatorFieldMapper.TYPE_NAME, "1", XContentFactory.jsonBuilder().startObject() - .nullField("query") + mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() + .nullField(fieldName) .endObject().bytes()); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object")); @@ -91,18 +168,91 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { } public void testAllowNoAdditionalSettings() throws Exception { + addQueryMapping(); IndexService indexService = createIndex("test1", Settings.EMPTY); MapperService mapperService = indexService.mapperService(); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorFieldMapper.TYPE_NAME) - .startObject("properties").startObject("query").field("type", "percolator").field("index", "no").endObject().endObject() + String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + .startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject() .endObject().endObject().string(); try { - mapperService.merge(PercolatorFieldMapper.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); fail("MapperParsingException expected"); } catch (MapperParsingException e) { - assertThat(e.getMessage(), equalTo("Mapping definition for [query] has unsupported parameters: [index : no]")); + assertThat(e.getMessage(), 
equalTo("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } } + // multiple percolator fields are allowed in the mapping, but only one field can be used at index time. + public void testMultiplePercolatorFields() throws Exception { + String typeName = "another_type"; + String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + .startObject("properties") + .startObject("query_field1").field("type", "percolator").endObject() + .startObject("query_field2").field("type", "percolator").endObject() + .endObject() + .endObject().endObject().string(); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); + }); + assertThat(exception.getMessage(), equalTo("Up to one percolator field type is allowed per index, " + + "found the following percolator fields [[query_field1, query_field2]]")); + } + + // percolator field can be nested under an object field, but only one query can be specified per document + public void testNestedPercolatorField() throws Exception { + String typeName = "another_type"; + String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + .startObject("properties") + .startObject("object_field") + .field("type", "object") + .startObject("properties") + .startObject("query_field").field("type", "percolator").endObject() + .endObject() + .endObject() + .endObject() + .endObject().endObject().string(); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); + + QueryBuilder queryBuilder = matchQuery("field", "value"); + ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject().startObject("object_field") + .field("query_field", queryBuilder) + 
.endObject().endObject().bytes() + ); + assertThat(doc.rootDoc().getFields().size(), equalTo(18)); // also includes all other meta fields + BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); + assertQueryBuilder(queryBuilderAsBytes, queryBuilder); + + doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject() + .startArray("object_field") + .startObject().field("query_field", queryBuilder).endObject() + .endArray() + .endObject().bytes() + ); + assertThat(doc.rootDoc().getFields().size(), equalTo(18)); // also includes all other meta fields + queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); + assertQueryBuilder(queryBuilderAsBytes, queryBuilder); + + MapperParsingException e = expectThrows(MapperParsingException.class, () -> { + mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject() + .startArray("object_field") + .startObject().field("query_field", queryBuilder).endObject() + .startObject().field("query_field", queryBuilder).endObject() + .endArray() + .endObject().bytes() + ); + } + ); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query")); + } + + private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException { + XContentParser sourceParser = QUERY_BUILDER_CONTENT_TYPE.xContent().createParser(actual.bytes, actual.offset, actual.length); + QueryParseContext qsc = indexService.newQueryShardContext().newParseContext(sourceParser); + assertThat(qsc.parseInnerQueryBuilder(), equalTo(expected)); + } } diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java index 
5e37f1ce6e8..db2cbea8947 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -42,7 +42,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { public void testHitsExecutionNeeded() { PercolatorQuery percolatorQuery = new PercolatorQuery.Builder("", ctx -> null, new BytesArray("{}"), - Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()) + Mockito.mock(IndexSearcher.class)) .build(); PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(null); @@ -61,7 +61,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { public void testLocatePercolatorQuery() { PercolatorQuery percolatorQuery = new PercolatorQuery.Builder("", ctx -> null, new BytesArray("{}"), - Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()) + Mockito.mock(IndexSearcher.class)) .build(); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(new MatchAllDocsQuery()), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java index f30f880cc1a..63ebc0ea192 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.index.percolator; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -56,6 +58,8 @@ import org.elasticsearch.index.IndexWarmer; import org.elasticsearch.index.analysis.AnalysisRegistry; 
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.DocumentFieldMappers; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; @@ -134,6 +138,7 @@ public class PercolatorQueryCacheTests extends ESTestCase { boolean legacyFormat = randomBoolean(); Version version = legacyFormat ? Version.V_2_0_0 : Version.CURRENT; + IndexShard indexShard = mockIndexShard(version, legacyFormat); storeQuery("0", indexWriter, termQuery("field1", "value1"), true, legacyFormat); storeQuery("1", indexWriter, wildcardQuery("field1", "v*"), true, legacyFormat); @@ -161,7 +166,7 @@ public class PercolatorQueryCacheTests extends ESTestCase { initialize("field1", "type=keyword", "field2", "type=keyword", "field3", "type=keyword"); - PercolatorQueryCache.QueriesLeaf leaf = cache.loadQueries(indexReader.leaves().get(0), version); + PercolatorQueryCache.QueriesLeaf leaf = cache.loadQueries(indexReader.leaves().get(0), indexShard); assertThat(leaf.queries.size(), equalTo(5)); assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("field1", "value1")))); assertThat(leaf.getQuery(1), equalTo(new WildcardQuery(new Term("field1", "v*")))); @@ -212,7 +217,7 @@ public class PercolatorQueryCacheTests extends ESTestCase { assertThat(e.getMessage(), equalTo("queries not loaded, queries should be have been preloaded during index warming...")); } - IndexShard indexShard = mockIndexShard(); + IndexShard indexShard = mockIndexShard(Version.CURRENT, false); ThreadPool threadPool = mockThreadPool(); IndexWarmer.Listener listener = cache.createListener(threadPool); listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); @@ -259,7 +264,7 @@ public class PercolatorQueryCacheTests extends ESTestCase { 
initialize("a", "type=keyword"); - IndexShard indexShard = mockIndexShard(); + IndexShard indexShard = mockIndexShard(Version.CURRENT, false); ThreadPool threadPool = mockThreadPool(); IndexWarmer.Listener listener = cache.createListener(threadPool); listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); @@ -312,7 +317,11 @@ public class PercolatorQueryCacheTests extends ESTestCase { Document doc = new Document(); doc.add(new StringField("id", id, Field.Store.NO)); if (typeField) { - doc.add(new StringField(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME, Field.Store.NO)); + if (legacy) { + doc.add(new StringField(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME, Field.Store.NO)); + } else { + doc.add(new StringField(TypeFieldMapper.NAME, "query", Field.Store.NO)); + } } if (legacy) { BytesReference percolatorQuery = XContentFactory.jsonBuilder().startObject() @@ -326,12 +335,12 @@ public class PercolatorQueryCacheTests extends ESTestCase { BytesRef queryBuilderAsBytes = new BytesRef( XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE).value(queryBuilder).bytes().toBytes() ); - doc.add(new BinaryDocValuesField(PercolatorFieldMapper.QUERY_BUILDER_FULL_FIELD_NAME, queryBuilderAsBytes)); + doc.add(new BinaryDocValuesField(PercolatorFieldMapper.QUERY_BUILDER_FIELD_NAME, queryBuilderAsBytes)); } indexWriter.addDocument(doc); } - IndexShard mockIndexShard() { + IndexShard mockIndexShard(Version version, boolean legacyFormat) { IndexShard indexShard = mock(IndexShard.class); ShardIndexWarmerService shardIndexWarmerService = mock(ShardIndexWarmerService.class); when(shardIndexWarmerService.logger()).thenReturn(logger); @@ -340,11 +349,37 @@ public class PercolatorQueryCacheTests extends ESTestCase { IndexMetaData.builder("_index").settings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_VERSION_CREATED, version) ).build(), Settings.EMPTY ); when(indexShard.indexSettings()).thenReturn(indexSettings); + + PercolatorFieldMapper.PercolatorFieldType fieldType = mock(PercolatorFieldMapper.PercolatorFieldType.class); + when(fieldType.name()).thenReturn("query"); + when(fieldType.getQueryBuilderFieldName()).thenReturn(PercolatorFieldMapper.QUERY_BUILDER_FIELD_NAME); + PercolatorFieldMapper percolatorFieldMapper = mock(PercolatorFieldMapper.class); + when(percolatorFieldMapper.fieldType()).thenReturn(fieldType); + MapperService mapperService = mock(MapperService.class); + DocumentMapper documentMapper = mock(DocumentMapper.class); + if (legacyFormat) { + when(documentMapper.type()).thenReturn(PercolatorFieldMapper.LEGACY_TYPE_NAME); + when(documentMapper.typeFilter()) + .thenReturn(new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME))); + } else { + when(documentMapper.type()).thenReturn("query"); + when(documentMapper.typeFilter()).thenReturn(new TermQuery(new Term(TypeFieldMapper.NAME, "query"))); + } + + Analyzer analyzer = new SimpleAnalyzer(); + DocumentFieldMappers documentFieldMappers = + new DocumentFieldMappers(Collections.singleton(percolatorFieldMapper), analyzer, analyzer, analyzer); + when(documentMapper.mappers()).thenReturn(documentFieldMappers); + + when(mapperService.docMappers(false)).thenReturn(Collections.singleton(documentMapper)); + + when(indexShard.mapperService()).thenReturn(mapperService); + return indexShard; } diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java index 4009d37a0a3..976fb6fbfae 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryBuilderTests.java @@ -24,29 
+24,37 @@ import com.fasterxml.jackson.core.JsonParseException; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.script.Script; import org.hamcrest.Matchers; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; import java.util.Set; -import static java.util.Collections.singleton; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class PercolatorQueryBuilderTests extends AbstractQueryTestCase { - private static final Set SHUFFLE_PROTECTED_FIELDS = singleton(PercolatorQueryBuilder.DOCUMENT_FIELD.getPreferredName()); + private static final Set SHUFFLE_PROTECTED_FIELDS = + Collections.singleton(PercolatorQueryBuilder.DOCUMENT_FIELD.getPreferredName()); + + private static String queryField; + private static String docType; + private String indexedDocumentIndex; private String indexedDocumentType; private String indexedDocumentId; @@ -57,13 +65,25 @@ public class PercolatorQueryBuilderTests extends AbstractQueryTestCase pqb.toQuery(createShardContext())); + assertThat(e.getMessage(), equalTo("query builder must be rewritten first")); QueryBuilder rewrite = pqb.rewrite(createShardContext()); - PercolatorQueryBuilder 
geoShapeQueryBuilder = new PercolatorQueryBuilder(pqb.getDocumentType(), documentSource); + PercolatorQueryBuilder geoShapeQueryBuilder = new PercolatorQueryBuilder(pqb.getField(), pqb.getDocumentType(), documentSource); assertEquals(geoShapeQueryBuilder, rewrite); } public void testIndexedDocumentDoesNotExist() throws IOException { indexedDocumentExists = false; PercolatorQueryBuilder pqb = doCreateTestQueryBuilder(true); - try { - pqb.rewrite(createShardContext()); - fail("ResourceNotFoundException expected"); - } catch (ResourceNotFoundException e) { - String expectedString = "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentType + "/" + - indexedDocumentId + "] couldn't be found"; - assertThat(e.getMessage() , equalTo(expectedString)); - } + ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> pqb.rewrite(createShardContext())); + String expectedString = "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentType + "/" + + indexedDocumentId + "] couldn't be found"; + assertThat(e.getMessage() , equalTo(expectedString)); } // overwrite this test, because adding bogus field to the document part is valid and that would make the test fail @@ -172,51 +184,47 @@ public class PercolatorQueryBuilderTests extends AbstractQueryTestCase { + QueryBuilders.percolatorQuery(null, null, new BytesArray("{}")); + }); + assertThat(e.getMessage(), equalTo("[field] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolatorQuery("_field", null, new BytesArray("{}"))); + assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolatorQuery("_field", "_document_type", null)); + assertThat(e.getMessage(), equalTo("[document] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> { + QueryBuilders.percolatorQuery(null, null, "_index", "_type", "_id", 
null, null, null); + }); + assertThat(e.getMessage(), equalTo("[field] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> { + QueryBuilders.percolatorQuery("_field", null, "_index", "_type", "_id", null, null, null); + }); + assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> { + QueryBuilders.percolatorQuery("_field", "_document_type", null, "_type", "_id", null, null, null); + }); + assertThat(e.getMessage(), equalTo("[index] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> { + QueryBuilders.percolatorQuery("_field", "_document_type", "_index", null, "_id", null, null, null); + }); + assertThat(e.getMessage(), equalTo("[type] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> { + QueryBuilders.percolatorQuery("_field", "_document_type", "_index", "_type", null, null, null, null); + }); + assertThat(e.getMessage(), equalTo("[id] is a required argument")); } public void testFromJsonNoDocumentType() throws IOException { - try { - parseQuery("{\"percolator\" : { \"document\": {}}"); - fail("IllegalArgumentException expected"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("[percolator] query is missing required [document_type] parameter")); - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parseQuery("{\"percolator\" : { \"document\": {}}")); + assertThat(e.getMessage(), equalTo("[percolator] query is missing required [document_type] parameter")); } private static BytesReference randomSource() { diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java index af8ad994b7a..7ed947c7db0 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/PercolatorQueryTests.java @@ -54,6 +54,7 @@ import org.apache.lucene.store.Directory; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.percolator.ExtractQueryTermsService; import org.elasticsearch.index.percolator.PercolatorFieldMapper; @@ -147,8 +148,7 @@ public class PercolatorQueryTests extends ESTestCase { "docType", queryRegistry, new BytesArray("{}"), - percolateSearcher, - new MatchAllDocsQuery() + percolateSearcher ); builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); // no scoring, wrapping it in a constant score query: @@ -222,8 +222,7 @@ public class PercolatorQueryTests extends ESTestCase { "docType", queryRegistry, new BytesArray("{}"), - percolateSearcher, - new MatchAllDocsQuery() + percolateSearcher ); builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); Query query = builder.build(); @@ -326,7 +325,7 @@ public class PercolatorQueryTests extends ESTestCase { ParseContext.Document document = new ParseContext.Document(); ExtractQueryTermsService.extractQueryTerms(query, document, EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME, EXTRACTED_TERMS_FIELD_TYPE); - document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(PercolatorFieldMapper.TYPE_NAME, id))); + document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(PercolatorFieldMapper.LEGACY_TYPE_NAME, id))); assert extraFields.length % 2 == 0; for (int i = 0; i < extraFields.length; i++) { document.add(new StringField(extraFields[i], extraFields[++i], Field.Store.NO)); @@ -340,8 +339,7 @@ public class PercolatorQueryTests extends ESTestCase { "docType", queryRegistry, new BytesArray("{}"), - percolateSearcher, - new 
MatchAllDocsQuery() + percolateSearcher ); // enables the optimization that prevents queries from being evaluated that don't match builder1.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); @@ -351,9 +349,9 @@ public class PercolatorQueryTests extends ESTestCase { "docType", queryRegistry, new BytesArray("{}"), - percolateSearcher, - new MatchAllDocsQuery() + percolateSearcher ); + builder2.setPercolateTypeQuery(new MatchAllDocsQuery()); TopDocs topDocs2 = shardSearcher.search(builder2.build(), 10); assertThat(topDocs1.totalHits, equalTo(topDocs2.totalHits)); assertThat(topDocs1.scoreDocs.length, equalTo(topDocs2.scoreDocs.length)); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 10bdbeb887b..3be16393e2a 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -75,7 +75,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } public void testSpecifiedIndexUnavailableMultipleIndices() throws Exception { - createIndex("test1"); + assertAcked(prepareCreate("test1").addMapping("query", "query", "type=percolator")); ensureYellow(); // Verify defaults @@ -136,7 +136,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(getSettings("test1", "test2").setIndicesOptions(options), false); options = IndicesOptions.strictExpandOpen(); - assertAcked(prepareCreate("test2")); + assertAcked(prepareCreate("test2").addMapping("query", "query", "type=percolator")); ensureYellow(); verify(search("test1", "test2").setIndicesOptions(options), false); verify(msearch(options, "test1", "test2").setIndicesOptions(options), false); @@ -158,7 +158,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } public void 
testSpecifiedIndexUnavailableSingleIndexThatIsClosed() throws Exception { - assertAcked(prepareCreate("test1")); + assertAcked(prepareCreate("test1").addMapping("query", "query", "type=percolator")); // we need to wait until all shards are allocated since recovery from // gateway will fail unless the majority of the replicas was allocated // pre-closing. with lots of replicas this will fail. @@ -264,7 +264,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(getMapping("test1").setIndicesOptions(options), false); verify(getSettings("test1").setIndicesOptions(options), false); - assertAcked(prepareCreate("test1")); + assertAcked(prepareCreate("test1").addMapping("query", "query", "type=percolator")); ensureYellow(); options = IndicesOptions.strictExpandOpenAndForbidClosed(); @@ -357,7 +357,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(getMapping(indices).setIndicesOptions(options), false); verify(getSettings(indices).setIndicesOptions(options), false); - assertAcked(prepareCreate("foobar")); + assertAcked(prepareCreate("foobar").addMapping("query", "query", "type=percolator")); client().prepareIndex("foobar", "type", "1").setSource("k", "v").setRefresh(true).execute().actionGet(); // Verify defaults for wildcards, with one wildcard expression and one existing index diff --git a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 2d22a133811..6ec9ab24960 100644 --- a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import 
org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -55,97 +54,105 @@ import static org.hamcrest.Matchers.nullValue; /** */ public class MultiPercolatorIT extends ESIntegTestCase { + + private final static String INDEX_NAME = "queries"; + private final static String TYPE_NAME = "query"; + public void testBasics() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("type", "field1", "type=text")); ensureGreen(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); MultiPercolateResponse response = client().prepareMultiPercolate() .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") 
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))) .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject()))) .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject()))) .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject()))) .add(client().preparePercolate() // non existing doc, so error element - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("5"))) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5"))) .execute().actionGet(); MultiPercolateResponse.Item item = response.getItems()[0]; assertMatchCount(item.getResponse(), 2L); assertThat(item.getResponse().getMatches(), arrayWithSize(2)); assertThat(item.getErrorMessage(), nullValue()); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); item = response.getItems()[1]; assertThat(item.getErrorMessage(), nullValue()); assertMatchCount(item.getResponse(), 2L); assertThat(item.getResponse().getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); item = 
response.getItems()[2]; assertThat(item.getErrorMessage(), nullValue()); assertMatchCount(item.getResponse(), 4L); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); item = response.getItems()[3]; assertThat(item.getErrorMessage(), nullValue()); assertMatchCount(item.getResponse(), 1L); assertThat(item.getResponse().getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContaining("4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContaining("4")); item = response.getItems()[4]; assertThat(item.getResponse(), nullValue()); assertThat(item.getErrorMessage(), notNullValue()); - assertThat(item.getErrorMessage(), containsString("[test/type/5] doesn't exist")); + assertThat(item.getErrorMessage(), containsString("[" + INDEX_NAME + "/type/5] doesn't exist")); } public void testWithRouting() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("type", "field1", "type=text")); ensureGreen(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + 
client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setRouting("a") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); @@ -153,69 +160,69 @@ public class MultiPercolatorIT extends ESIntegTestCase { MultiPercolateResponse response = client().prepareMultiPercolate() .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))) .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject()))) .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject()))) .add(client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject()))) .add(client().preparePercolate() // non existing doc, so error element - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") - .setGetRequest(Requests.getRequest("test").type("type").id("5"))) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5"))) .execute().actionGet(); MultiPercolateResponse.Item 
item = response.getItems()[0]; assertMatchCount(item.getResponse(), 2L); assertThat(item.getResponse().getMatches(), arrayWithSize(2)); assertThat(item.getErrorMessage(), nullValue()); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); item = response.getItems()[1]; assertThat(item.getErrorMessage(), nullValue()); assertMatchCount(item.getResponse(), 2L); assertThat(item.getResponse().getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); item = response.getItems()[2]; assertThat(item.getErrorMessage(), nullValue()); assertMatchCount(item.getResponse(), 4L); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); item = response.getItems()[3]; assertThat(item.getErrorMessage(), nullValue()); assertMatchCount(item.getResponse(), 1L); assertThat(item.getResponse().getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContaining("4")); + assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContaining("4")); item = response.getItems()[4]; assertThat(item.getResponse(), nullValue()); assertThat(item.getErrorMessage(), notNullValue()); - assertThat(item.getErrorMessage(), containsString("[test/type/5] doesn't exist")); + assertThat(item.getErrorMessage(), containsString("[" + INDEX_NAME + "/type/5] doesn't exist")); } public void testExistingDocsOnly() throws Exception { - createIndex("test"); + 
prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); int numQueries = randomIntBetween(50, 100); logger.info("--> register a queries"); for (int i = 0; i < numQueries; i++) { - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); } - client().prepareIndex("test", "type", "1") + client().prepareIndex(INDEX_NAME, "type", "1") .setSource(jsonBuilder().startObject().field("field", "a")) .execute().actionGet(); refresh(); @@ -225,8 +232,8 @@ public class MultiPercolatorIT extends ESIntegTestCase { for (int i = 0; i < numPercolateRequest; i++) { builder.add( client().preparePercolate() - .setGetRequest(Requests.getRequest("test").type("type").id("1")) - .setIndices("test").setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(numQueries) ); } @@ -244,8 +251,9 @@ public class MultiPercolatorIT extends ESIntegTestCase { for (int i = 0; i < numPercolateRequest; i++) { builder.add( client().preparePercolate() - .setGetRequest(Requests.getRequest("test").type("type").id("2")) - .setIndices("test").setDocumentType("type").setSize(numQueries) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) + .setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries) + ); } @@ -262,14 +270,14 @@ public class MultiPercolatorIT extends ESIntegTestCase { for (int i = 0; i < numPercolateRequest; i++) { builder.add( client().preparePercolate() - .setGetRequest(Requests.getRequest("test").type("type").id("2")) - .setIndices("test").setDocumentType("type").setSize(numQueries) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) + .setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries) ); } builder.add( 
client().preparePercolate() - .setGetRequest(Requests.getRequest("test").type("type").id("1")) - .setIndices("test").setDocumentType("type").setSize(numQueries) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) + .setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries) ); response = builder.execute().actionGet(); @@ -280,13 +288,13 @@ public class MultiPercolatorIT extends ESIntegTestCase { } public void testWithDocsOnly() throws Exception { - createIndex("test"); + prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); ensureGreen(); int numQueries = randomIntBetween(50, 100); logger.info("--> register a queries"); for (int i = 0; i < numQueries; i++) { - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); } @@ -297,7 +305,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { for (int i = 0; i < numPercolateRequest; i++) { builder.add( client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(numQueries) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "a").endObject()))); } @@ -315,7 +323,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { for (int i = 0; i < numPercolateRequest; i++) { builder.add( client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSource("illegal json")); } @@ -331,13 +339,13 @@ public class MultiPercolatorIT extends ESIntegTestCase { for (int i = 0; i < numPercolateRequest; i++) { builder.add( client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSource("illegal json")); } builder.add( 
client().preparePercolate() .setSize(numQueries) - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "a").endObject()))); response = builder.execute().actionGet(); @@ -350,8 +358,8 @@ public class MultiPercolatorIT extends ESIntegTestCase { public void testNestedMultiPercolation() throws IOException { initNestedIndexAndPercolation(); MultiPercolateRequestBuilder mpercolate= client().prepareMultiPercolate(); - mpercolate.add(client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices("nestedindex").setDocumentType("company")); - mpercolate.add(client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices("nestedindex").setDocumentType("company")); + mpercolate.add(client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company")); + mpercolate.add(client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company")); MultiPercolateResponse response = mpercolate.get(); assertEquals(response.getItems()[0].getResponse().getMatches().length, 0); assertEquals(response.getItems()[1].getResponse().getMatches().length, 1); @@ -361,23 +369,24 @@ public class MultiPercolatorIT extends ESIntegTestCase { public void testStartTimeIsPropagatedToShardRequests() throws Exception { // See: https://github.com/elastic/elasticsearch/issues/15908 internalCluster().ensureAtLeastNumDataNodes(2); - client().admin().indices().prepareCreate("test") + client().admin().indices().prepareCreate(INDEX_NAME) .setSettings(Settings.builder() .put("index.number_of_shards", 1) .put("index.number_of_replicas", 1) ) .addMapping("type", "date_field", 
"type=date,format=strict_date_optional_time||epoch_millis") + .addMapping(TYPE_NAME, "query", "type=percolator") .get(); ensureGreen(); - client().prepareIndex("test", ".percolator", "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("date_field").lt("now+90d")).endObject()) .setRefresh(true) .get(); for (int i = 0; i < 32; i++) { MultiPercolateResponse response = client().prepareMultiPercolate() - .add(client().preparePercolate().setDocumentType("type").setIndices("test") + .add(client().preparePercolate().setDocumentType("type").setIndices(INDEX_NAME) .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("date_field", "2015-07-21T10:28:01-07:00"))) .get(); assertThat(response.getItems()[0].getResponse().getCount(), equalTo(1L)); @@ -392,10 +401,12 @@ public class MultiPercolatorIT extends ESIntegTestCase { .startObject("name").field("type", "text").endObject().endObject().endObject().endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate("nestedindex").addMapping("company", mapping)); - ensureGreen("nestedindex"); + assertAcked(client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("company", mapping)); + ensureGreen(INDEX_NAME); - client().prepareIndex("nestedindex", PercolatorFieldMapper.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() + client().prepareIndex(INDEX_NAME, TYPE_NAME, "Q").setSource(jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg)).endObject()).get(); refresh(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index 8cf529a130a..167110f0333 100644 --- 
a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -52,9 +52,14 @@ import static org.hamcrest.Matchers.notNullValue; */ public class PercolatorAggregationsIT extends ESIntegTestCase { + private final static String INDEX_NAME = "queries"; + private final static String TYPE_NAME = "query"; + // Just test the integration with facets and aggregations, not the facet and aggregation functionality! public void testAggregations() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=keyword")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("type", "field1", "type=text", "field2", "type=keyword")); ensureGreen(); int numQueries = scaledRandomIntBetween(250, 500); @@ -70,7 +75,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { String value = values[i % numUniqueQueries]; expectedCount[i % numUniqueQueries]++; QueryBuilder queryBuilder = matchQuery("field1", value); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject()).execute() .actionGet(); } @@ -79,7 +84,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = values[i % numUniqueQueries]; PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate() - .setIndices("test") + .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject())) .setSize(expectedCount[i % numUniqueQueries]); @@ -119,7 +124,9 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { // Just test the integration with facets and 
aggregations, not the facet and aggregation functionality! public void testAggregationsAndPipelineAggregations() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "field1", "type=text", "field2", "type=keyword")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("type", "field1", "type=text", "field2", "type=keyword")); ensureGreen(); int numQueries = scaledRandomIntBetween(250, 500); @@ -135,7 +142,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { String value = values[i % numUniqueQueries]; expectedCount[i % numUniqueQueries]++; QueryBuilder queryBuilder = matchQuery("field1", value); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject()).execute() .actionGet(); } @@ -144,7 +151,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = values[i % numUniqueQueries]; PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate() - .setIndices("test") + .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject())) .setSize(expectedCount[i % numUniqueQueries]); @@ -193,9 +200,11 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { } public void testSignificantAggs() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .execute().actionGet(); ensureGreen(); - PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate().setIndices("test").setDocumentType("type") + PercolateRequestBuilder percolateRequestBuilder = 
client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value").endObject())) .addAggregation(AggregationBuilders.significantTerms("a").field("field2")); PercolateResponse response = percolateRequestBuilder.get(); @@ -203,7 +212,8 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { } public void testSingleShardAggregations() throws Exception { - assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)) + assertAcked(prepareCreate(INDEX_NAME).setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)) + .addMapping(TYPE_NAME, "query", "type=percolator") .addMapping("type", "field1", "type=text", "field2", "type=keyword")); ensureGreen(); @@ -213,7 +223,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = "value0"; QueryBuilder queryBuilder = matchQuery("field1", value); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", i % 3 == 0 ? 
"b" : "a").endObject()) .execute() .actionGet(); @@ -223,7 +233,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = "value0"; PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate() - .setIndices("test") + .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject())) .setSize(numQueries); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java index a52dfa48c32..fbad26a5b6b 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java @@ -60,7 +60,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { assertThat(state.metaData().indices().get(INDEX_NAME).getUpgradedVersion(), equalTo(Version.CURRENT)); assertThat(state.metaData().indices().get(INDEX_NAME).getMappings().size(), equalTo(2)); assertThat(state.metaData().indices().get(INDEX_NAME).getMappings().get(".percolator"), notNullValue()); - // important: verify that the query field in the .percolator mapping is of type object (from 3.0.0 this is of type percolator) + // important: verify that the query field in the .percolator mapping is of type object (from 5.x this is of type percolator) MappingMetaData mappingMetaData = state.metaData().indices().get(INDEX_NAME).getMappings().get(".percolator"); assertThat(XContentMapValues.extractValue("properties.query.type", mappingMetaData.sourceAsMap()), equalTo("object")); assertThat(state.metaData().indices().get(INDEX_NAME).getMappings().get("message"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java 
b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 7b1a93c48d6..052a8910104 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardException; @@ -96,81 +95,87 @@ import static org.hamcrest.Matchers.nullValue; */ public class PercolatorIT extends ESIntegTestCase { + private final static String INDEX_NAME = "queries"; + private final static String TYPE_NAME = "query"; + public void testSimple1() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); ensureGreen(); logger.info("--> Add dummy doc"); - client().prepareIndex("test", "type", "1").setSource("field1", "value").execute().actionGet(); + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1").setSource("field1", "value").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - 
client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> Percolate doc with field1=b"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject())) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); logger.info("--> Percolate doc with field1=c"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject())) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate doc with field1=b c"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) 
.setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject())) .execute().actionGet(); assertMatchCount(response, 4L); assertThat(response.getMatches(), arrayWithSize(4)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); logger.info("--> Percolate doc with field1=d"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject())) .execute().actionGet(); assertMatchCount(response, 1L); assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("4")); logger.info("--> Percolate non existing doc"); try { client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("5")) + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5")) .execute().actionGet(); fail("Exception should have been thrown"); } catch (ResourceNotFoundException e) { - assertThat(e.getMessage(), equalTo("percolate document [test/type/5] doesn't exist")); + assertThat(e.getMessage(), equalTo("percolate document [queries/type/5] doesn't exist")); } } public void testSimple2() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long", "field2", "type=text")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping("type1", "field1", "type=long", "field2", "type=text") + .addMapping(TYPE_NAME, "query", "type=percolator") + ); ensureGreen(); // introduce the doc @@ -180,58 
+185,60 @@ public class PercolatorIT extends ESIntegTestCase { .endObject().endObject(); PercolateResponse response = client().preparePercolate().setSource(doc) - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .execute().actionGet(); assertMatchCount(response, 0L); assertThat(response.getMatches(), emptyArray()); // add first query... - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "test1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "test1") .setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field2", "value")).endObject()) .execute().actionGet(); refresh(); response = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setSource(doc).execute().actionGet(); assertMatchCount(response, 1L); assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("test1")); // add second query... 
- client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "test2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "test2") .setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field1", 1)).endObject()) .execute().actionGet(); refresh(); response = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(doc) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("test1", "test2")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("test1", "test2")); - client().prepareDelete("test", PercolatorFieldMapper.TYPE_NAME, "test2").execute().actionGet(); + client().prepareDelete(INDEX_NAME, TYPE_NAME, "test2").execute().actionGet(); refresh(); response = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(doc).execute().actionGet(); assertMatchCount(response, 1L); assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("test1")); } public void testPercolateQueriesWithRouting() throws Exception { - client().admin().indices().prepareCreate("test") + client().admin().indices().prepareCreate(INDEX_NAME) .setSettings(Settings.builder().put("index.number_of_shards", 2)) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("type", "field1", "type=string") .execute().actionGet(); ensureGreen(); logger.info("--> register a queries"); for (int i = 1; i <= 100; i++) { - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, 
Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .setRouting(Integer.toString(i % 2)) .execute().actionGet(); @@ -240,7 +247,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc with no routing"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())) .setSize(100) .execute().actionGet(); @@ -249,7 +256,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc with routing=0"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())) .setSize(100) .setRouting("0") @@ -259,7 +266,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc with routing=1"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())) .setSize(100) .setRouting("1") @@ -269,12 +276,12 @@ public class PercolatorIT extends ESIntegTestCase { } public void storePercolateQueriesOnRecreatedIndex() throws Exception { - createIndex("test"); + prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); ensureGreen(); - client().prepareIndex("my-queries-index", "test", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "test", "1").setSource("field1", "value1").execute().actionGet(); logger.info("--> register a query"); - 
client().prepareIndex("my-queries-index", PercolatorFieldMapper.TYPE_NAME, "kuku1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "kuku1") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -286,9 +293,9 @@ public class PercolatorIT extends ESIntegTestCase { createIndex("test"); ensureGreen(); - client().prepareIndex("my-queries-index", "test", "1").setSource("field1", "value1").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "test", "1").setSource("field1", "value1").execute().actionGet(); logger.info("--> register a query"); - client().prepareIndex("my-queries-index", PercolatorFieldMapper.TYPE_NAME, "kuku2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "kuku2") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -311,11 +318,14 @@ public class PercolatorIT extends ESIntegTestCase { .endObject() .endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(builder).addMapping("doc", mapping)); + assertAcked(prepareCreate(INDEX_NAME).setSettings(builder) + .addMapping("doc", mapping) + .addMapping(TYPE_NAME, "query", "type=percolator") + ); ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject() .field("source", "productizer") .field("query", QueryBuilders.constantScoreQuery(QueryBuilders.queryStringQuery("filingcategory:s"))) @@ -325,7 +335,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("doc") + .setIndices(INDEX_NAME).setDocumentType("doc") .setSource(jsonBuilder().startObject() .startObject("doc").field("filingcategory", "s").endObject() .field("query", termQuery("source", "productizer")) @@ -337,13 +347,14 @@ public class PercolatorIT extends 
ESIntegTestCase { } public void testCreateIndexAndThenRegisterPercolator() throws Exception { - prepareCreate("test") + prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") .addMapping("type1", "field1", "type=text") .get(); ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "kuku") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "kuku") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -351,14 +362,14 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); SearchResponse countResponse = client().prepareSearch().setSize(0) - .setQuery(matchAllQuery()).setTypes(PercolatorFieldMapper.TYPE_NAME) + .setQuery(matchAllQuery()).setTypes(TYPE_NAME) .execute().actionGet(); assertThat(countResponse.getHits().totalHits(), equalTo(1L)); for (int i = 0; i < 10; i++) { PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); assertMatchCount(percolate, 1L); @@ -367,7 +378,7 @@ public class PercolatorIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setPreference("_local") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); @@ -377,20 +388,23 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> delete the index"); - client().admin().indices().prepareDelete("test").execute().actionGet(); + client().admin().indices().prepareDelete(INDEX_NAME).execute().actionGet(); logger.info("--> make sure percolated 
queries for it have been deleted as well"); countResponse = client().prepareSearch().setSize(0) - .setQuery(matchAllQuery()).setTypes(PercolatorFieldMapper.TYPE_NAME) + .setQuery(matchAllQuery()).setTypes(TYPE_NAME) .execute().actionGet(); assertHitCount(countResponse, 0L); } public void testMultiplePercolators() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("type1", "field1", "type=text") + ); ensureGreen(); logger.info("--> register a query 1"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "kuku") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "kuku") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -399,7 +413,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); logger.info("--> register a query 2"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "bubu") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "bubu") .setSource(jsonBuilder().startObject() .field("color", "green") .field("query", termQuery("field1", "value2")) @@ -408,32 +422,33 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); assertMatchCount(percolate, 1L); assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("kuku")); + assertThat(convertFromTextArray(percolate.getMatches(), INDEX_NAME), arrayContaining("kuku")); percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + 
.setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject()) .execute().actionGet(); assertMatchCount(percolate, 1L); assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("bubu")); + assertThat(convertFromTextArray(percolate.getMatches(), INDEX_NAME), arrayContaining("bubu")); } public void testDynamicAddingRemovingQueries() throws Exception { assertAcked( - prepareCreate("test") + prepareCreate(INDEX_NAME) .addMapping("type1", "field1", "type=text") + .addMapping(TYPE_NAME, "query", "type=percolator") ); ensureGreen(); logger.info("--> register a query 1"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "kuku") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "kuku") .setSource(jsonBuilder().startObject() .field("color", "blue") .field("query", termQuery("field1", "value1")) @@ -442,15 +457,15 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); assertMatchCount(percolate, 1L); assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("kuku")); + assertThat(convertFromTextArray(percolate.getMatches(), INDEX_NAME), arrayContaining("kuku")); logger.info("--> register a query 2"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "bubu") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "bubu") .setSource(jsonBuilder().startObject() .field("color", "green") .field("query", termQuery("field1", "value2")) @@ -459,15 +474,15 @@ public class PercolatorIT extends ESIntegTestCase { 
.execute().actionGet(); percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject()) .execute().actionGet(); assertMatchCount(percolate, 1L); assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("bubu")); + assertThat(convertFromTextArray(percolate.getMatches(), INDEX_NAME), arrayContaining("bubu")); logger.info("--> register a query 3"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "susu") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "susu") .setSource(jsonBuilder().startObject() .field("color", "red") .field("query", termQuery("field1", "value2")) @@ -479,18 +494,18 @@ public class PercolatorIT extends ESIntegTestCase { .setDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value2").endObject())) .setQueryBuilder(termQuery("color", "red")); percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(sourceBuilder) .execute().actionGet(); assertMatchCount(percolate, 1L); assertThat(percolate.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("susu")); + assertThat(convertFromTextArray(percolate.getMatches(), INDEX_NAME), arrayContaining("susu")); logger.info("--> deleting query 1"); - client().prepareDelete("test", PercolatorFieldMapper.TYPE_NAME, "kuku").setRefresh(true).execute().actionGet(); + client().prepareDelete(INDEX_NAME, TYPE_NAME, "kuku").setRefresh(true).execute().actionGet(); percolate = client().preparePercolate() - .setIndices("test").setDocumentType("type1") + .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").startObject("type1") 
.field("field1", "value1") .endObject().endObject().endObject()) @@ -500,230 +515,242 @@ public class PercolatorIT extends ESIntegTestCase { } public void testPercolateStatistics() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); + client().admin().indices().prepareCreate("test2") + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); - client().prepareIndex("test2", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex("test2", TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> First percolate request"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject()) .execute().actionGet(); assertMatchCount(response, 1L); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("1")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("1")); - NumShards numShards = getNumShards("test"); + NumShards numShards = getNumShards(INDEX_NAME); - IndicesStatsResponse indicesResponse = client().admin().indices().prepareStats("test").execute().actionGet(); + IndicesStatsResponse indicesResponse = client().admin().indices().prepareStats(INDEX_NAME).execute().actionGet(); assertThat(indicesResponse.getTotal().getPercolatorCache().getNumQueries(), 
equalTo((long)numShards.dataCopies)); // number of copies } public void testPercolatingExistingDocs() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); logger.info("--> Adding docs"); - client().prepareIndex("test", "type", "1").setSource("field1", "b").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field1", "c").execute().actionGet(); - client().prepareIndex("test", "type", "3").setSource("field1", "b c").execute().actionGet(); - client().prepareIndex("test", "type", "4").setSource("field1", "d").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "1").setSource("field1", "b").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "2").setSource("field1", "c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "3").setSource("field1", "b c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "4").setSource("field1", "d").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", 
PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> Percolate existing doc with id 1"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("1")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); logger.info("--> Percolate existing doc with id 2"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("2")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate existing doc with id 3"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("3")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("3")) .execute().actionGet(); assertMatchCount(response, 4L); assertThat(response.getMatches(), arrayWithSize(4)); - 
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); logger.info("--> Percolate existing doc with id 4"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("4")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("4")) .execute().actionGet(); assertMatchCount(response, 1L); assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("4")); } public void testPercolatingExistingDocs_routing() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .execute().actionGet(); ensureGreen(); logger.info("--> Adding docs"); - client().prepareIndex("test", "type", "1").setSource("field1", "b").setRouting("4").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field1", "c").setRouting("3").execute().actionGet(); - client().prepareIndex("test", "type", "3").setSource("field1", "b c").setRouting("2").execute().actionGet(); - client().prepareIndex("test", "type", "4").setSource("field1", "d").setRouting("1").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "1").setSource("field1", "b").setRouting("4").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "2").setSource("field1", "c").setRouting("3").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "3").setSource("field1", "b c").setRouting("2").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", 
"4").setSource("field1", "d").setRouting("1").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> Percolate existing doc with id 1"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("1").routing("4")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1").routing("4")) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); logger.info("--> Percolate existing doc with id 2"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - 
.setGetRequest(Requests.getRequest("test").type("type").id("2").routing("3")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").routing("3")) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate existing doc with id 3"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("3").routing("2")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("3").routing("2")) .execute().actionGet(); assertMatchCount(response, 4L); assertThat(response.getMatches(), arrayWithSize(4)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); logger.info("--> Percolate existing doc with id 4"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("4").routing("1")) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("4").routing("1")) .execute().actionGet(); assertMatchCount(response, 1L); assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("4")); } public void testPercolatingExistingDocs_versionCheck() throws Exception { - 
client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); logger.info("--> Adding docs"); - client().prepareIndex("test", "type", "1").setSource("field1", "b").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field1", "c").execute().actionGet(); - client().prepareIndex("test", "type", "3").setSource("field1", "b c").execute().actionGet(); - client().prepareIndex("test", "type", "4").setSource("field1", "d").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "1").setSource("field1", "b").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "2").setSource("field1", "c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "3").setSource("field1", "b c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "4").setSource("field1", "d").execute().actionGet(); logger.info("--> registering queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") 
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> Percolate existing doc with id 2 and version 1"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("2").version(1L)) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").version(1L)) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate existing doc with id 2 and version 2"); try { client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("2").version(2L)) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").version(2L)) .execute().actionGet(); fail("Error should have been thrown"); } catch (VersionConflictEngineException e) { } logger.info("--> Index doc with id for the second time"); - client().prepareIndex("test", "type", "2").setSource("field1", "c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "2").setSource("field1", "c").execute().actionGet(); logger.info("--> Percolate existing doc with id 2 and version 2"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") - .setGetRequest(Requests.getRequest("test").type("type").id("2").version(2L)) + .setIndices(INDEX_NAME).setDocumentType("type") + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").version(2L)) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), 
arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); } public void testPercolateMultipleIndicesAndAliases() throws Exception { - createIndex("test1", "test2"); + prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); + prepareCreate(INDEX_NAME + "2").addMapping(TYPE_NAME, "query", "type=percolator").get(); ensureGreen(); logger.info("--> registering queries"); for (int i = 1; i <= 10; i++) { - String index = i % 2 == 0 ? "test1" : "test2"; - client().prepareIndex(index, PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + String index = i % 2 == 0 ? INDEX_NAME : INDEX_NAME + "2"; + client().prepareIndex(index, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); } @@ -731,7 +758,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc to index test1"); PercolateResponse response = client().preparePercolate() - .setIndices("test1").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); assertMatchCount(response, 5L); @@ -739,7 +766,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc to index test2"); response = client().preparePercolate() - .setIndices("test2").setDocumentType("type") + .setIndices(INDEX_NAME + "2").setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); assertMatchCount(response, 5L); @@ -747,7 +774,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc to index test1 and test2"); response = 
client().preparePercolate() - .setIndices("test1", "test2").setDocumentType("type") + .setIndices(INDEX_NAME, INDEX_NAME + "2").setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); assertMatchCount(response, 10L); @@ -755,7 +782,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate doc to index test2 and test3, with ignore missing"); response = client().preparePercolate() - .setIndices("test1", "test3").setDocumentType("type") + .setIndices(INDEX_NAME , INDEX_NAME + "3").setDocumentType("type") .setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); @@ -764,9 +791,9 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Adding aliases"); IndicesAliasesResponse aliasesResponse = client().admin().indices().prepareAliases() - .addAlias("test1", "my-alias1") - .addAlias("test2", "my-alias1") - .addAlias("test2", "my-alias2") + .addAlias(INDEX_NAME, "my-alias1") + .addAlias(INDEX_NAME + "2", "my-alias1") + .addAlias(INDEX_NAME + "2", "my-alias2") .setTimeout(TimeValue.timeValueHours(10)) .execute().actionGet(); assertTrue(aliasesResponse.isAcknowledged()); @@ -779,7 +806,7 @@ public class PercolatorIT extends ESIntegTestCase { assertMatchCount(response, 10L); assertThat(response.getMatches(), arrayWithSize(10)); for (PercolateResponse.Match match : response) { - assertThat(match.getIndex().string(), anyOf(equalTo("test1"), equalTo("test2"))); + assertThat(match.getIndex().string(), anyOf(equalTo(INDEX_NAME), equalTo(INDEX_NAME + "2"))); } logger.info("--> Percolate doc to my-alias2"); @@ -790,21 +817,22 @@ public class PercolatorIT extends ESIntegTestCase { assertMatchCount(response, 5L); assertThat(response.getMatches(), arrayWithSize(5)); for (PercolateResponse.Match match : response) { - 
assertThat(match.getIndex().string(), equalTo("test2")); + assertThat(match.getIndex().string(), equalTo(INDEX_NAME + "2")); } } public void testPercolateWithAliasFilter() throws Exception { - assertAcked(prepareCreate("my-index") - .addMapping(PercolatorFieldMapper.TYPE_NAME, "a", "type=keyword") + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("my-type", "a", "type=keyword") .addAlias(new Alias("a").filter(QueryBuilders.termQuery("a", "a"))) .addAlias(new Alias("b").filter(QueryBuilders.termQuery("a", "b"))) .addAlias(new Alias("c").filter(QueryBuilders.termQuery("a", "c"))) ); - client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("a", "a").endObject()) .get(); - client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("a", "b").endObject()) .get(); refresh(); @@ -872,33 +900,35 @@ public class PercolatorIT extends ESIntegTestCase { } public void testCountPercolation() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); logger.info("--> Add dummy doc"); - client().prepareIndex("test", "type", "1").setSource("field1", "value").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "1").setSource("field1", "value").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - 
client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> Count percolate doc with field1=b"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject())) .execute().actionGet(); assertMatchCount(response, 2L); @@ -906,7 +936,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Count percolate doc with field1=c"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject())) .execute().actionGet(); assertMatchCount(response, 2L); @@ -914,7 +944,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Count percolate doc with field1=b c"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) 
.setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject())) .execute().actionGet(); assertMatchCount(response, 4L); @@ -922,7 +952,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Count percolate doc with field1=d"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject())) .execute().actionGet(); assertMatchCount(response, 1L); @@ -931,78 +961,82 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Count percolate non existing doc"); try { client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) - .setGetRequest(Requests.getRequest("test").type("type").id("5")) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5")) .execute().actionGet(); fail("Exception should have been thrown"); } catch (ResourceNotFoundException e) { - assertThat(e.getMessage(), equalTo("percolate document [test/type/5] doesn't exist")); + assertThat(e.getMessage(), equalTo("percolate document [" + INDEX_NAME + "/type/5] doesn't exist")); } } public void testCountPercolatingExistingDocs() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); logger.info("--> Adding docs"); - client().prepareIndex("test", "type", "1").setSource("field1", "b").execute().actionGet(); - client().prepareIndex("test", "type", "2").setSource("field1", "c").execute().actionGet(); - client().prepareIndex("test", "type", "3").setSource("field1", "b c").execute().actionGet(); - client().prepareIndex("test", "type", 
"4").setSource("field1", "d").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "1").setSource("field1", "b").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "2").setSource("field1", "c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "3").setSource("field1", "b c").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "type", "4").setSource("field1", "d").execute().actionGet(); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); logger.info("--> Count percolate existing doc with id 1"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) - .setGetRequest(Requests.getRequest("test").type("type").id("1")) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), 
nullValue()); logger.info("--> Count percolate existing doc with id 2"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) - .setGetRequest(Requests.getRequest("test").type("type").id("2")) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) .execute().actionGet(); assertMatchCount(response, 2L); assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate existing doc with id 3"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) - .setGetRequest(Requests.getRequest("test").type("type").id("3")) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("3")) .execute().actionGet(); assertMatchCount(response, 4L); assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate existing doc with id 4"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type").setOnlyCount(true) - .setGetRequest(Requests.getRequest("test").type("type").id("4")) + .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("4")) .execute().actionGet(); assertMatchCount(response, 1L); assertThat(response.getMatches(), nullValue()); } public void testPercolateSizingWithQueryAndFilter() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); int numLevels = randomIntBetween(1, 25); @@ -1011,7 +1045,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> register {} queries", totalQueries); for (int level = 1; level <= numLevels; level++) { for (int query = 1; query <= numQueriesPerLevel; query++) { - 
client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, level + "-" + query) + client().prepareIndex(INDEX_NAME, TYPE_NAME, level + "-" + query) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", level).endObject()) .execute().actionGet(); } @@ -1020,7 +1054,7 @@ public class PercolatorIT extends ESIntegTestCase { boolean onlyCount = randomBoolean(); PercolateResponse response = client().preparePercolate() - .setIndices("my-index").setDocumentType("my-type") + .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) .setSize((int) totalQueries) @@ -1032,7 +1066,7 @@ public class PercolatorIT extends ESIntegTestCase { int size = randomIntBetween(0, (int) totalQueries - 1); response = client().preparePercolate() - .setIndices("my-index").setDocumentType("my-type") + .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) .setSize(size) @@ -1049,7 +1083,7 @@ public class PercolatorIT extends ESIntegTestCase { for (int i = 0; i < runs; i++) { onlyCount = randomBoolean(); response = client().preparePercolate() - .setIndices("my-index").setDocumentType("my-type") + .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) .setPercolateQuery(termQuery("level", 1 + randomInt(numLevels - 1))) @@ -1064,7 +1098,7 @@ public class PercolatorIT extends ESIntegTestCase { for (int i = 0; i < runs; i++) { onlyCount = randomBoolean(); response = client().preparePercolate() - .setIndices("my-index").setDocumentType("my-type") + .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) .setPercolateQuery(termQuery("level", 1 + randomInt(numLevels - 1))) @@ -1080,7 +1114,7 @@ public class PercolatorIT extends ESIntegTestCase { onlyCount = 
randomBoolean(); size = randomIntBetween(0, (int) numQueriesPerLevel - 1); response = client().preparePercolate() - .setIndices("my-index").setDocumentType("my-type") + .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1094,18 +1128,20 @@ public class PercolatorIT extends ESIntegTestCase { } public void testPercolateScoreAndSorting() throws Exception { - createIndex("my-index"); + prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); // Add a dummy doc, that shouldn't never interfere with percolate operations. - client().prepareIndex("my-index", "my-type", "1").setSource("field", "value").execute().actionGet(); + client().prepareIndex(INDEX_NAME, "my-type", "1").setSource("field", "value").execute().actionGet(); Map> controlMap = new HashMap<>(); long numQueries = randomIntBetween(100, 250); logger.info("--> register {} queries", numQueries); for (int i = 0; i < numQueries; i++) { int value = randomInt(10); - client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, Integer.toString(i)) + client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)) .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", i).field("field1", value).endObject()) .execute().actionGet(); if (!controlMap.containsKey(value)) { @@ -1120,7 +1156,7 @@ public class PercolatorIT extends ESIntegTestCase { int runs = randomInt(27); for (int i = 0; i < runs; i++) { int size = randomIntBetween(1, 50); - PercolateResponse response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type") + PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") .setScore(true) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1137,7 +1173,7 @@ public class PercolatorIT extends ESIntegTestCase { // Sort the queries by the score 
for (int i = 0; i < runs; i++) { int size = randomIntBetween(1, 10); - PercolateResponse response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type") + PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1150,7 +1186,7 @@ public class PercolatorIT extends ESIntegTestCase { for (PercolateResponse.Match match : response) { assertThat(match.getId().string(), equalTo(Integer.toString(expectedId))); assertThat(match.getScore(), equalTo((float) expectedId)); - assertThat(match.getIndex().string(), equalTo("my-index")); + assertThat(match.getIndex().string(), equalTo(INDEX_NAME)); expectedId--; } } @@ -1160,7 +1196,7 @@ public class PercolatorIT extends ESIntegTestCase { int value = usedValues.get(randomInt(usedValues.size() - 1)); NavigableSet levels = controlMap.get(value); int size = randomIntBetween(1, levels.size()); - PercolateResponse response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type") + PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1177,24 +1213,26 @@ public class PercolatorIT extends ESIntegTestCase { int controlLevel = levelIterator.next(); assertThat(match.getId().string(), equalTo(Integer.toString(controlLevel))); assertThat(match.getScore(), equalTo((float) controlLevel)); - assertThat(match.getIndex().string(), equalTo("my-index")); + assertThat(match.getIndex().string(), equalTo(INDEX_NAME)); } } } public void testPercolateSortingWithNoSize() throws Exception { - createIndex("my-index"); + prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); - client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "1") + 
client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", 1).endObject()) .execute().actionGet(); - client().prepareIndex("my-index", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("level", 2).endObject()) .execute().actionGet(); refresh(); - PercolateResponse response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type") + PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(2) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1208,10 +1246,12 @@ public class PercolatorIT extends ESIntegTestCase { } public void testPercolateOnEmptyIndex() throws Exception { - client().admin().indices().prepareCreate("my-index").execute().actionGet(); + prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); - PercolateResponse response = client().preparePercolate().setIndices("my-index").setDocumentType("my-type") + PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(2) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1228,36 +1268,39 @@ public class PercolatorIT extends ESIntegTestCase { } else if (randomBoolean()) { fieldMapping.append(",index_options=offsets"); } - assertAcked(prepareCreate("test").addMapping("type", "field1", fieldMapping.toString())); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping("type", "field1", fieldMapping.toString()) + .addMapping(TYPE_NAME, "query", "type=percolator") + ); logger.info("--> register a queries"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", 
matchQuery("field1", "brown fox")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "lazy dog")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "3") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "jumps")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "4") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "dog")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "5") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "5") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "fox")).endObject()) .execute().actionGet(); refresh(); logger.info("--> Percolate doc with field1=The quick brown fox jumps over the lazy dog"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) .execute().actionGet(); assertMatchCount(response, 5L); assertThat(response.getMatches(), arrayWithSize(5)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); PercolateResponse.Match[] matches = response.getMatches(); Arrays.sort(matches, (a, b) -> a.getId().compareTo(b.getId())); @@ -1270,7 
+1313,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Query percolate doc with field1=The quick brown fox jumps over the lazy dog"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) @@ -1278,7 +1321,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); assertMatchCount(response, 5L); assertThat(response.getMatches(), arrayWithSize(5)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); matches = response.getMatches(); Arrays.sort(matches, new Comparator() { @@ -1296,7 +1339,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Query percolate with score for doc with field1=The quick brown fox jumps over the lazy dog"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) @@ -1305,7 +1348,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); assertNoFailures(response); assertThat(response.getMatches(), arrayWithSize(5)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); matches = response.getMatches(); 
Arrays.sort(matches, new Comparator() { @@ -1328,7 +1371,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) @@ -1337,7 +1380,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); assertMatchCount(response, 5L); assertThat(response.getMatches(), arrayWithSize(5)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); matches = response.getMatches(); Arrays.sort(matches, new Comparator() { @@ -1360,7 +1403,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1").highlightQuery(QueryBuilders.matchQuery("field1", "jumps"))) @@ -1369,7 +1412,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); assertMatchCount(response, 5L); assertThat(response.getMatches(), arrayWithSize(5)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); + 
assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); matches = response.getMatches(); Arrays.sort(matches, new Comparator() { @@ -1391,23 +1434,23 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); // Highlighting an existing doc - client().prepareIndex("test", "type", "1") + client().prepareIndex(INDEX_NAME, "type", "1") .setSource(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()) .get(); refresh(); logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog"); response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) - .setGetRequest(Requests.getRequest("test").type("type").id("1")) + .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .setHighlightBuilder(new HighlightBuilder().field("field1")) .setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5L); assertThat(response.getMatches(), arrayWithSize(5)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4", "5")); matches = response.getMatches(); Arrays.sort(matches, new Comparator() { @@ -1430,11 +1473,13 @@ public class PercolatorIT extends ESIntegTestCase { } public void testPercolateNonMatchingConstantScoreQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("doc", "message", "type=text")); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("doc", "message", 
"type=text")); ensureGreen(); logger.info("--> register a query"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject() .field("query", QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() .must(QueryBuilders.queryStringQuery("root")) @@ -1445,7 +1490,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); PercolateResponse percolate = client().preparePercolate() - .setIndices("test").setDocumentType("doc") + .setIndices(INDEX_NAME).setDocumentType("doc") .setSource(jsonBuilder().startObject() .startObject("doc").field("message", "A new bonsai tree ").endObject() .endObject()) @@ -1456,9 +1501,9 @@ public class PercolatorIT extends ESIntegTestCase { public void testNestedPercolation() throws IOException { initNestedIndexAndPercolation(); - PercolateResponse response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices("nestedindex").setDocumentType("company").get(); + PercolateResponse response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company").get(); assertEquals(response.getMatches().length, 0); - response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices("nestedindex").setDocumentType("company").get(); + response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company").get(); assertEquals(response.getMatches().length, 1); assertEquals(response.getMatches()[0].getId().string(), "Q"); } @@ -1468,50 +1513,54 @@ public class PercolatorIT extends ESIntegTestCase { XContentBuilder doc = jsonBuilder(); doc.startObject(); doc.field("some_unnested_field", "value"); 
- PercolateResponse response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(doc)).setIndices("nestedindex").setDocumentType("company").get(); + PercolateResponse response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(doc)).setIndices(INDEX_NAME).setDocumentType("company").get(); assertNoFailures(response); } public void testNestedPercolationOnExistingDoc() throws IOException { initNestedIndexAndPercolation(); - client().prepareIndex("nestedindex", "company", "notmatching").setSource(getNotMatchingNestedDoc()).get(); - client().prepareIndex("nestedindex", "company", "matching").setSource(getMatchingNestedDoc()).get(); + client().prepareIndex(INDEX_NAME, "company", "notmatching").setSource(getNotMatchingNestedDoc()).get(); + client().prepareIndex(INDEX_NAME, "company", "matching").setSource(getMatchingNestedDoc()).get(); refresh(); - PercolateResponse response = client().preparePercolate().setGetRequest(Requests.getRequest("nestedindex").type("company").id("notmatching")).setDocumentType("company").setIndices("nestedindex").get(); + PercolateResponse response = client().preparePercolate().setGetRequest(Requests.getRequest(INDEX_NAME).type("company").id("notmatching")).setDocumentType("company").setIndices(INDEX_NAME).get(); assertEquals(response.getMatches().length, 0); - response = client().preparePercolate().setGetRequest(Requests.getRequest("nestedindex").type("company").id("matching")).setDocumentType("company").setIndices("nestedindex").get(); + response = client().preparePercolate().setGetRequest(Requests.getRequest(INDEX_NAME).type("company").id("matching")).setDocumentType("company").setIndices(INDEX_NAME).get(); assertEquals(response.getMatches().length, 1); assertEquals(response.getMatches()[0].getId().string(), "Q"); } public void testDontReportDeletedPercolatorDocs() throws Exception { - client().admin().indices().prepareCreate("test").execute().actionGet(); + 
client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .execute().actionGet(); ensureGreen(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .get(); refresh(); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "value").endObject())) .setPercolateQuery(QueryBuilders.matchAllQuery()) .get(); assertMatchCount(response, 1L); assertThat(response.getMatches(), arrayWithSize(1)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1")); } public void testAddQueryWithNoMapping() throws Exception { - client().admin().indices().prepareCreate("test").get(); + client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .get(); ensureGreen(); try { - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME) + client().prepareIndex(INDEX_NAME, TYPE_NAME) .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "value")).endObject()) .get(); fail(); @@ -1520,7 +1569,7 @@ public class PercolatorIT extends ESIntegTestCase { } try { - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME) + client().prepareIndex(INDEX_NAME, TYPE_NAME) .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(0).to(1)).endObject()) .get(); fail(); @@ -1530,27 
+1579,28 @@ public class PercolatorIT extends ESIntegTestCase { } public void testPercolatorQueryWithNowRange() throws Exception { - client().admin().indices().prepareCreate("test") + client().admin().indices().prepareCreate(INDEX_NAME) .addMapping("my-type", "timestamp", "type=date,format=epoch_millis") + .addMapping(TYPE_NAME, "query", "type=percolator") .get(); ensureGreen(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("timestamp").from("now-1d").to("now")).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") .setSource(jsonBuilder().startObject().field("query", constantScoreQuery(rangeQuery("timestamp").from("now-1d").to("now"))).endObject()) .get(); refresh(); logger.info("--> Percolate doc with field1=b"); PercolateResponse response = client().preparePercolate() - .setIndices("test").setDocumentType("my-type") + .setIndices(INDEX_NAME).setDocumentType("my-type") .setPercolateDoc(docBuilder().setDoc("timestamp", System.currentTimeMillis())) .get(); assertMatchCount(response, 2L); assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2")); + assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2")); } void initNestedIndexAndPercolation() throws IOException { @@ -1560,10 +1610,13 @@ public class PercolatorIT extends ESIntegTestCase { .startObject("name").field("type", "text").endObject().endObject().endObject().endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate("nestedindex").addMapping("company", mapping)); - ensureGreen("nestedindex"); + assertAcked(client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping("company", mapping) + .addMapping(TYPE_NAME, 
"query", "type=percolator") + ); + ensureGreen(INDEX_NAME); - client().prepareIndex("nestedindex", PercolatorFieldMapper.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() + client().prepareIndex(INDEX_NAME, TYPE_NAME, "Q").setSource(jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg)).endObject()).get(); refresh(); @@ -1690,17 +1743,19 @@ public class PercolatorIT extends ESIntegTestCase { " },\n" + "\"text\":\"foo\""+ "}"; - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping)); - ensureGreen("test"); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q1).setId("q1").get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q2).setId("q2").get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q3).setId("q3").get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q4).setId("q4").get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q5).setId("q5").get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME).setSource(q6).setId("q6").get(); + assertAcked(client().admin().indices().prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("doc", mapping)); + ensureGreen(INDEX_NAME); + client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q1).setId("q1").get(); + client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q2).setId("q2").get(); + client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q3).setId("q3").get(); + client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q4).setId("q4").get(); + client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q5).setId("q5").get(); + client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q6).setId("q6").get(); refresh(); PercolateResponse response = client().preparePercolate() - 
.setIndices("test").setDocumentType("doc") + .setIndices(INDEX_NAME).setDocumentType("doc") .setPercolateDoc(docBuilder().setDoc(doc)) .get(); assertMatchCount(response, 3L); @@ -1713,12 +1768,12 @@ public class PercolatorIT extends ESIntegTestCase { } assertTrue(expectedIds.isEmpty()); response = client().preparePercolate().setOnlyCount(true) - .setIndices("test").setDocumentType("doc") + .setIndices(INDEX_NAME).setDocumentType("doc") .setPercolateDoc(docBuilder().setDoc(doc)) .get(); assertMatchCount(response, 3L); response = client().preparePercolate().setScore(randomBoolean()).setSortByScore(randomBoolean()).setOnlyCount(randomBoolean()).setSize(10).setPercolateQuery(QueryBuilders.termQuery("text", "foo")) - .setIndices("test").setDocumentType("doc") + .setIndices(INDEX_NAME).setDocumentType("doc") .setPercolateDoc(docBuilder().setDoc(doc)) .get(); assertMatchCount(response, 3L); @@ -1729,14 +1784,15 @@ public class PercolatorIT extends ESIntegTestCase { Settings.Builder settings = Settings.builder() .put(indexSettings()) .put("index.percolator.map_unmapped_fields_as_string", true); - assertAcked(prepareCreate("test") + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") .setSettings(settings)); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME) + client().prepareIndex(INDEX_NAME, TYPE_NAME) .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()).get(); refresh(); logger.info("--> Percolate doc with field1=value"); PercolateResponse response1 = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value").endObject())) .execute().actionGet(); assertMatchCount(response1, 1L); @@ -1748,16 +1804,17 @@ public class PercolatorIT extends ESIntegTestCase { Settings.Builder settings = Settings.builder() .put(indexSettings()) 
.put("index.percolator.map_unmapped_fields_as_string", true); - assertAcked(prepareCreate("test") + assertAcked(prepareCreate(INDEX_NAME) .setSettings(settings) + .addMapping(TYPE_NAME, "query", "type=percolator") .addMapping("type", "location", "type=geo_shape")); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", geoShapeQuery("location", ShapeBuilders.newEnvelope(new Coordinate(0d, 50d), new Coordinate(2d, 40d)))).endObject()) .get(); refresh(); PercolateResponse response1 = client().preparePercolate() - .setIndices("test").setDocumentType("type") + .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject() .startObject("location") .field("type", "point") @@ -1785,9 +1842,11 @@ public class PercolatorIT extends ESIntegTestCase { .endObject() .endObject(); - assertAcked(prepareCreate("index").addMapping("mapping", mapping)); + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("mapping", mapping)); try { - client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value"), ScoreMode.Avg).innerHit(new InnerHitBuilder())).endObject()) .execute().actionGet(); fail("Expected a parse error, because inner_hits isn't supported in the percolate api"); @@ -1801,15 +1860,19 @@ public class PercolatorIT extends ESIntegTestCase { // We don't fail p/c queries, but those queries are unusable because only a single document can be provided in // the percolate api - assertAcked(prepareCreate("index").addMapping("child", "_parent", "type=parent").addMapping("parent")); - client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") + assertAcked(prepareCreate(INDEX_NAME) + 
.addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("child", "_parent", "type=parent").addMapping("parent")); + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", hasChildQuery("child", matchAllQuery(), ScoreMode.None)).endObject()) .execute().actionGet(); } public void testPercolateDocumentWithParentField() throws Exception { - assertAcked(prepareCreate("index").addMapping("child", "_parent", "type=parent").addMapping("parent")); - client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") + assertAcked(prepareCreate(INDEX_NAME) + .addMapping(TYPE_NAME, "query", "type=percolator") + .addMapping("child", "_parent", "type=parent").addMapping("parent")); + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); refresh(); @@ -1824,13 +1887,14 @@ public class PercolatorIT extends ESIntegTestCase { } public void testFilterByNow() throws Exception { - client().prepareIndex("index", PercolatorFieldMapper.TYPE_NAME, "1") + prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); + client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).field("created", "2015-07-10T14:41:54+0000").endObject()) .get(); refresh(); PercolateResponse response = client().preparePercolate() - .setIndices("index") + .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) .setPercolateQuery(rangeQuery("created").lte("now")) diff --git a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java index d9f592c745d..b5c43c3c19c 100644 --- a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java @@ -21,8 +21,8 @@ package org.elasticsearch.search.percolator; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -42,21 +42,24 @@ import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.hamcrest.Matchers.equalTo; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { public void testPercolatorQuery() throws Exception { createIndex("test", client().admin().indices().prepareCreate("test") .addMapping("type", "field1", "type=keyword", "field2", "type=keyword") + .addMapping("queries", "query", "type=percolator") ); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex("test", "queries", "1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex("test", "queries", "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex("test", "queries", "3") .setSource(jsonBuilder().startObject().field("query", boolQuery() 
.must(matchQuery("field1", "value")) .must(matchQuery("field2", "value")) @@ -66,7 +69,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { BytesReference source = jsonBuilder().startObject().endObject().bytes(); logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() - .setQuery(percolatorQuery("type", source)) + .setQuery(percolatorQuery("query", "type", source)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); @@ -74,7 +77,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); logger.info("percolating doc with 1 field"); response = client().prepareSearch() - .setQuery(percolatorQuery("type", source)) + .setQuery(percolatorQuery("query", "type", source)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -84,7 +87,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(percolatorQuery("type", source)) + .setQuery(percolatorQuery("query", "type", source)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -93,19 +96,70 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { assertThat(response.getHits().getAt(2).getId(), equalTo("3")); } + public void testPercolatorQueryExistingDocument() throws Exception { + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("type", "field1", "type=keyword", "field2", "type=keyword") + .addMapping("queries", "query", "type=percolator") + ); + + client().prepareIndex("test", "queries", "1") + .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) + .get(); + client().prepareIndex("test", "queries", 
"2") + .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "value")).endObject()) + .get(); + client().prepareIndex("test", "queries", "3") + .setSource(jsonBuilder().startObject().field("query", boolQuery() + .must(matchQuery("field1", "value")) + .must(matchQuery("field2", "value")) + ).endObject()).get(); + + client().prepareIndex("test", "type", "1").setSource("{}").get(); + client().prepareIndex("test", "type", "2").setSource("field1", "value").get(); + client().prepareIndex("test", "type", "3").setSource("field1", "value", "field2", "value").get(); + client().admin().indices().prepareRefresh().get(); + + logger.info("percolating empty doc"); + SearchResponse response = client().prepareSearch() + .setQuery(percolatorQuery("query", "type", "test", "type", "1")) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + + logger.info("percolating doc with 1 field"); + response = client().prepareSearch() + .setQuery(percolatorQuery("query", "type", "test", "type", "2")) + .addSort("_uid", SortOrder.ASC) + .get(); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + + logger.info("percolating doc with 2 fields"); + response = client().prepareSearch() + .setQuery(percolatorQuery("query", "type", "test", "type", "3")) + .addSort("_uid", SortOrder.ASC) + .get(); + assertHitCount(response, 3); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + public void testPercolatorSpecificQueries() throws Exception { createIndex("test", client().admin().indices().prepareCreate("test") .addMapping("type", "field1", "type=text", "field2", "type=text") + .addMapping("queries", "query", "type=percolator") ); - client().prepareIndex("test", 
PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex("test", "queries", "1") .setSource(jsonBuilder().startObject().field("query", commonTermsQuery("field1", "quick brown fox")).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex("test", "queries", "2") .setSource(jsonBuilder().startObject().field("query", multiMatchQuery("quick brown fox", "field1", "field2") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex("test", "queries", "3") .setSource(jsonBuilder().startObject().field("query", spanNearQuery(spanTermQuery("field1", "quick"), 0) .clause(spanTermQuery("field1", "brown")) @@ -115,7 +169,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .get(); client().admin().indices().prepareRefresh().get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex("test", "queries", "4") .setSource(jsonBuilder().startObject().field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) @@ -130,7 +184,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .get(); // doesn't match - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "5") + client().prepareIndex("test", "queries", "5") .setSource(jsonBuilder().startObject().field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) @@ -150,7 +204,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .field("field2", "the quick brown fox falls down into the well") .endObject().bytes(); SearchResponse response = client().prepareSearch() - .setQuery(percolatorQuery("type", source)) + .setQuery(percolatorQuery("query", "type", source)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 4); @@ -165,22 +219,30 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { } public 
void testPercolatorQueryWithHighlighting() throws Exception { + StringBuilder fieldMapping = new StringBuilder("type=text") + .append(",store=").append(randomBoolean()); + if (randomBoolean()) { + fieldMapping.append(",term_vector=with_positions_offsets"); + } else if (randomBoolean()) { + fieldMapping.append(",index_options=offsets"); + } createIndex("test", client().admin().indices().prepareCreate("test") - .addMapping("type", "field1", "type=text") + .addMapping("type", "field1", fieldMapping) + .addMapping("queries", "query", "type=percolator") ); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex("test", "queries", "1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "brown fox")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex("test", "queries", "2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "lazy dog")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "3") + client().prepareIndex("test", "queries", "3") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "jumps")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex("test", "queries", "4") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "dog")).endObject()) .execute().actionGet(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "5") + client().prepareIndex("test", "queries", "5") .setSource(jsonBuilder().startObject().field("query", termQuery("field1", "fox")).endObject()) .execute().actionGet(); client().admin().indices().prepareRefresh().get(); @@ -189,7 +251,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .field("field1", "The quick brown fox jumps over the lazy dog") .endObject().bytes(); SearchResponse 
searchResponse = client().prepareSearch() - .setQuery(percolatorQuery("type", document)) + .setQuery(percolatorQuery("query", "type", document)) .highlighter(new HighlightBuilder().field("field1")) .addSort("_uid", SortOrder.ASC) .get(); @@ -210,23 +272,109 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { public void testTakePositionOffsetGapIntoAccount() throws Exception { createIndex("test", client().admin().indices().prepareCreate("test") .addMapping("type", "field", "type=text,position_increment_gap=5") + .addMapping("queries", "query", "type=percolator") ); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "1") + client().prepareIndex("test", "queries", "1") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(4)).endObject()) .get(); - client().prepareIndex("test", PercolatorFieldMapper.TYPE_NAME, "2") + client().prepareIndex("test", "queries", "2") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(5)).endObject()) .get(); client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch().setQuery( - QueryBuilders.percolatorQuery("type", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}")) + QueryBuilders.percolatorQuery("query", "type", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}")) ).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } + public void testIllegalMappings() throws Exception { + String queryFieldName = randomAsciiOfLength(8); + MapperParsingException e = expectThrows(MapperParsingException.class, () -> { + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("doc_type", "field", "type=keyword") + .addMapping("query_type1", queryFieldName, "type=percolator") + .addMapping("query_type2", queryFieldName, "type=percolator", "second_query_field", "type=percolator") + 
.addMapping("query_type3", jsonBuilder().startObject().startObject("query_type3").startObject("properties") + .startObject("object_field") + .field("type", "object") + .startObject("properties") + .startObject(queryFieldName) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject().endObject()) + ); + }); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getCause().getMessage(), startsWith("Up to one percolator field type is allowed per index")); + } + + public void testWithMultiplePercolatorFields() throws Exception { + String queryFieldName = randomAsciiOfLength(8); + createIndex("test1", client().admin().indices().prepareCreate("test1") + .addMapping("doc_type", "field", "type=keyword") + .addMapping("query_type", queryFieldName, "type=percolator")); + createIndex("test2", client().admin().indices().prepareCreate("test2") + .addMapping("doc_type", "field", "type=keyword") + .addMapping("query_type", jsonBuilder().startObject().startObject("query_type").startObject("properties") + .startObject("object_field") + .field("type", "object") + .startObject("properties") + .startObject(queryFieldName) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject().endObject()) + ); + + // Acceptable: + client().prepareIndex("test1", "query_type", "1") + .setSource(jsonBuilder().startObject().field(queryFieldName, matchQuery("field", "value")).endObject()) + .get(); + client().prepareIndex("test2", "query_type", "1") + .setSource(jsonBuilder().startObject().startObject("object_field") + .field(queryFieldName, matchQuery("field", "value")) + .endObject().endObject()) + .get(); + client().admin().indices().prepareRefresh().get(); + + BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes(); + SearchResponse response = client().prepareSearch() + .setQuery(percolatorQuery(queryFieldName, "doc_type", source)) + .setIndices("test1") + .get(); 
+ assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).type(), equalTo("query_type")); + assertThat(response.getHits().getAt(0).index(), equalTo("test1")); + + response = client().prepareSearch() + .setQuery(percolatorQuery("object_field." + queryFieldName, "doc_type", source)) + .setIndices("test2") + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).type(), equalTo("query_type")); + assertThat(response.getHits().getAt(0).index(), equalTo("test2")); + + // Unacceptable: + MapperParsingException e = expectThrows(MapperParsingException.class, () -> { + client().prepareIndex("test2", "query_type", "1") + .setSource(jsonBuilder().startObject().startArray("object_field") + .startObject().field(queryFieldName, matchQuery("field", "value")).endObject() + .startObject().field(queryFieldName, matchQuery("field", "value")).endObject() + .endArray().endObject()) + .get(); + }); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query")); + } + } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index a5f0d4a42de..b539329a9e8 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchRequest; import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.common.settings.Settings; @@ -350,8 +349,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { client().prepareIndex(INDEX, TYPE, "" + i) .setSource(source).execute().actionGet(); } + client().admin().indices().preparePutMapping(INDEX).setType("query").setSource("query", "type=percolator").get(); - client().prepareIndex(INDEX, PercolatorFieldMapper.TYPE_NAME, "4") + client().prepareIndex(INDEX, "query", "4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java index c730d70bb0b..30476adc18c 100644 --- a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java @@ -219,10 +219,11 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { public void testThatPercolatingExistingDocumentGetRequestContainsContextAndHeaders() throws Exception { Client client = transportClient(); - client.prepareIndex(lookupIndex, ".percolator", "1") - .setSource( - jsonBuilder() - .startObject().startObject("query").startObject("match").field("name", "star wars").endObject().endObject().endObject()) + client.admin().indices().preparePutMapping(lookupIndex).setType("query").setSource("query", "type=percolator").get(); + client.prepareIndex(lookupIndex, "query", "1") + .setSource(jsonBuilder().startObject() + .startObject("query").startObject("match").field("name", "star wars").endObject().endObject() + .endObject()) .get(); client.prepareIndex(lookupIndex, "type", "1") .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) diff --git a/docs/reference/mapping/types.asciidoc 
b/docs/reference/mapping/types.asciidoc index 30d6bd56b1f..f309d1a485f 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -41,6 +41,8 @@ Attachment datatype:: which supports indexing `attachments` like Microsoft Office formats, Open Document formats, ePub, HTML, etc. into an `attachment` datatype. +<>:: Accepts queries from the query-dsl + [float] === Multi-fields @@ -83,6 +85,8 @@ include::types/text.asciidoc[] include::types/token-count.asciidoc[] +include::types/percolator.asciidoc[] + diff --git a/docs/reference/mapping/types/percolator.asciidoc b/docs/reference/mapping/types/percolator.asciidoc new file mode 100644 index 00000000000..d972e5efe68 --- /dev/null +++ b/docs/reference/mapping/types/percolator.asciidoc @@ -0,0 +1,86 @@ +[[percolator]] +=== Percolator type + +The `percolator` field type parses a json structure into a native query and +stores that query, so that the <> +can use it to match provided documents. + +Any field that contains a json object can be configured to be a percolator +field. The percolator field type has no settings. Just configuring the `percolator` +field type is sufficient to instruct Elasticsearch to treat a field as a +query. + +If the following mapping configures the `percolator` field type for the +`query` field: + +[source,js] +-------------------------------------------------- +{ + "properties": { + "query": { + "type": "percolator" + } + } +} +-------------------------------------------------- + +Then the following json snippet can be indexed as a native query: + +[source,js] +-------------------------------------------------- +{ + "query" : { + "match" : { + "field" : "value" + } + } +} +-------------------------------------------------- + +[IMPORTANT] +===================================== + +Fields referred to in a percolator query must *already* exist in the mapping +associated with the index used for percolation. 
In order to make sure these fields exist, +add or update a mapping via the <> or <> APIs. +Fields referred to in a percolator query may exist in any type of the index containing the `percolator` field type. + +Also an index can only contain up to one percolator field mapping. Multiple percolator fields will be rejected by the +put index and put mapping APIs. + +===================================== + +[float] +==== Dedicated Percolator Index + +Percolate queries can be added to any index. Instead of adding percolate queries to the index the data resides in, +these queries can also be added to a dedicated index. The advantage of this is that this dedicated percolator index +can have its own index settings (for example the number of primary and replica shards). If you choose to have a dedicated +percolate index, you need to make sure that the mappings from the normal index are also available on the percolate index. +Otherwise percolate queries can be parsed incorrectly. + +[float] +==== Forcing Unmapped Fields to be Handled as Strings + +In certain cases it is unknown what kind of percolator queries do get registered, and if no field mapping exists for fields +that are referred to by percolator queries then adding a percolator query fails. This means the mapping needs to be updated +to have the field with the appropriate settings, and then the percolator query can be added. But sometimes it is sufficient +if all unmapped fields are handled as if these were default string fields. In those cases one can configure the +`index.percolator.map_unmapped_fields_as_string` setting to `true` (defaults to `false`) and then if a field referred to in +a percolator query does not exist, it will be handled as a default string field so that adding the percolator query doesn't +fail. + +[float] +==== Important Notes + +Because the percolator query is processing one document at a time, it doesn't support queries and filters that run +against child documents such as `has_child` and `has_parent`.
+ +There are a number of queries that fetch data via a get call during query parsing. For example the `terms` query when +using terms lookup, `template` query when using indexed scripts and `geo_shape` when using pre-indexed shapes. When these +queries are indexed by the `percolator` field type then the get call is executed once. So each time the `percolator` +query evaluates these queries, the fetched terms, shapes etc. as they were upon index time will be used. + +The `wildcard` and `regexp` queries natively use a lot of memory and because the percolator keeps the queries in memory +this can easily take up the available memory in the heap space. If possible try to use a `prefix` query or ngramming to +achieve the same result (with way less memory being used). \ No newline at end of file diff --git a/docs/reference/migration/migrate_5_0/percolator.asciidoc b/docs/reference/migration/migrate_5_0/percolator.asciidoc index 73f262afb70..0d8e46917f9 100644 --- a/docs/reference/migration/migrate_5_0/percolator.asciidoc +++ b/docs/reference/migration/migrate_5_0/percolator.asciidoc @@ -15,7 +15,16 @@ percolation. Percolator and multi percolate APIs have been deprecated and will be removed in the next major release. These APIs have been replaced by the `percolator` query that can be used in the search and multi search APIs. -==== Percolator mapping +==== Percolator field mapping + +The `.percolator` type can no longer be used to index percolator queries. + +Instead a <> must be configured prior to indexing percolator queries. + +Indices with a `.percolator` type created on a version before 5.0.0 can still be used, +but new indices no longer accept the `.percolator` type. + +==== Percolate document mapping The `percolator` query can no longer accept documents that reference fields that don't already exist in the mapping. Before the percolate API allowed this.
diff --git a/docs/reference/query-dsl/percolator-query.asciidoc b/docs/reference/query-dsl/percolator-query.asciidoc index f2b1e2b9805..17563bb9e8c 100644 --- a/docs/reference/query-dsl/percolator-query.asciidoc +++ b/docs/reference/query-dsl/percolator-query.asciidoc @@ -1,49 +1,56 @@ [[query-dsl-percolator-query]] === Percolator Query -Traditionally you design documents based on your data, store them into an index, and then define queries via the search API -in order to retrieve these documents. The percolator works in the opposite direction. First you store queries into an -index and then you use the `percolator` query to search for the queries which match a specified document (or documents). - -The reason that queries can be stored comes from the fact that in Elasticsearch both documents and queries are defined in -JSON. This allows you to embed queries into documents via the index API. Elasticsearch can extract the query from a -document and make it available for search via the `percolator` query. Since documents are also defined as JSON, -you can define a document in the `percolator` query. - -[IMPORTANT] -===================================== - -Fields referred to in a percolator query must *already* exist in the mapping -associated with the index used for percolation. In order to make sure these fields exist, -add or update a mapping via the <> or <> APIs. - -===================================== +The `percolator` query can be used to match queries +stored in an index. The `percolator` query itself +contains the document that will be used as a query +to match with the stored queries.
[float] === Sample Usage -Create an index with a mapping for the field `message`: +Create an index with two mappings: [source,js] -------------------------------------------------- -curl -XPUT 'localhost:9200/my-index' -d '{ +curl -XPUT "http://localhost:9200/my-index" -d' +{ "mappings": { - "my-type": { + "doctype": { "properties": { "message": { "type": "string" } } + }, + "queries": { + "properties": { + "query": { + "type": "percolator" + } + } } } }' -------------------------------------------------- +The `doctype` mapping is the mapping used to preprocess +the document defined in the `percolator` query before it +gets indexed into a temporary index. + +The `queries` mapping is the mapping used for indexing +the query documents. The `query` field will hold a json +object that represents an actual Elasticsearch query. The +`query` field has been configured to use the +<>. This field type understands +the query dsl and stores the query in such a way that it +can be used later on to match documents defined in the `percolator` query.
+ Register a query in the percolator: [source,js] -------------------------------------------------- -curl -XPUT 'localhost:9200/my-index/.percolator/1' -d '{ +curl -XPUT 'localhost:9200/my-index/queries/1' -d '{ "query" : { "match" : { "message" : "bonsai tree" @@ -59,7 +66,8 @@ Match a document to the registered percolator queries: curl -XGET 'localhost:9200/my-index/_search' -d '{ "query" : { "percolator" : { - "document_type" : "my-type", + "field" : "query", + "document_type" : "doctype", "document" : { "message" : "A new bonsai tree in the office" } @@ -82,13 +90,13 @@ The above request will yield the following response: }, "hits": { "total": 1, - "max_score": 0, + "max_score": 0.5716521, "hits": [ { <1> "_index": "my-index", - "_type": ".percolator", + "_type": "queries", "_id": "1", - "_score": 0, + "_score": 0.5716521, "_source": { "query": { "match": { @@ -104,73 +112,14 @@ The above request will yield the following response: <1> The percolate query with id `1` matches our document. -[float] -=== Indexing Percolator Queries - -Percolate queries are stored as documents in a specific format and in an arbitrary index under a reserved type with the -name `.percolator`. The query itself is placed as is in a JSON object under the top level field `query`. - -[source,js] --------------------------------------------------- -{ - "query" : { - "match" : { - "field" : "value" - } - } -} --------------------------------------------------- - -Since this is just an ordinary document, any field can be added to this document. This can be useful later on to only -percolate documents by specific queries. - -[source,js] --------------------------------------------------- -{ - "query" : { - "match" : { - "field" : "value" - } - }, - "priority" : "high" -} --------------------------------------------------- - -Just as with any other type, the `.percolator` type has a mapping, which you can configure via the mappings APIs. 
-The default percolate mapping doesn't index the query field, only stores it. - -Because `.percolate` is a type it also has a mapping. By default the following mapping is active: - -[source,js] -------------------------------------------------- -{ - ".percolator" : { - "properties" : { - "query" : { - "type" : "percolator" - } - } - } -} --------------------------------------------------- - -If needed, this mapping can be modified with the update mapping API. - -In order to un-register a percolate query the delete API can be used. So if the previous added query needs to be deleted -the following delete requests needs to be executed: - -[source,js] --------------------------------------------------- -curl -XDELETE localhost:9200/my-index/.percolator/1 --------------------------------------------------- - [float] ==== Parameters The following parameters are required when percolating a document: [horizontal] -`document_type`:: The type / mapping of the document being percolated. This is parameter is always required. +`field`:: The field of type `percolator` that holds the indexed queries. This is a required parameter. +`document_type`:: The type / mapping of the document being percolated. This is a required parameter. `document`:: The source of the document being percolated. Instead of specifying a the source of the document being percolated, the source can also be retrieved from an already @@ -186,15 +135,6 @@ In that case the `document` parameter can be substituted with the following para `preference`:: Optionally, preference to be used to fetch document to percolate. `version`:: Optionally, the expected version of the document to be fetched. -[float] -==== Dedicated Percolator Index - -Percolate queries can be added to any index. Instead of adding percolate queries to the index the data resides in, -these queries can also be added to a dedicated index. 
The advantage of this is that this dedicated percolator index -can have its own index settings (For example the number of primary and replica shards). If you choose to have a dedicated -percolate index, you need to make sure that the mappings from the normal index are also available on the percolate index. -Otherwise percolate queries can be parsed incorrectly. - [float] ==== Percolating an Existing Document @@ -243,7 +183,8 @@ curl -XGET "http://localhost:9200/my-index/_search" -d' { "query" : { "percolator" : { - "document_type" : "my-type", + "field": "query", + "document_type" : "doctype", "index" : "my-index", "type" : "message", "id" : "1", @@ -275,7 +216,7 @@ Add a percolator query: [source,js] -------------------------------------------------- -curl -XPUT "http://localhost:9200/my-index/.percolator/1" -d' +curl -XPUT "http://localhost:9200/my-index/queries/1" -d' { "query" : { "match" : { @@ -289,7 +230,7 @@ Add another percolator query: [source,js] -------------------------------------------------- -curl -XPUT "http://localhost:9200/my-index/.percolator/2" -d' +curl -XPUT "http://localhost:9200/my-index/queries/2" -d' { "query" : { "match" : { @@ -299,7 +240,7 @@ curl -XPUT "http://localhost:9200/my-index/.percolator/2" -d' }' -------------------------------------------------- -Execute a search request with `percolator` and highlighting enabled: +Execute a search request with the `percolator` query and highlighting enabled: [source,js] -------------------------------------------------- @@ -307,7 +248,8 @@ curl -XGET "http://localhost:9200/my-index/_search" -d' { "query" : { "percolator" : { - "document_type" : "my-type", + "field": "query", + "document_type" : "doctype", "document" : { "message" : "The quick brown fox jumps over the lazy dog" } @@ -326,7 +268,7 @@ This will yield the following response. 
[source,js] -------------------------------------------------- { - "took": 14, + "took": 83, "timed_out": false, "_shards": { "total": 5, @@ -335,13 +277,13 @@ This will yield the following response. }, "hits": { "total": 2, - "max_score": 0, + "max_score": 0.5446649, "hits": [ { "_index": "my-index", - "_type": ".percolator", + "_type": "queries", "_id": "2", - "_score": 0, + "_score": 0.5446649, "_source": { "query": { "match": { @@ -351,15 +293,15 @@ This will yield the following response. }, "highlight": { "message": [ - "The quick brown fox jumps over the lazy dog" <1> + "The quick brown fox jumps over the lazy dog" ] } }, { "_index": "my-index", - "_type": ".percolator", + "_type": "queries", "_id": "1", - "_score": 0, + "_score": 0.5446649, "_source": { "query": { "match": { @@ -369,7 +311,7 @@ This will yield the following response. }, "highlight": { "message": [ - "The quick brown fox jumps over the lazy dog" <1> + "The quick brown fox jumps over the lazy dog" ] } } @@ -384,41 +326,14 @@ This will yield the following response. [float] ==== How it Works Under the Hood -When indexing a document that contains a query in an index and the `.percolator` type, the query part of the documents gets -parsed into a Lucene query and is kept in memory until that percolator document is removed or the index containing the -`.percolator` type gets removed. So, all the active percolator queries are kept in memory. +When indexing a document into an index that has the <> mapping configured, the query +part of the document gets parsed into a Lucene query and is kept in memory until that percolator document is removed. +So, all the active percolator queries are kept in memory. At search time, the document specified in the request gets parsed into a Lucene document and is stored in a in-memory 
Then all the queries -that are registered to the index that the searh request is targeted for, are going to be executed on this single document +temporary Lucene index. This in-memory index can just hold this one document and it is optimized for that. Then all the queries +that are registered to the index that the search request is targeted for, are going to be executed on this single document in-memory index. This happens on each shard the search request needs to execute. By using `routing` or additional queries the amount of percolator queries that need to be executed can be reduced and thus -the time the search API needs to run can be decreased. - -[float] -==== Important Notes - -Because the percolator query is processing one document at a time, it doesn't support queries and filters that run -against child documents such as `has_child` and `has_parent`. - -The percolator doesn't work with queries like `template` and `geo_shape` queries when these queries fetch documents -to substitute parts of the query. The reason is that the percolator stores the query terms during indexing in order to -speedup percolating in certain cases and this doesn't work if part of the query is defined in another document. -There is no way to know for the percolator to know if an external document has changed and even if this was the case the -percolator query has to be reindexed. - -The `wildcard` and `regexp` query natively use a lot of memory and because the percolator keeps the queries into memory -this can easily take up the available memory in the heap space. If possible try to use a `prefix` query or ngramming to -achieve the same result (with way less memory being used). - -[float] -==== Forcing Unmapped Fields to be Handled as Strings - -In certain cases it is unknown what kind of percolator queries do get registered, and if no field mapping exists for fields -that are referred by percolator queries then adding a percolator query fails. 
This means the mapping needs to be updated -to have the field with the appropriate settings, and then the percolator query can be added. But sometimes it is sufficient -if all unmapped fields are handled as if these were default string fields. In those cases one can configure the -`index.percolator.map_unmapped_fields_as_string` setting to `true` (default to `false`) and then if a field referred in -a percolator query does not exist, it will be handled as a default string field so that adding the percolator query doesn't -fail. \ No newline at end of file +the time the search API needs to run can be decreased. \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mpercolate/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mpercolate/10_basic.yaml index 66d62e49635..ab75d6abe29 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mpercolate/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mpercolate/10_basic.yaml @@ -1,5 +1,16 @@ --- "Basic multi-percolate": + + - do: + indices.create: + index: percolator_index + body: + mappings: + queries: + properties: + query: + type: percolator + - do: index: index: percolator_index @@ -10,7 +21,7 @@ - do: index: index: percolator_index - type: .percolator + type: queries id: test_percolator body: query: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/15_new.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/15_new.yaml index 63c5b9a44e8..45532d95757 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/15_new.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/15_new.yaml @@ -4,11 +4,17 @@ - do: indices.create: index: test_index + body: + mappings: + queries: + properties: + query: + type: percolator - do: index: index: test_index - type: .percolator + type: queries id: test_percolator body: query: diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml index c6f12131f35..9da8b33b996 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml @@ -4,11 +4,17 @@ - do: indices.create: index: percolator_index + body: + mappings: + queries: + properties: + query: + type: percolator - do: index: index: percolator_index - type: .percolator + type: queries id: test_percolator body: query: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/17_empty.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/17_empty.yaml index 0cd1ac5bb8d..a6a56bb4636 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/17_empty.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/17_empty.yaml @@ -4,6 +4,12 @@ - do: indices.create: index: test_index + body: + mappings: + queries: + properties: + query: + type: percolator - do: indices.refresh: {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml index 83490c4d9a7..8f3287ddd02 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml @@ -10,11 +10,15 @@ properties: foo: type: text + queries: + properties: + query: + type: percolator - do: index: index: test_index - type: .percolator + type: queries id: test_percolator body: query: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/19_nested.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/19_nested.yaml index 2713f5755c4..eff66846da6 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/19_nested.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/19_nested.yaml @@ -14,6 +14,10 @@ setup: properties: name: type: text + queries: + properties: + query: + type: percolator - do: @@ -21,7 +25,7 @@ setup: - do: index: index: nestedindex - type: ".percolator" + type: "queries" id: query body: { "query": { "nested": { "path": "employee", "score_mode": "avg", "query": { "match": { "employee.name": { "query": "virginia potts", "operator": "and"} } } } } } - do: