percolator: Store the QueryBuilder's Writable representation instead of its XContent representation.
The Writable representation is less heavy to parse and that will benefit percolate performance and throughput. The query builder's binary format now has the same bwc guarantees as the xcontent format. Added a qa test that verifies that percolator queries written in older versions are still readable by the current version.
This commit is contained in:
parent
db90455afd
commit
7c3735bdc4
|
@ -21,9 +21,9 @@ package org.elasticsearch.index;
|
|||
|
||||
import org.apache.lucene.util.SetOnce;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.TriFunction;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -40,7 +40,6 @@ import org.elasticsearch.index.shard.IndexEventListener;
|
|||
import org.elasticsearch.index.shard.IndexSearcherWrapper;
|
||||
import org.elasticsearch.index.shard.IndexingOperationListener;
|
||||
import org.elasticsearch.index.shard.SearchOperationListener;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
|
@ -330,7 +329,8 @@ public final class IndexModule {
|
|||
Client client,
|
||||
IndicesQueryCache indicesQueryCache,
|
||||
MapperRegistry mapperRegistry,
|
||||
IndicesFieldDataCache indicesFieldDataCache)
|
||||
IndicesFieldDataCache indicesFieldDataCache,
|
||||
NamedWriteableRegistry namedWriteableRegistry)
|
||||
throws IOException {
|
||||
final IndexEventListener eventListener = freeze();
|
||||
IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null
|
||||
|
@ -364,7 +364,7 @@ public final class IndexModule {
|
|||
return new IndexService(indexSettings, environment, xContentRegistry, new SimilarityService(indexSettings, similarities),
|
||||
shardStoreDeleter, analysisRegistry, engineFactory.get(), circuitBreakerService, bigArrays, threadPool, scriptService,
|
||||
client, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry,
|
||||
indicesFieldDataCache, searchOperationListeners, indexOperationListeners);
|
||||
indicesFieldDataCache, searchOperationListeners, indexOperationListeners, namedWriteableRegistry);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.client.Client;
|
|||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
|
@ -100,6 +101,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||
private final IndexCache indexCache;
|
||||
private final MapperService mapperService;
|
||||
private final NamedXContentRegistry xContentRegistry;
|
||||
private final NamedWriteableRegistry namedWriteableRegistry;
|
||||
private final SimilarityService similarityService;
|
||||
private final EngineFactory engineFactory;
|
||||
private final IndexWarmer warmer;
|
||||
|
@ -142,11 +144,13 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||
MapperRegistry mapperRegistry,
|
||||
IndicesFieldDataCache indicesFieldDataCache,
|
||||
List<SearchOperationListener> searchOperationListeners,
|
||||
List<IndexingOperationListener> indexingOperationListeners) throws IOException {
|
||||
List<IndexingOperationListener> indexingOperationListeners,
|
||||
NamedWriteableRegistry namedWriteableRegistry) throws IOException {
|
||||
super(indexSettings);
|
||||
this.indexSettings = indexSettings;
|
||||
this.xContentRegistry = xContentRegistry;
|
||||
this.similarityService = similarityService;
|
||||
this.namedWriteableRegistry = namedWriteableRegistry;
|
||||
this.mapperService = new MapperService(indexSettings, registry.build(indexSettings), xContentRegistry, similarityService,
|
||||
mapperRegistry,
|
||||
// we parse all percolator queries as they would be parsed on shard 0
|
||||
|
@ -464,8 +468,11 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||
* {@link IndexReader}-specific optimizations, such as rewriting containing range queries.
|
||||
*/
|
||||
public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis, String clusterAlias) {
|
||||
return new QueryShardContext(shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData::getForField, mapperService(),
|
||||
similarityService(), scriptService, xContentRegistry, client, indexReader, nowInMillis, clusterAlias);
|
||||
return new QueryShardContext(
|
||||
shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData::getForField, mapperService(),
|
||||
similarityService(), scriptService, xContentRegistry,
|
||||
namedWriteableRegistry, client, indexReader,
|
||||
nowInMillis, clusterAlias);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.util.concurrent.CountDown;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -34,13 +35,17 @@ import java.util.function.LongSupplier;
|
|||
*/
|
||||
public class QueryRewriteContext {
|
||||
private final NamedXContentRegistry xContentRegistry;
|
||||
private final NamedWriteableRegistry writeableRegistry;
|
||||
protected final Client client;
|
||||
protected final LongSupplier nowInMillis;
|
||||
private final List<BiConsumer<Client, ActionListener<?>>> asyncActions = new ArrayList<>();
|
||||
|
||||
public QueryRewriteContext(
|
||||
NamedXContentRegistry xContentRegistry, NamedWriteableRegistry writeableRegistry,Client client,
|
||||
LongSupplier nowInMillis) {
|
||||
|
||||
public QueryRewriteContext(NamedXContentRegistry xContentRegistry, Client client, LongSupplier nowInMillis) {
|
||||
this.xContentRegistry = xContentRegistry;
|
||||
this.writeableRegistry = writeableRegistry;
|
||||
this.client = client;
|
||||
this.nowInMillis = nowInMillis;
|
||||
}
|
||||
|
@ -59,6 +64,10 @@ public class QueryRewriteContext {
|
|||
return nowInMillis.getAsLong();
|
||||
}
|
||||
|
||||
public NamedWriteableRegistry getWriteableRegistry() {
|
||||
return writeableRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an instance of {@link QueryShardContext} if available of null otherwise
|
||||
*/
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.elasticsearch.client.Client;
|
|||
import org.elasticsearch.common.CheckedFunction;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -102,8 +103,9 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
public QueryShardContext(int shardId, IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache,
|
||||
Function<MappedFieldType, IndexFieldData<?>> indexFieldDataLookup, MapperService mapperService,
|
||||
SimilarityService similarityService, ScriptService scriptService, NamedXContentRegistry xContentRegistry,
|
||||
Client client, IndexReader reader, LongSupplier nowInMillis, String clusterAlias) {
|
||||
super(xContentRegistry, client, nowInMillis);
|
||||
NamedWriteableRegistry namedWriteableRegistry,Client client, IndexReader reader, LongSupplier nowInMillis,
|
||||
String clusterAlias) {
|
||||
super(xContentRegistry, namedWriteableRegistry,client, nowInMillis);
|
||||
this.shardId = shardId;
|
||||
this.similarityService = similarityService;
|
||||
this.mapperService = mapperService;
|
||||
|
@ -120,8 +122,8 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
|
||||
public QueryShardContext(QueryShardContext source) {
|
||||
this(source.shardId, source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
|
||||
source.similarityService, source.scriptService, source.getXContentRegistry(), source.client,
|
||||
source.reader, source.nowInMillis, source.clusterAlias);
|
||||
source.similarityService, source.scriptService, source.getXContentRegistry(), source.getWriteableRegistry(),
|
||||
source.client, source.reader, source.nowInMillis, source.clusterAlias);
|
||||
this.types = source.getTypes();
|
||||
}
|
||||
|
||||
|
|
|
@ -453,7 +453,8 @@ public class IndicesService extends AbstractLifecycleComponent
|
|||
client,
|
||||
indicesQueryCache,
|
||||
mapperRegistry,
|
||||
indicesFieldDataCache);
|
||||
indicesFieldDataCache,
|
||||
namedWriteableRegistry);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1232,7 +1233,7 @@ public class IndicesService extends AbstractLifecycleComponent
|
|||
* Returns a new {@link QueryRewriteContext} with the given <tt>now</tt> provider
|
||||
*/
|
||||
public QueryRewriteContext getRewriteContext(LongSupplier nowInMillis) {
|
||||
return new QueryRewriteContext(xContentRegistry, client, nowInMillis);
|
||||
return new QueryRewriteContext(xContentRegistry, namedWriteableRegistry, client, nowInMillis);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -140,7 +140,7 @@ public class IndexModuleTests extends ESTestCase {
|
|||
private IndexService newIndexService(IndexModule module) throws IOException {
|
||||
return module.newIndexService(nodeEnvironment, xContentRegistry(), deleter, circuitBreakerService, bigArrays, threadPool,
|
||||
scriptService, null, indicesQueryCache, mapperRegistry,
|
||||
new IndicesFieldDataCache(settings, listener));
|
||||
new IndicesFieldDataCache(settings, listener), writableRegistry());
|
||||
}
|
||||
|
||||
public void testWrapperIsBound() throws IOException {
|
||||
|
|
|
@ -74,8 +74,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
}
|
||||
|
||||
public void testIsFieldWithinQueryEmptyReader() throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), null,
|
||||
() -> nowInMillis);
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis);
|
||||
IndexReader reader = new MultiReader();
|
||||
DateFieldType ft = new DateFieldType();
|
||||
ft.setName("my_date");
|
||||
|
@ -85,8 +84,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
|
||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), null,
|
||||
() -> nowInMillis);
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis);
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||
|
@ -133,8 +131,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
DateFieldType ft2 = new DateFieldType();
|
||||
ft2.setName("my_date2");
|
||||
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), null,
|
||||
() -> nowInMillis);
|
||||
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis);
|
||||
assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context));
|
||||
IOUtils.close(reader, w, dir);
|
||||
}
|
||||
|
@ -169,7 +166,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
QueryShardContext context = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(),
|
||||
indexSettings),
|
||||
null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis, null);
|
||||
null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null);
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("field");
|
||||
String date = "2015-10-12T14:10:55";
|
||||
|
@ -191,7 +188,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build();
|
||||
QueryShardContext context = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings),
|
||||
null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis, null);
|
||||
null, null, null, null, null, xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null);
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("field");
|
||||
String date1 = "2015-10-12T14:10:55";
|
||||
|
|
|
@ -83,7 +83,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
|
|||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings);
|
||||
QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(),
|
||||
null, null, () -> nowInMillis, null);
|
||||
writableRegistry(), null, null, () -> nowInMillis, null);
|
||||
RangeFieldMapper.RangeFieldType ft = new RangeFieldMapper.RangeFieldType(type, Version.CURRENT);
|
||||
ft.setName(FIELDNAME);
|
||||
ft.setIndexOptions(IndexOptions.DOCS);
|
||||
|
|
|
@ -66,7 +66,7 @@ public class QueryShardContextTests extends ESTestCase {
|
|||
QueryShardContext context = new QueryShardContext(
|
||||
0, indexSettings, null, mappedFieldType ->
|
||||
mappedFieldType.fielddataBuilder().build(indexSettings, mappedFieldType, null, null, null)
|
||||
, mapperService, null, null, xContentRegistry(), null, null,
|
||||
, mapperService, null, null, xContentRegistry(), writableRegistry(), null, null,
|
||||
() -> nowInMillis, null);
|
||||
|
||||
context.setAllowUnmappedFields(false);
|
||||
|
@ -111,7 +111,7 @@ public class QueryShardContextTests extends ESTestCase {
|
|||
QueryShardContext context = new QueryShardContext(
|
||||
0, indexSettings, null, mappedFieldType ->
|
||||
mappedFieldType.fielddataBuilder().build(indexSettings, mappedFieldType, null, null, mapperService)
|
||||
, mapperService, null, null, xContentRegistry(), null, null,
|
||||
, mapperService, null, null, xContentRegistry(), writableRegistry(), null, null,
|
||||
() -> nowInMillis, clusterAlias);
|
||||
|
||||
IndexFieldData<?> forField = context.getForField(mapper.fieldType());
|
||||
|
|
|
@ -37,7 +37,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||
IndexService indexService = createIndex("test");
|
||||
IndexReader reader = new MultiReader();
|
||||
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||
null, null, xContentRegistry(), null, reader, null, null);
|
||||
null, null, xContentRegistry(), writableRegistry(), null, reader, null, null);
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||
indexService.mapperService().merge("type",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||
null, null, xContentRegistry(), null, null, null, null);
|
||||
null, null, xContentRegistry(), writableRegistry(), null, null, null, null);
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||
// can't make assumptions on a missing reader, so it must return INTERSECT
|
||||
assertEquals(Relation.INTERSECTS, range.getRelation(context));
|
||||
|
@ -73,7 +73,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||
IndexReader reader = new MultiReader();
|
||||
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||
null, null, xContentRegistry(), null, reader, null, null);
|
||||
null, null, xContentRegistry(), writableRegistry(), null, reader, null, null);
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||
// no values -> DISJOINT
|
||||
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
||||
|
|
|
@ -32,7 +32,7 @@ import java.util.function.Supplier;
|
|||
public class RewriteableTests extends ESTestCase {
|
||||
|
||||
public void testRewrite() throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null);
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null, null);
|
||||
TestRewriteable rewrite = Rewriteable.rewrite(new TestRewriteable(randomIntBetween(0, Rewriteable.MAX_REWRITE_ROUNDS)), context,
|
||||
randomBoolean());
|
||||
assertEquals(rewrite.numRewrites, 0);
|
||||
|
@ -47,7 +47,7 @@ public class RewriteableTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRewriteAndFetch() throws ExecutionException, InterruptedException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null);
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null, null);
|
||||
PlainActionFuture<TestRewriteable> future = new PlainActionFuture<>();
|
||||
Rewriteable.rewriteAndFetch(new TestRewriteable(randomIntBetween(0, Rewriteable.MAX_REWRITE_ROUNDS), true), context, future);
|
||||
TestRewriteable rewrite = future.get();
|
||||
|
@ -65,7 +65,7 @@ public class RewriteableTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testRewriteList() throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null);
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null, null);
|
||||
List<TestRewriteable> rewriteableList = new ArrayList();
|
||||
int numInstances = randomIntBetween(1, 10);
|
||||
rewriteableList.add(new TestRewriteable(randomIntBetween(1, Rewriteable.MAX_REWRITE_ROUNDS)));
|
||||
|
|
|
@ -177,7 +177,7 @@ public class SimpleQueryParserTests extends ESTestCase {
|
|||
IndexMetaData indexState = IndexMetaData.builder("index").settings(indexSettings).build();
|
||||
IndexSettings settings = new IndexSettings(indexState, Settings.EMPTY);
|
||||
QueryShardContext mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(),
|
||||
null, null, System::currentTimeMillis, null) {
|
||||
writableRegistry(), null, null, System::currentTimeMillis, null) {
|
||||
@Override
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
return new MockFieldMapper.FakeFieldType();
|
||||
|
@ -191,7 +191,7 @@ public class SimpleQueryParserTests extends ESTestCase {
|
|||
|
||||
// Now check what happens if foo.quote does not exist
|
||||
mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(),
|
||||
null, null, System::currentTimeMillis, null) {
|
||||
writableRegistry(), null, null, System::currentTimeMillis, null) {
|
||||
@Override
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
if (name.equals("foo.quote")) {
|
||||
|
|
|
@ -267,7 +267,7 @@ public class AggregatorFactoriesTests extends ESTestCase {
|
|||
AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(filterAggBuilder)
|
||||
.addPipelineAggregator(pipelineAgg);
|
||||
AggregatorFactories.Builder rewritten = builder
|
||||
.rewrite(new QueryRewriteContext(xContentRegistry, null, () -> 0L));
|
||||
.rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L));
|
||||
assertNotSame(builder, rewritten);
|
||||
List<AggregationBuilder> aggregatorFactories = rewritten.getAggregatorFactories();
|
||||
assertEquals(1, aggregatorFactories.size());
|
||||
|
@ -281,7 +281,7 @@ public class AggregatorFactoriesTests extends ESTestCase {
|
|||
|
||||
// Check that a further rewrite returns the same aggregation factories builder
|
||||
AggregatorFactories.Builder secondRewritten = rewritten
|
||||
.rewrite(new QueryRewriteContext(xContentRegistry, null, () -> 0L));
|
||||
.rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L));
|
||||
assertSame(rewritten, secondRewritten);
|
||||
}
|
||||
|
||||
|
|
|
@ -99,7 +99,7 @@ public class ExtendedBoundsTests extends ESTestCase {
|
|||
SearchContext context = mock(SearchContext.class);
|
||||
QueryShardContext qsc = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null,
|
||||
null, xContentRegistry(), null, null, () -> now, null);
|
||||
null, xContentRegistry(), writableRegistry(), null, null, () -> now, null);
|
||||
when(context.getQueryShardContext()).thenReturn(qsc);
|
||||
FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime");
|
||||
DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC);
|
||||
|
|
|
@ -199,6 +199,6 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
|
|||
Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
|
||||
ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
|
||||
return new QueryShardContext(0, mapperService.getIndexSettings(), null, null, mapperService, null, scriptService,
|
||||
xContentRegistry(), null, null, System::currentTimeMillis, null);
|
||||
xContentRegistry(), writableRegistry(), null, null, System::currentTimeMillis, null);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -448,7 +448,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
|
|||
}
|
||||
|
||||
private SearchSourceBuilder rewrite(SearchSourceBuilder searchSourceBuilder) throws IOException {
|
||||
return Rewriteable.rewrite(searchSourceBuilder, new QueryRewriteContext(xContentRegistry(), null, Long
|
||||
.valueOf(1)::longValue));
|
||||
return Rewriteable.rewrite(searchSourceBuilder, new QueryRewriteContext(xContentRegistry(), writableRegistry(),
|
||||
null, Long.valueOf(1)::longValue));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -273,7 +273,7 @@ public class HighlightBuilderTests extends ESTestCase {
|
|||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings);
|
||||
// shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter
|
||||
QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(),
|
||||
null, null, System::currentTimeMillis, null) {
|
||||
namedWriteableRegistry, null, null, System::currentTimeMillis, null) {
|
||||
@Override
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
|
||||
|
|
|
@ -137,7 +137,7 @@ public class QueryRescoreBuilderTests extends ESTestCase {
|
|||
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings);
|
||||
// shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer
|
||||
QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(),
|
||||
null, null, () -> nowInMillis, null) {
|
||||
namedWriteableRegistry, null, null, () -> nowInMillis, null) {
|
||||
@Override
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
|
||||
|
|
|
@ -191,7 +191,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
|
|||
});
|
||||
long nowInMillis = randomNonNegativeLong();
|
||||
return new QueryShardContext(0, idxSettings, bitsetFilterCache, ifds::getForField, null, null, scriptService,
|
||||
xContentRegistry(), null, null, () -> nowInMillis, null) {
|
||||
xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null) {
|
||||
@Override
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
return provideMappedFieldType(name);
|
||||
|
|
|
@ -178,7 +178,7 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
|
|||
when(scriptService.compile(any(Script.class), any())).then(invocation -> new TestTemplateService.MockTemplateScript.Factory(
|
||||
((Script) invocation.getArguments()[0]).getIdOrCode()));
|
||||
QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, mapperService, null, scriptService,
|
||||
xContentRegistry(), null, null, System::currentTimeMillis, null);
|
||||
xContentRegistry(), namedWriteableRegistry, null, null, System::currentTimeMillis, null);
|
||||
|
||||
SuggestionContext suggestionContext = suggestionBuilder.build(mockShardContext);
|
||||
assertEquals(toBytesRef(suggestionBuilder.text()), suggestionContext.getText());
|
||||
|
|
|
@ -59,6 +59,171 @@ Fields referred in a percolator query may exist in any type of the index contain
|
|||
|
||||
=====================================
|
||||
|
||||
[float]
|
||||
==== Reindexing your percolator queries
|
||||
|
||||
Reindexing percolator queries is sometimes required to benefit from improvements made to the `percolator` field type in
|
||||
new releases.
|
||||
|
||||
Percolator queries can be reindexed by using the <<docs-reindex,reindex api>>.
|
||||
Let's take a look at the following index with a percolator field type:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT index
|
||||
{
|
||||
"mappings": {
|
||||
"doc" : {
|
||||
"properties": {
|
||||
"query" : {
|
||||
"type" : "percolator"
|
||||
},
|
||||
"body" : {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
POST _aliases
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"add": {
|
||||
"index": "index",
|
||||
"alias": "queries" <1>
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
PUT queries/doc/1?refresh
|
||||
{
|
||||
"query" : {
|
||||
"match" : {
|
||||
"body" : "quick brown fox"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
<1> It is always recommended to define an alias for your index, so that in case of a reindex systems / applications
|
||||
don't need to be changed to know that the percolator queries are now in a different index.
|
||||
|
||||
Let's say you're going to upgrade to a new major version and in order for the new Elasticsearch version to still be able
|
||||
to read your queries you need to reindex your queries into a new index on the current Elasticsearch version:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT new_index
|
||||
{
|
||||
"mappings": {
|
||||
"doc" : {
|
||||
"properties": {
|
||||
"query" : {
|
||||
"type" : "percolator"
|
||||
},
|
||||
"body" : {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
POST /_reindex?refresh
|
||||
{
|
||||
"source": {
|
||||
"index": "index"
|
||||
},
|
||||
"dest": {
|
||||
"index": "new_index"
|
||||
}
|
||||
}
|
||||
|
||||
POST _aliases
|
||||
{
|
||||
"actions": [ <1>
|
||||
{
|
||||
"remove": {
|
||||
"index" : "index",
|
||||
"alias": "queries"
|
||||
}
|
||||
},
|
||||
{
|
||||
"add": {
|
||||
"index": "new_index",
|
||||
"alias": "queries"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
<1> If you have an alias don't forget to point it to the new index.
|
||||
|
||||
Executing the `percolate` query via the `queries` alias:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET /queries/_search
|
||||
{
|
||||
"query": {
|
||||
"percolate" : {
|
||||
"field" : "query",
|
||||
"document" : {
|
||||
"body" : "fox jumps over the lazy dog"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
now returns matches from the new index:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"took": 3,
|
||||
"timed_out": false,
|
||||
"_shards": {
|
||||
"total": 5,
|
||||
"successful": 5,
|
||||
"skipped" : 0,
|
||||
"failed": 0
|
||||
},
|
||||
"hits": {
|
||||
"total": 1,
|
||||
"max_score": 0.2876821,
|
||||
"hits": [
|
||||
{
|
||||
"_index": "new_index", <1>
|
||||
"_type": "doc",
|
||||
"_id": "1",
|
||||
"_score": 0.2876821,
|
||||
"_source": {
|
||||
"query": {
|
||||
"match": {
|
||||
"body": "quick brown fox"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"took": 3,/"took": "$body.took",/]
|
||||
|
||||
<1> Percolator query hit is now being presented from the new index.
|
||||
|
||||
[float]
|
||||
==== Dedicated Percolator Index
|
||||
|
||||
|
|
|
@ -49,6 +49,10 @@ import org.elasticsearch.action.get.GetRequest;
|
|||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
|
@ -62,6 +66,7 @@ import org.elasticsearch.common.xcontent.XContentType;
|
|||
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperForType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
@ -75,7 +80,9 @@ import org.elasticsearch.index.query.QueryShardException;
|
|||
import org.elasticsearch.indices.breaker.CircuitBreakerService;
|
||||
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Collection;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Supplier;
|
||||
|
@ -529,7 +536,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
|||
QueryShardContext percolateShardContext = wrap(context);
|
||||
|
||||
PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
|
||||
PercolateQuery.QueryStore queryStore = createStore(pft, percolateShardContext, mapUnmappedFieldsAsString);
|
||||
PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext, mapUnmappedFieldsAsString);
|
||||
return pft.percolateQuery(queryStore, document, docSearcher);
|
||||
}
|
||||
|
||||
|
@ -575,16 +582,42 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
|||
}
|
||||
}
|
||||
|
||||
private static PercolateQuery.QueryStore createStore(PercolatorFieldMapper.FieldType fieldType,
|
||||
static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldType,
|
||||
QueryShardContext context,
|
||||
boolean mapUnmappedFieldsAsString) {
|
||||
Version indexVersion = context.indexVersionCreated();
|
||||
NamedWriteableRegistry registry = context.getWriteableRegistry();
|
||||
return ctx -> {
|
||||
LeafReader leafReader = ctx.reader();
|
||||
BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.queryBuilderField.name());
|
||||
BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(queryBuilderFieldType.name());
|
||||
if (binaryDocValues == null) {
|
||||
return docId -> null;
|
||||
}
|
||||
|
||||
if (indexVersion.onOrAfter(Version.V_6_0_0_beta1)) {
|
||||
return docId -> {
|
||||
if (binaryDocValues.advanceExact(docId)) {
|
||||
BytesRef qbSource = binaryDocValues.binaryValue();
|
||||
try (InputStream in = new ByteArrayInputStream(qbSource.bytes, qbSource.offset, qbSource.length)) {
|
||||
try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry)) {
|
||||
input.setVersion(indexVersion);
|
||||
// Query builder's content is stored via BinaryFieldMapper, which has a custom encoding
|
||||
// to encode multiple binary values into a single binary doc values field.
|
||||
// This is the reason we need to first need to read the number of values and
|
||||
// then the length of the field value in bytes.
|
||||
int numValues = input.readVInt();
|
||||
assert numValues == 1;
|
||||
int valueLength = input.readVInt();
|
||||
assert valueLength > 0;
|
||||
QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class);
|
||||
assert in.read() == -1;
|
||||
return PercolatorFieldMapper.toQuery(context, mapUnmappedFieldsAsString, queryBuilder);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
} else {
|
||||
return docId -> {
|
||||
if (binaryDocValues.advanceExact(docId)) {
|
||||
BytesRef qbSource = binaryDocValues.binaryValue();
|
||||
|
@ -601,6 +634,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
|
|||
return null;
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -39,9 +39,11 @@ import org.apache.lucene.search.TermInSetQuery;
|
|||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
|
||||
import org.elasticsearch.common.hash.MurmurHash3;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -69,6 +71,7 @@ import org.elasticsearch.index.query.QueryShardException;
|
|||
import org.elasticsearch.index.query.Rewriteable;
|
||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
|
@ -331,16 +334,32 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
Rewriteable.rewriteAndFetch(queryBuilder, queryShardContext, future);
|
||||
queryBuilder = future.actionGet();
|
||||
|
||||
Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated();
|
||||
createQueryBuilderField(indexVersion, queryBuilderField, queryBuilder, context);
|
||||
Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
|
||||
processQuery(query, context);
|
||||
return null;
|
||||
}
|
||||
|
||||
static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField,
|
||||
QueryBuilder queryBuilder, ParseContext context) throws IOException {
|
||||
if (indexVersion.onOrAfter(Version.V_6_0_0_beta1)) {
|
||||
try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
|
||||
try (OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream)) {
|
||||
out.setVersion(indexVersion);
|
||||
out.writeNamedWriteable(queryBuilder);
|
||||
byte[] queryBuilderAsBytes = stream.toByteArray();
|
||||
qbField.parse(context.createExternalValueContext(queryBuilderAsBytes));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
|
||||
queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
|
||||
builder.flush();
|
||||
byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
|
||||
context.doc().add(new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType()));
|
||||
context.doc().add(new Field(qbField.name(), queryBuilderAsBytes, qbField.fieldType()));
|
||||
}
|
||||
}
|
||||
|
||||
Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
|
||||
processQuery(query, context);
|
||||
return null;
|
||||
}
|
||||
|
||||
void processQuery(Query query, ParseContext context) {
|
||||
|
|
|
@ -43,15 +43,16 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.hash.MurmurHash3;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||
|
@ -70,8 +71,10 @@ import org.elasticsearch.index.query.QueryShardContext;
|
|||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.index.query.RangeQueryBuilder;
|
||||
import org.elasticsearch.index.query.Rewriteable;
|
||||
import org.elasticsearch.index.query.ScriptQueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
|
||||
import org.elasticsearch.indices.TermsLookup;
|
||||
import org.elasticsearch.join.ParentJoinPlugin;
|
||||
import org.elasticsearch.join.query.HasChildQueryBuilder;
|
||||
|
@ -83,7 +86,9 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
|
|||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
@ -95,7 +100,6 @@ import java.util.Map;
|
|||
import java.util.function.Function;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
||||
|
@ -123,6 +127,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class, ParentJoinPlugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NamedWriteableRegistry writableRegistry() {
|
||||
return getInstanceFromNode(NamedWriteableRegistry.class);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void init() throws Exception {
|
||||
indexService = createIndex("test");
|
||||
|
@ -542,9 +551,18 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException {
|
||||
XContentParser sourceParser = createParser(PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(),
|
||||
new BytesArray(actual));
|
||||
assertThat(parseInnerQueryBuilder(sourceParser), equalTo(expected));
|
||||
try (InputStream in = new ByteArrayInputStream(actual.bytes, actual.offset, actual.length)) {
|
||||
try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), writableRegistry())) {
|
||||
// Query builder's content is stored via BinaryFieldMapper, which has a custom encoding
|
||||
// to encode multiple binary values into a single binary doc values field.
|
||||
// This is the reason we need to first need to read the number of values and
|
||||
// then the length of the field value in bytes.
|
||||
input.readVInt();
|
||||
input.readVInt();
|
||||
QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class);
|
||||
assertThat(queryBuilder, equalTo(expected));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testEmptyName() throws Exception {
|
||||
|
@ -580,8 +598,18 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endObject().bytes(),
|
||||
XContentType.JSON));
|
||||
BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
|
||||
Map<String, Object> parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2();
|
||||
assertEquals(Script.DEFAULT_SCRIPT_LANG, XContentMapValues.extractValue("script.script.lang", parsedQuery));
|
||||
try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
|
||||
try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), writableRegistry())) {
|
||||
// Query builder's content is stored via BinaryFieldMapper, which has a custom encoding
|
||||
// to encode multiple binary values into a single binary doc values field.
|
||||
// This is the reason we need to first need to read the number of values and
|
||||
// then the length of the field value in bytes.
|
||||
input.readVInt();
|
||||
input.readVInt();
|
||||
ScriptQueryBuilder queryBuilder = (ScriptQueryBuilder) input.readNamedWriteable(QueryBuilder.class);
|
||||
assertEquals(Script.DEFAULT_SCRIPT_LANG, queryBuilder.script().getLang());
|
||||
}
|
||||
}
|
||||
|
||||
query = jsonBuilder();
|
||||
query.startObject();
|
||||
|
@ -608,9 +636,16 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.endObject().bytes(),
|
||||
XContentType.JSON));
|
||||
querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
|
||||
parsedQuery = XContentHelper.convertToMap(new BytesArray(querySource), true).v2();
|
||||
assertEquals(Script.DEFAULT_SCRIPT_LANG,
|
||||
((List) XContentMapValues.extractValue("function_score.functions.script_score.script.lang", parsedQuery)).get(0));
|
||||
try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
|
||||
try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), writableRegistry())) {
|
||||
input.readVInt();
|
||||
input.readVInt();
|
||||
FunctionScoreQueryBuilder queryBuilder = (FunctionScoreQueryBuilder) input.readNamedWriteable(QueryBuilder.class);
|
||||
ScriptScoreFunctionBuilder function = (ScriptScoreFunctionBuilder)
|
||||
queryBuilder.filterFunctionBuilders()[0].getScoreFunction();
|
||||
assertEquals(Script.DEFAULT_SCRIPT_LANG, function.getScript().getLang());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testEncodeRange() {
|
||||
|
|
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.percolator;
|
||||
|
||||
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.NoMergePolicy;
|
||||
import org.apache.lucene.index.RandomIndexWriter;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.CheckedFunction;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.BinaryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.search.SearchModule;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class QueryBuilderStoreTests extends ESTestCase {
|
||||
|
||||
@Override
|
||||
protected NamedWriteableRegistry writableRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
return new NamedWriteableRegistry(searchModule.getNamedWriteables());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
return new NamedXContentRegistry(searchModule.getNamedXContents());
|
||||
}
|
||||
|
||||
public void testStoringQueryBuilders() throws IOException {
|
||||
try (Directory directory = newDirectory()) {
|
||||
TermQueryBuilder[] queryBuilders = new TermQueryBuilder[randomIntBetween(1, 16)];
|
||||
IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
|
||||
config.setMergePolicy(NoMergePolicy.INSTANCE);
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
|
||||
new Mapper.BuilderContext(settings, new ContentPath(0)));
|
||||
|
||||
Version version = randomBoolean() ? Version.V_5_6_0 : Version.V_6_0_0_beta1;
|
||||
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) {
|
||||
for (int i = 0; i < queryBuilders.length; i++) {
|
||||
queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));
|
||||
ParseContext parseContext = mock(ParseContext.class);
|
||||
ParseContext.Document document = new ParseContext.Document();
|
||||
when(parseContext.doc()).thenReturn(document);
|
||||
PercolatorFieldMapper.createQueryBuilderField(version,
|
||||
fieldMapper, queryBuilders[i], parseContext);
|
||||
indexWriter.addDocument(document);
|
||||
}
|
||||
}
|
||||
|
||||
QueryShardContext queryShardContext = mock(QueryShardContext.class);
|
||||
when(queryShardContext.indexVersionCreated()).thenReturn(version);
|
||||
when(queryShardContext.getWriteableRegistry()).thenReturn(writableRegistry());
|
||||
when(queryShardContext.getXContentRegistry()).thenReturn(xContentRegistry());
|
||||
when(queryShardContext.getForField(fieldMapper.fieldType()))
|
||||
.thenReturn(new BytesBinaryDVIndexFieldData(new Index("index", "uuid"), fieldMapper.name()));
|
||||
PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), queryShardContext, false);
|
||||
|
||||
try (IndexReader indexReader = DirectoryReader.open(directory)) {
|
||||
LeafReaderContext leafContext = indexReader.leaves().get(0);
|
||||
CheckedFunction<Integer, Query, IOException> queries = queryStore.getQueries(leafContext);
|
||||
assertEquals(queryBuilders.length, leafContext.reader().numDocs());
|
||||
for (int i = 0; i < queryBuilders.length; i++) {
|
||||
TermQuery query = (TermQuery) queries.apply(i);
|
||||
assertEquals(queryBuilders[i].fieldName(), query.getTerm().field());
|
||||
assertEquals(queryBuilders[i].value(), query.getTerm().text());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.elasticsearch.gradle.test.RestIntegTestTask
|
||||
import org.elasticsearch.gradle.Version
|
||||
import org.elasticsearch.gradle.VersionProperties
|
||||
|
||||
apply plugin: 'elasticsearch.standalone-test'
|
||||
|
||||
// This is a top level task which we will add dependencies to below.
|
||||
// It is a single task that can be used to backcompat tests against all versions.
|
||||
task bwcTest {
|
||||
description = 'Runs backwards compatibility tests.'
|
||||
group = 'verification'
|
||||
}
|
||||
|
||||
// For now test against the current version:
|
||||
Version currentVersion = Version.fromString(VersionProperties.elasticsearch.minus('-SNAPSHOT'))
|
||||
Version[] versions = [currentVersion]
|
||||
// TODO: uncomment when there is a released version with: https://github.com/elastic/elasticsearch/pull/25456
|
||||
// versions = indexCompatVersions
|
||||
for (Version version : versions) {
|
||||
String baseName = "v${version}"
|
||||
|
||||
Task oldQueryBuilderTest = tasks.create(name: "${baseName}#oldQueryBuilderTest", type: RestIntegTestTask) {
|
||||
mustRunAfter(precommit)
|
||||
}
|
||||
tasks.getByName("${baseName}#oldQueryBuilderTestRunner").configure {
|
||||
systemProperty 'tests.is_old_cluster', 'true'
|
||||
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
|
||||
}
|
||||
|
||||
configure(extensions.findByName("${baseName}#oldQueryBuilderTestCluster")) {
|
||||
distribution = 'zip'
|
||||
// TODO: uncomment when there is a released version with: https://github.com/elastic/elasticsearch/pull/25456
|
||||
// bwcVersion = version
|
||||
// numBwcNodes = 1
|
||||
numNodes = 1
|
||||
clusterName = 'query_builder_bwc'
|
||||
setting 'http.content_type.required', 'true'
|
||||
}
|
||||
|
||||
Task upgradedQueryBuilderTest = tasks.create(name: "${baseName}#upgradedQueryBuilderTest", type: RestIntegTestTask) {
|
||||
dependsOn(oldQueryBuilderTest, "${baseName}#oldQueryBuilderTestCluster#stop")
|
||||
}
|
||||
|
||||
configure(extensions.findByName("${baseName}#upgradedQueryBuilderTestCluster")) {
|
||||
dependsOn oldQueryBuilderTest,
|
||||
"${baseName}#oldQueryBuilderTestCluster#stop"
|
||||
distribution = 'zip'
|
||||
clusterName = 'query_builder_bwc'
|
||||
numNodes = 1
|
||||
dataDir = { nodeNum -> oldQueryBuilderTest.nodes[nodeNum].dataDir }
|
||||
cleanShared = false // We want to keep snapshots made by the old cluster!
|
||||
}
|
||||
|
||||
tasks.getByName("${baseName}#upgradedQueryBuilderTestRunner").configure {
|
||||
systemProperty 'tests.is_old_cluster', 'false'
|
||||
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
|
||||
}
|
||||
|
||||
Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
|
||||
dependsOn = [upgradedQueryBuilderTest]
|
||||
}
|
||||
|
||||
if (project.bwc_tests_enabled) {
|
||||
bwcTest.dependsOn(versionBwcTest)
|
||||
}
|
||||
}
|
||||
|
||||
test.enabled = false // no unit tests for rolling upgrades, only the rest integration test
|
||||
|
||||
// basic integ tests includes testing bwc against the most recent version
|
||||
task integTest {
|
||||
if (project.bwc_tests_enabled) {
|
||||
dependsOn = ["v${versions[-1]}#bwcTest"]
|
||||
}
|
||||
}
|
||||
|
||||
check.dependsOn(integTest)
|
|
@ -0,0 +1,222 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.bwc;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.http.entity.ContentType;
|
||||
import org.elasticsearch.client.http.entity.StringEntity;
|
||||
import org.elasticsearch.client.http.util.EntityUtils;
|
||||
import org.elasticsearch.common.Booleans;
|
||||
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.query.BoolQueryBuilder;
|
||||
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.DisMaxQueryBuilder;
|
||||
import org.elasticsearch.index.query.MatchAllQueryBuilder;
|
||||
import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.Operator;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.RangeQueryBuilder;
|
||||
import org.elasticsearch.index.query.SpanNearQueryBuilder;
|
||||
import org.elasticsearch.index.query.SpanTermQueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
|
||||
import org.elasticsearch.search.SearchModule;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
|
||||
public class QueryBuilderBWCIT extends ESRestTestCase {
|
||||
|
||||
private static final List<Object[]> CANDIDATES = new ArrayList<>();
|
||||
|
||||
static {
|
||||
addCandidate("\"match\": { \"keyword_field\": \"value\"}", new MatchQueryBuilder("keyword_field", "value"));
|
||||
addCandidate(
|
||||
"\"match\": { \"keyword_field\": {\"query\": \"value\", \"operator\": \"and\"} }",
|
||||
new MatchQueryBuilder("keyword_field", "value").operator(Operator.AND)
|
||||
);
|
||||
addCandidate(
|
||||
"\"match\": { \"keyword_field\": {\"query\": \"value\", \"analyzer\": \"english\"} }",
|
||||
new MatchQueryBuilder("keyword_field", "value").analyzer("english")
|
||||
);
|
||||
addCandidate(
|
||||
"\"match\": { \"keyword_field\": {\"query\": \"value\", \"minimum_should_match\": 3} }",
|
||||
new MatchQueryBuilder("keyword_field", "value").minimumShouldMatch("3")
|
||||
);
|
||||
addCandidate(
|
||||
"\"match\": { \"keyword_field\": {\"query\": \"value\", \"fuzziness\": \"auto\"} }",
|
||||
new MatchQueryBuilder("keyword_field", "value").fuzziness(Fuzziness.AUTO)
|
||||
);
|
||||
addCandidate("\"match_phrase\": { \"keyword_field\": \"value\"}", new MatchPhraseQueryBuilder("keyword_field", "value"));
|
||||
addCandidate(
|
||||
"\"match_phrase\": { \"keyword_field\": {\"query\": \"value\", \"slop\": 3}}",
|
||||
new MatchPhraseQueryBuilder("keyword_field", "value").slop(3)
|
||||
);
|
||||
addCandidate("\"range\": { \"long_field\": {\"gte\": 1, \"lte\": 9}}", new RangeQueryBuilder("long_field").from(1).to(9));
|
||||
addCandidate(
|
||||
"\"bool\": { \"must_not\": [{\"match_all\": {}}], \"must\": [{\"match_all\": {}}], " +
|
||||
"\"filter\": [{\"match_all\": {}}], \"should\": [{\"match_all\": {}}]}",
|
||||
new BoolQueryBuilder().mustNot(new MatchAllQueryBuilder()).must(new MatchAllQueryBuilder())
|
||||
.filter(new MatchAllQueryBuilder()).should(new MatchAllQueryBuilder())
|
||||
);
|
||||
addCandidate(
|
||||
"\"dis_max\": {\"queries\": [{\"match_all\": {}},{\"match_all\": {}},{\"match_all\": {}}], \"tie_breaker\": 0.01}",
|
||||
new DisMaxQueryBuilder().add(new MatchAllQueryBuilder()).add(new MatchAllQueryBuilder()).add(new MatchAllQueryBuilder())
|
||||
.tieBreaker(0.01f)
|
||||
);
|
||||
addCandidate(
|
||||
"\"constant_score\": {\"query\": {\"match_all\": {}}, \"boost\": 0.1}",
|
||||
new ConstantScoreQueryBuilder(new MatchAllQueryBuilder()).boost(0.1f)
|
||||
);
|
||||
addCandidate(
|
||||
"\"function_score\": {\"query\": {\"match_all\": {}}," +
|
||||
"\"functions\": [{\"random_score\": {}, \"filter\": {\"match_all\": {}}, \"weight\": 0.2}]}",
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
|
||||
new FunctionScoreQueryBuilder.FilterFunctionBuilder(new MatchAllQueryBuilder(),
|
||||
new RandomScoreFunctionBuilder().setWeight(0.2f))})
|
||||
);
|
||||
addCandidate(
|
||||
"\"span_near\": {\"clauses\": [{ \"span_term\": { \"keyword_field\": \"value1\" }}, " +
|
||||
"{ \"span_term\": { \"keyword_field\": \"value2\" }}]}",
|
||||
new SpanNearQueryBuilder(new SpanTermQueryBuilder("keyword_field", "value1"), 0)
|
||||
.addClause(new SpanTermQueryBuilder("keyword_field", "value2"))
|
||||
);
|
||||
addCandidate(
|
||||
"\"span_near\": {\"clauses\": [{ \"span_term\": { \"keyword_field\": \"value1\" }}, " +
|
||||
"{ \"span_term\": { \"keyword_field\": \"value2\" }}], \"slop\": 2}",
|
||||
new SpanNearQueryBuilder(new SpanTermQueryBuilder("keyword_field", "value1"), 2)
|
||||
.addClause(new SpanTermQueryBuilder("keyword_field", "value2"))
|
||||
);
|
||||
addCandidate(
|
||||
"\"span_near\": {\"clauses\": [{ \"span_term\": { \"keyword_field\": \"value1\" }}, " +
|
||||
"{ \"span_term\": { \"keyword_field\": \"value2\" }}], \"slop\": 2, \"in_order\": false}",
|
||||
new SpanNearQueryBuilder(new SpanTermQueryBuilder("keyword_field", "value1"), 2)
|
||||
.addClause(new SpanTermQueryBuilder("keyword_field", "value2")).inOrder(false)
|
||||
);
|
||||
}
|
||||
|
||||
private static void addCandidate(String querySource, QueryBuilder expectedQb) {
|
||||
CANDIDATES.add(new Object[]{"{\"query\": {" + querySource + "}}", expectedQb});
|
||||
}
|
||||
|
||||
private final Version oldClusterVersion = Version.fromString(System.getProperty("tests.old_cluster_version"));
|
||||
private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster"));
|
||||
|
||||
@Override
|
||||
protected boolean preserveIndicesUponCompletion() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void testQueryBuilderBWC() throws Exception {
|
||||
String index = "queries";
|
||||
if (runningAgainstOldCluster) {
|
||||
XContentBuilder mappingsAndSettings = jsonBuilder();
|
||||
mappingsAndSettings.startObject();
|
||||
{
|
||||
mappingsAndSettings.startObject("settings");
|
||||
mappingsAndSettings.field("number_of_shards", 1);
|
||||
mappingsAndSettings.field("number_of_replicas", 0);
|
||||
mappingsAndSettings.endObject();
|
||||
}
|
||||
{
|
||||
mappingsAndSettings.startObject("mappings");
|
||||
mappingsAndSettings.startObject("doc");
|
||||
mappingsAndSettings.startObject("properties");
|
||||
{
|
||||
mappingsAndSettings.startObject("query");
|
||||
mappingsAndSettings.field("type", "percolator");
|
||||
mappingsAndSettings.endObject();
|
||||
}
|
||||
{
|
||||
mappingsAndSettings.startObject("keyword_field");
|
||||
mappingsAndSettings.field("type", "keyword");
|
||||
mappingsAndSettings.endObject();
|
||||
}
|
||||
{
|
||||
mappingsAndSettings.startObject("long_field");
|
||||
mappingsAndSettings.field("type", "long");
|
||||
mappingsAndSettings.endObject();
|
||||
}
|
||||
mappingsAndSettings.endObject();
|
||||
mappingsAndSettings.endObject();
|
||||
mappingsAndSettings.endObject();
|
||||
}
|
||||
mappingsAndSettings.endObject();
|
||||
Response rsp = client().performRequest("PUT", "/" + index, Collections.emptyMap(),
|
||||
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, rsp.getStatusLine().getStatusCode());
|
||||
|
||||
for (int i = 0; i < CANDIDATES.size(); i++) {
|
||||
rsp = client().performRequest("PUT", "/" + index + "/doc/" + Integer.toString(i), Collections.emptyMap(),
|
||||
new StringEntity((String) CANDIDATES.get(i)[0], ContentType.APPLICATION_JSON));
|
||||
assertEquals(201, rsp.getStatusLine().getStatusCode());
|
||||
}
|
||||
} else {
|
||||
NamedWriteableRegistry registry = new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, false,
|
||||
Collections.emptyList()).getNamedWriteables());
|
||||
|
||||
for (int i = 0; i < CANDIDATES.size(); i++) {
|
||||
QueryBuilder expectedQueryBuilder = (QueryBuilder) CANDIDATES.get(i)[1];
|
||||
Response rsp = client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
|
||||
new StringEntity("{\"query\": {\"ids\": {\"values\": [\"" + Integer.toString(i) + "\"]}}, " +
|
||||
"\"docvalue_fields\" : [\"query.query_builder_field\"]}", ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, rsp.getStatusLine().getStatusCode());
|
||||
Map<?, ?> hitRsp = (Map<?, ?>) ((List<?>) ((Map<?, ?>)toMap(rsp).get("hits")).get("hits")).get(0);
|
||||
String queryBuilderStr = (String) ((List<?>) ((Map<?, ?>) hitRsp.get("fields")).get("query.query_builder_field")).get(0);
|
||||
byte[] qbSource = Base64.getDecoder().decode(queryBuilderStr);
|
||||
try (InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length)) {
|
||||
try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry)) {
|
||||
input.setVersion(oldClusterVersion);
|
||||
QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class);
|
||||
assert in.read() == -1;
|
||||
assertEquals(expectedQueryBuilder, queryBuilder);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static Map<String, Object> toMap(Response response) throws IOException {
|
||||
return toMap(EntityUtils.toString(response.getEntity()));
|
||||
}
|
||||
|
||||
private static Map<String, Object> toMap(String response) throws IOException {
|
||||
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
|
||||
}
|
||||
}
|
|
@ -80,7 +80,8 @@ List projects = [
|
|||
'qa:smoke-test-tribe-node',
|
||||
'qa:vagrant',
|
||||
'qa:verify-version-constants',
|
||||
'qa:wildfly'
|
||||
'qa:wildfly',
|
||||
'qa:query-builder-bwc'
|
||||
]
|
||||
|
||||
File examplePluginsDir = new File(rootProject.projectDir, 'plugins/examples')
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
package org.elasticsearch.test;
|
||||
|
||||
import com.fasterxml.jackson.core.io.JsonStringEncoder;
|
||||
|
||||
import org.apache.lucene.search.BoostQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
|
@ -1078,7 +1077,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
|
|||
|
||||
QueryShardContext createShardContext() {
|
||||
return new QueryShardContext(0, idxSettings, bitsetFilterCache, indexFieldDataService::getForField, mapperService,
|
||||
similarityService, scriptService, xContentRegistry, this.client, null, () -> nowInMillis, null);
|
||||
similarityService, scriptService, xContentRegistry, namedWriteableRegistry, this.client, null, () -> nowInMillis, null);
|
||||
}
|
||||
|
||||
ScriptModule createScriptModule(List<ScriptPlugin> scriptPlugins) {
|
||||
|
|
|
@ -1113,6 +1113,13 @@ public abstract class ESTestCase extends LuceneTestCase {
|
|||
return new NamedXContentRegistry(ClusterModule.getNamedXWriteables());
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link NamedWriteableRegistry} to use for this test. Subclasses should override and use liberally.
|
||||
*/
|
||||
protected NamedWriteableRegistry writableRegistry() {
|
||||
return new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a "mock" script for use either with {@link MockScriptEngine} or anywhere where you need a script but don't really care about
|
||||
* its contents.
|
||||
|
|
|
@ -41,7 +41,7 @@ public class MockSearchServiceTests extends ESTestCase {
|
|||
final long nowInMillis = randomNonNegativeLong();
|
||||
SearchContext s = new TestSearchContext(new QueryShardContext(0,
|
||||
new IndexSettings(EMPTY_INDEX_METADATA, Settings.EMPTY), null, null, null, null, null, xContentRegistry(),
|
||||
null, null, () -> nowInMillis, null)) {
|
||||
writableRegistry(), null, null, () -> nowInMillis, null)) {
|
||||
|
||||
@Override
|
||||
public SearchShardTarget shardTarget() {
|
||||
|
|
Loading…
Reference in New Issue