Require arguments for QueryShardContext creation. (#21196)

The `IndexService#newQueryShardContext()` method creates a QueryShardContext on
shard `0`, with a `null` reader, and resolves `now` with `System.currentTimeMillis()`.
This can hide bugs: the shard id is sometimes used during query parsing (for
instance, it salts random score generation in `function_score`), a `null` reader
disables query rewriting, and some use cases, such as percolation, must not rely
on the current timestamp at all. This pull request therefore removes the method
and requires that every call site provide these parameters explicitly.
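
For call sites, the migration looks like this — a minimal sketch, assuming an `indexService` of type `org.elasticsearch.index.IndexService` and a Lucene searcher in scope (variable names are illustrative, not from the commit):

    // Before (removed by this commit): shard id, reader and clock were chosen implicitly.
    // QueryShardContext context = indexService.newQueryShardContext();

    // Validation-only call sites: fake shard id, no reader, frozen clock.
    QueryShardContext validationContext = indexService.newQueryShardContext(0, null, () -> 0L);

    // Search call sites: the real shard id, the current reader (enables rewriting), a real clock.
    QueryShardContext searchContext = indexService.newQueryShardContext(
        shardId, searcher.getIndexReader(), System::currentTimeMillis);

    // Percolation call sites: resolving `now` must fail loudly rather than freeze a timestamp.
    QueryShardContext percolateContext = indexService.newQueryShardContext(0, null,
        () -> { throw new IllegalArgumentException("percolator queries are not allowed to use the current timestamp"); });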
commit aa6cd93e0f (parent 51717c882f)
Adrien Grand authored on 2016-11-02 09:48:49 +01:00; committed by GitHub
13 changed files with 75 additions and 39 deletions

MetaDataCreateIndexService.java

@@ -352,7 +352,9 @@ public class MetaDataCreateIndexService extends AbstractComponent {
             throw mpe;
         }
-        final QueryShardContext queryShardContext = indexService.newQueryShardContext();
+        // the context is only used for validation so it's fine to pass fake values for the shard id and the current
+        // timestamp
+        final QueryShardContext queryShardContext = indexService.newQueryShardContext(0, null, () -> 0L);
         for (Alias alias : request.aliases()) {
             if (Strings.hasLength(alias.filter())) {
                 aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);

MetaDataIndexAliasesService.java

@@ -149,7 +149,9 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
                     }
                     indices.put(action.getIndex(), indexService);
                 }
-                aliasValidator.validateAliasFilter(alias, filter, indexService.newQueryShardContext());
+                // the context is only used for validation so it's fine to pass fake values for the shard id and the current
+                // timestamp
+                aliasValidator.validateAliasFilter(alias, filter, indexService.newQueryShardContext(0, null, () -> 0L));
             }
         };
         changed |= action.apply(newAliasValidator, metadata, index);

IndexService.java

@@ -145,7 +145,10 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
         this.indexAnalyzers = registry.build(indexSettings);
         this.similarityService = similarityService;
         this.mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService, mapperRegistry,
-            IndexService.this::newQueryShardContext);
+            // we parse all percolator queries as they would be parsed on shard 0
+            () -> newQueryShardContext(0, null, () -> {
+                throw new IllegalArgumentException("Percolator queries are not allowed to use the current timestamp");
+            }));
         this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService, mapperService);
         this.shardStoreDeleter = shardStoreDeleter;
         this.bigArrays = bigArrays;
@@ -453,7 +456,10 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
     /**
      * Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
-     * {@link QueryShardContext#setTypes(String...)}
+     * {@link QueryShardContext#setTypes(String...)}.
+     *
+     * Passing a {@code null} {@link IndexReader} will return a valid context, however it won't be able to make
+     * {@link IndexReader}-specific optimizations, such as rewriting containing range queries.
      */
     public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) {
         return new QueryShardContext(
@@ -464,15 +470,6 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
             nowInMillis);
     }
 
-    /**
-     * Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
-     * {@link QueryShardContext#setTypes(String...)}. This context may be used for query parsing but cannot be
-     * used for rewriting since it does not know about the current {@link IndexReader}.
-     */
-    public QueryShardContext newQueryShardContext() {
-        return newQueryShardContext(0, null, System::currentTimeMillis);
-    }
-
     /**
      * The {@link ThreadPool} to use for this index.
      */
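
Two sentinel clocks recur throughout this diff: a frozen `() -> 0L` for contexts that only validate or parse mappings, and a supplier that throws for contexts that must never resolve `now`. A self-contained sketch of the idea (standalone Java, not the actual Elasticsearch classes):

    import java.util.function.LongSupplier;

    public class SentinelClocks {
        // Validation-only contexts: the value of `now` is irrelevant, so a
        // frozen clock keeps query parsing deterministic.
        static final LongSupplier FROZEN = () -> 0L;

        // Percolator contexts: queries are parsed at index time but executed
        // much later, so resolving `now` during parsing would silently bake
        // the indexing-time timestamp into the query. Fail loudly instead.
        static final LongSupplier FORBIDDEN = () -> {
            throw new IllegalArgumentException("this context must not use the current timestamp");
        };

        public static void main(String[] args) {
            System.out.println(FROZEN.getAsLong()); // prints 0
            try {
                FORBIDDEN.getAsLong();
            } catch (IllegalArgumentException e) {
                System.out.println("caught: " + e.getMessage());
            }
        }
    }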

QueryRewriteContext.java

@@ -86,7 +86,9 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
         return mapperService;
     }
 
-    /** Return the current {@link IndexReader}, or {@code null} if we are on the coordinating node. */
+    /** Return the current {@link IndexReader}, or {@code null} if no index reader is available, for
+     *  instance if we are on the coordinating node or if this rewrite context is used to index
+     *  queries (percolation). */
    public IndexReader getIndexReader() {
        return reader;
    }
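
The reworded javadoc above spells out the contract that rewriting code is expected to honor: check for a `null` reader and degrade gracefully. A hypothetical sketch of that pattern (the helper below is illustrative, not actual Elasticsearch code):

    // Hypothetical reader-aware rewrite honoring the getIndexReader() contract.
    static QueryBuilder rewriteIfPossible(QueryRewriteContext context, QueryBuilder query) {
        IndexReader reader = context.getIndexReader();
        if (reader == null) {
            // Coordinating node or percolation: no reader-specific
            // optimization is possible, so keep the query as-is.
            return query;
        }
        // With a reader, a range query could compare its bounds against the
        // per-field min/max values and rewrite to match-all or match-none.
        return query; // placeholder for the reader-specific rewrite
    }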

CustomBoostMappingTests.java

@@ -97,7 +97,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
                 .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject()
             .endObject().endObject().endObject().string();
         IndexService indexService = createIndex("test", BW_SETTINGS);
-        QueryShardContext context = indexService.newQueryShardContext();
+        QueryShardContext context = indexService.newQueryShardContext(0, null, () -> 0L);
         DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         DocumentFieldMappers fieldMappers = mapper.mappers();
         assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
@@ -150,7 +150,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
                 .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject()
             .endObject().endObject().endObject().string();
         IndexService indexService = createIndex("text");
-        QueryShardContext context = indexService.newQueryShardContext();
+        QueryShardContext context = indexService.newQueryShardContext(0, null, () -> 0L);
         DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         DocumentFieldMappers fieldMappers = mapper.mappers();
         assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));

DoubleIndexingDocTests.java

@@ -45,7 +45,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
         IndexService index = createIndex("test");
         client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
         DocumentMapper mapper = index.mapperService().documentMapper("type");
-        QueryShardContext context = index.newQueryShardContext();
+        QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L);
         ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()

ExternalFieldMapperTests.java

@@ -29,6 +29,7 @@ import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -39,6 +40,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.function.Supplier;
 
 import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.is;
@@ -59,8 +61,11 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
                 Collections.singletonMap(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo")),
                 Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()));
 
+        Supplier<QueryShardContext> queryShardContext = () -> {
+            return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
+        };
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext);
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
             XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject(ExternalMetadataMapper.CONTENT_TYPE)
@@ -108,8 +113,11 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
         mapperParsers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
         MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
 
+        Supplier<QueryShardContext> queryShardContext = () -> {
+            return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
+        };
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext);
 
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
             XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
@@ -178,8 +186,11 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
         mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
         MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
 
+        Supplier<QueryShardContext> queryShardContext = () -> {
+            return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
+        };
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext);
 
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
             XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")

FieldNamesFieldMapperTests.java

@@ -27,6 +27,7 @@ import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -38,6 +39,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.SortedSet;
 import java.util.TreeSet;
+import java.util.function.Supplier;
 
 public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
@@ -231,9 +233,12 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
             Collections.singletonMap("_dummy", new DummyMetadataFieldMapper.TypeParser())
         );
         final MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
-        MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+        Supplier<QueryShardContext> queryShardContext = () -> {
+            return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
+        };
+        MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext);
         DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService,
-            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext);
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}"));

CustomQueryParserIT.java

@@ -74,7 +74,8 @@ public class CustomQueryParserIT extends ESIntegTestCase {
     private static QueryShardContext queryShardContext() {
         IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class);
-        return indicesService.indexServiceSafe(resolveIndex("index")).newQueryShardContext();
+        return indicesService.indexServiceSafe(resolveIndex("index")).newQueryShardContext(
+            randomInt(20), null, () -> { throw new UnsupportedOperationException(); });
     }
 
     //see #11120

MultiMatchQueryTests.java

@@ -78,7 +78,8 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
     }
 
     public void testCrossFieldMultiMatchQuery() throws IOException {
-        QueryShardContext queryShardContext = indexService.newQueryShardContext();
+        QueryShardContext queryShardContext = indexService.newQueryShardContext(
+            randomInt(20), null, () -> { throw new UnsupportedOperationException(); });
         queryShardContext.setAllowUnmappedFields(true);
         Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
         try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
@@ -101,8 +102,9 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
         Term[] terms = new Term[] { new Term("foo", "baz"), new Term("bar", "baz") };
         float[] boosts = new float[] {2, 3};
         Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false);
-        Query actual = MultiMatchQuery.blendTerm(indexService.newQueryShardContext(), new BytesRef("baz"), null, 1f,
-            new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+        Query actual = MultiMatchQuery.blendTerm(
+            indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }),
+            new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
         assertEquals(expected, actual);
     }
@@ -116,8 +118,9 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
         Term[] terms = new Term[] { new Term("foo", "baz"), new Term("bar", "baz") };
         float[] boosts = new float[] {200, 30};
         Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false);
-        Query actual = MultiMatchQuery.blendTerm(indexService.newQueryShardContext(), new BytesRef("baz"), null, 1f,
-            new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+        Query actual = MultiMatchQuery.blendTerm(
+            indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }),
+            new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
         assertEquals(expected, actual);
     }
@@ -134,8 +137,9 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
         Term[] terms = new Term[] { new Term("foo", "baz") };
         float[] boosts = new float[] {2};
         Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false);
-        Query actual = MultiMatchQuery.blendTerm(indexService.newQueryShardContext(), new BytesRef("baz"), null, 1f,
-            new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+        Query actual = MultiMatchQuery.blendTerm(
+            indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }),
+            new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
         assertEquals(expected, actual);
     }
@@ -157,13 +161,15 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
             .add(expectedClause1, Occur.SHOULD)
             .add(expectedClause2, Occur.SHOULD)
             .build();
-        Query actual = MultiMatchQuery.blendTerm(indexService.newQueryShardContext(), new BytesRef("baz"), null, 1f,
-            new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+        Query actual = MultiMatchQuery.blendTerm(
+            indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }),
+            new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
         assertEquals(expected, actual);
     }
 
     public void testMultiMatchPrefixWithAllField() throws IOException {
-        QueryShardContext queryShardContext = indexService.newQueryShardContext();
+        QueryShardContext queryShardContext = indexService.newQueryShardContext(
+            randomInt(20), null, () -> { throw new UnsupportedOperationException(); });
         queryShardContext.setAllowUnmappedFields(true);
         Query parsedQuery =
             multiMatchQuery("foo").field("_all").type(MultiMatchQueryBuilder.Type.PHRASE_PREFIX).toQuery(queryShardContext);

PercolatorFieldMapperTests.java

@@ -292,7 +292,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
                 .field(fieldName, queryBuilder)
                 .endObject().bytes());
         BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
-        assertQueryBuilder(qbSource, queryBuilder.rewrite(indexService.newQueryShardContext()));
+        assertQueryBuilder(qbSource, queryBuilder.rewrite(indexService.newQueryShardContext(
+            randomInt(20), null, () -> { throw new UnsupportedOperationException(); })));
     }
@@ -476,7 +477,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
     private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException {
         XContentParser sourceParser = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent()
             .createParser(actual.bytes, actual.offset, actual.length);
-        QueryParseContext qsc = indexService.newQueryShardContext().newParseContext(sourceParser);
+        QueryParseContext qsc = indexService.newQueryShardContext(
+            randomInt(20), null, () -> { throw new UnsupportedOperationException(); })
+            .newParseContext(sourceParser);
         assertThat(qsc.parseInnerQueryBuilder().get(), equalTo(expected));
     }

Murmur3FieldMapperTests.java

@@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -42,6 +43,7 @@ import org.junit.Before;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.function.Supplier;
 
 import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
 import static org.hamcrest.Matchers.containsString;
@@ -58,8 +60,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
         mapperRegistry = new MapperRegistry(
             Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser()),
             Collections.emptyMap());
+        Supplier<QueryShardContext> queryShardContext = () -> {
+            return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
+        };
         parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
-            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+            indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, queryShardContext);
     }
 
     @Override
@@ -152,8 +157,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
         Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
         IndexService indexService2x = createIndex("test_old", oldIndexSettings);
 
+        Supplier<QueryShardContext> queryShardContext = () -> {
+            return indexService2x.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
+        };
         DocumentMapperParser parser = new DocumentMapperParser(indexService2x.getIndexSettings(), indexService2x.mapperService(), indexService2x.getIndexAnalyzers(),
-            indexService2x.similarityService(), mapperRegistry, indexService2x::newQueryShardContext);
+            indexService2x.similarityService(), mapperRegistry, queryShardContext);
 
         DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, defaultMapper.mappingSource().string());

TestSearchContext.java

@@ -53,7 +53,6 @@ import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.ScrollContext;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchRequest;
-import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.search.profile.Profilers;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.rescore.RescoreSearchContext;
@@ -99,7 +98,7 @@ public class TestSearchContext extends SearchContext {
         this.fixedBitSetFilterCache = indexService.cache().bitsetFilterCache();
         this.threadPool = threadPool;
         this.indexShard = indexService.getShardOrNull(0);
-        queryShardContext = indexService.newQueryShardContext();
+        queryShardContext = indexService.newQueryShardContext(0, null, () -> 0L);
     }
 
     public TestSearchContext(QueryShardContext queryShardContext) {