Remove ShapeFetchService and TermsLookupFetchService and use a Client instead
This commit removes the optional injection plumbing for the fetch services and instead provides a Client via IndexQueryParserService / QueryShardContext. This allows direct injection rather than optional injection, lets us remove the now-unnecessary services, and moves the fetch code to where it belongs. It also adds testing infrastructure to AbstractQueryTestCase for intercepting client calls, so query tests can cover GET-based lookups.
parent cac72c9207
commit 94a37d486f
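As a quick orientation before the diff: AbstractQueryTestCase now wraps the bound Client in a proxy and routes Client#get calls to an overridable executeGet hook, so query tests can answer GET requests themselves instead of needing a running node. Below is a minimal sketch of how a concrete test case might use that hook; the class name and the canned document are illustrative only, not part of this commit.

import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.index.get.GetResult;

// Hypothetical test case; the usual AbstractQueryTestCase plumbing (random query
// creation, Lucene query assertions) is omitted for brevity.
public class MyLookupQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {

    @Override
    protected GetResponse executeGet(GetRequest getRequest) {
        // The Client proxy installed by the base class forwards client.get(...) calls
        // made while parsing the query to this method, so we serve a canned document.
        String json = "{\"terms\" : [\"kimchy\", \"elasticsearch\"]}";
        return new GetResponse(new GetResult(getRequest.index(), getRequest.type(), getRequest.id(),
                0, true, new BytesArray(json), null));
    }
}

TermsQueryBuilderTests at the bottom of the diff does essentially this, serializing its random terms under a random path.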
GeoShapeQueryParser.java

@@ -25,23 +25,22 @@ import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.search.shape.ShapeFetchService;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;

public class GeoShapeQueryParser extends BaseQueryParserTemp {

    private ShapeFetchService fetchService;

    public static class DEFAULTS {
        public static final String INDEX_NAME = "shapes";
        public static final String SHAPE_FIELD_NAME = "shape";

@@ -114,7 +113,7 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
                        }
                        GetRequest getRequest = new GetRequest(index, type, id);
                        getRequest.copyContextAndHeadersFrom(SearchContext.current());
                        shape = fetchService.fetch(getRequest, shapePath);
                        shape = fetch(context.getClient(), getRequest, shapePath);
                    } else {
                        throw new QueryParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]");
                    }

@@ -173,11 +172,6 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
        return query;
    }

    @Inject(optional = true)
    public void setFetchService(@Nullable ShapeFetchService fetchService) {
        this.fetchService = fetchService;
    }

    public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) {
        switch(relation) {
            case DISJOINT:

@@ -195,4 +189,51 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
    public GeoShapeQueryBuilder getBuilderPrototype() {
        return GeoShapeQueryBuilder.PROTOTYPE;
    }

    /**
     * Fetches the Shape with the given ID in the given type and index.
     *
     * @param getRequest GetRequest containing index, type and id
     * @param path Name or path of the field in the Shape Document where the Shape itself is located
     * @return Shape with the given ID
     * @throws IOException Can be thrown while parsing the Shape Document and extracting the Shape
     */
    private ShapeBuilder fetch(Client client, GetRequest getRequest, String path) throws IOException {
        if (ShapesAvailability.JTS_AVAILABLE == false) {
            throw new IllegalStateException("JTS not available");
        }
        getRequest.preference("_local");
        getRequest.operationThreaded(false);
        GetResponse response = client.get(getRequest).actionGet();
        if (!response.isExists()) {
            throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found");
        }

        String[] pathElements = Strings.splitStringToArray(path, '.');
        int currentPathSlot = 0;

        XContentParser parser = null;
        try {
            parser = XContentHelper.createParser(response.getSourceAsBytesRef());
            XContentParser.Token currentToken;
            while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (currentToken == XContentParser.Token.FIELD_NAME) {
                    if (pathElements[currentPathSlot].equals(parser.currentName())) {
                        parser.nextToken();
                        if (++currentPathSlot == pathElements.length) {
                            return ShapeBuilder.parse(parser);
                        }
                    } else {
                        parser.nextToken();
                        parser.skipChildren();
                    }
                }
            }
            throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field");
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }
}

IndexQueryParserService.java

@@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable;

@@ -44,15 +45,12 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.search.termslookup.TermsLookupFetchService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.cache.query.terms.TermsLookup;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;

import java.io.IOException;
import java.util.List;

public class IndexQueryParserService extends AbstractIndexComponent {

@@ -98,7 +96,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
    private final ParseFieldMatcher parseFieldMatcher;
    private final boolean defaultAllowUnmappedFields;

    private TermsLookupFetchService termsLookupFetchService;
    private Client client;

    @Inject
    public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings, Settings settings,

@@ -108,7 +106,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
            BitsetFilterCache bitsetFilterCache,
            @Nullable SimilarityService similarityService, ClusterService clusterService,
            IndexNameExpressionResolver indexNameExpressionResolver,
            InnerHitsQueryParserHelper innerHitsQueryParserHelper) {
            InnerHitsQueryParserHelper innerHitsQueryParserHelper, Client client) {
        super(index, indexSettings);
        this.scriptService = scriptService;
        this.analysisService = analysisService;

@@ -128,11 +126,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
        this.defaultAllowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED, true);
        this.indicesQueriesRegistry = indicesQueriesRegistry;
        this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
    }

    @Inject(optional=true)
    public void setTermsLookupFetchService(@Nullable TermsLookupFetchService termsLookupFetchService) {
        this.termsLookupFetchService = termsLookupFetchService;
        this.client = client;
    }

    public void close() {

@@ -362,11 +356,11 @@ public class IndexQueryParserService extends AbstractIndexComponent {
        return false;
    }

    public List<Object> handleTermsLookup(TermsLookup termsLookup) {
        return this.termsLookupFetchService.fetch(termsLookup);
    }

    public InnerHitsQueryParserHelper getInnerHitsQueryParserHelper() {
        return innerHitsQueryParserHelper;
    }

    public Client getClient() {
        return client;
    }
}

QueryShardContext.java

@@ -28,6 +28,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;

@@ -331,10 +332,6 @@ public class QueryShardContext {
        return this.indexQueryParser.matchesIndices(indices);
    }

    public List<Object> handleTermsLookup(TermsLookup termsLookup) {
        return this.indexQueryParser.handleTermsLookup(termsLookup);
    }

    /*
     * Executes the given template, and returns the response.
     */

@@ -342,4 +339,8 @@ public class QueryShardContext {
        ExecutableScript executable = scriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext);
        return (BytesReference) executable.run();
    }

    public Client getClient() {
        return indexQueryParser.getClient();
    }
}

TermsQueryBuilder.java

@@ -30,13 +30,18 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.indices.cache.query.terms.TermsLookup;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.*;

@@ -338,7 +343,8 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
            if (termsLookup.index() == null) {
                termsLookup.index(context.index().name());
            }
            terms = context.handleTermsLookup(termsLookup);
            Client client = context.getClient();
            terms = fetch(termsLookup, client);
        } else {
            terms = values;
        }

@@ -348,6 +354,19 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
        return handleTermsQuery(terms, fieldName, context, minimumShouldMatch, disableCoord);
    }

    private List<Object> fetch(TermsLookup termsLookup, Client client) {
        List<Object> terms = new ArrayList<>();
        GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id())
                .preference("_local").routing(termsLookup.routing());
        getRequest.copyContextAndHeadersFrom(SearchContext.current());
        final GetResponse getResponse = client.get(getRequest).actionGet();
        if (getResponse.isExists()) {
            List<Object> extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap());
            terms.addAll(extractedValues);
        }
        return terms;
    }

    private static Query handleTermsQuery(List<Object> terms, String fieldName, QueryShardContext context, String minimumShouldMatch, boolean disableCoord) {
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        String indexFieldName;

ShapeFetchService.java (removed)

@@ -1,91 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search.shape;

import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

/**
 * Service which retrieves pre-indexed Shapes from another index
 */
public class ShapeFetchService extends AbstractComponent {

    private final Client client;

    @Inject
    public ShapeFetchService(Client client, Settings settings) {
        super(settings);
        this.client = client;
    }

    /**
     * Fetches the Shape with the given ID in the given type and index.
     *
     * @param getRequest GetRequest containing index, type and id
     * @param path Name or path of the field in the Shape Document where the Shape itself is located
     * @return Shape with the given ID
     * @throws IOException Can be thrown while parsing the Shape Document and extracting the Shape
     */
    public ShapeBuilder fetch(GetRequest getRequest,String path) throws IOException {
        getRequest.preference("_local");
        getRequest.operationThreaded(false);
        GetResponse response = client.get(getRequest).actionGet();
        if (!response.isExists()) {
            throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found");
        }

        String[] pathElements = Strings.splitStringToArray(path, '.');
        int currentPathSlot = 0;

        XContentParser parser = null;
        try {
            parser = XContentHelper.createParser(response.getSourceAsBytesRef());
            XContentParser.Token currentToken;
            while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (currentToken == XContentParser.Token.FIELD_NAME) {
                    if (pathElements[currentPathSlot].equals(parser.currentName())) {
                        parser.nextToken();
                        if (++currentPathSlot == pathElements.length) {
                            return ShapeBuilder.parse(parser);
                        }
                    } else {
                        parser.nextToken();
                        parser.skipChildren();
                    }
                }
            }
            throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field");
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }
}

ShapeModule.java (removed)

@@ -1,34 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search.shape;

import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.inject.AbstractModule;

public class ShapeModule extends AbstractModule {

    @Override
    protected void configure() {
        // TODO: We could wrap this entire module in a JTS_AVAILABILITY check
        if (ShapesAvailability.JTS_AVAILABLE) {
            bind(ShapeFetchService.class).asEagerSingleton();
        }
    }
}

TermsLookupFetchService.java (removed)

@@ -1,60 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search.termslookup;

import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.indices.cache.query.terms.TermsLookup;
import org.elasticsearch.search.internal.SearchContext;

import java.util.ArrayList;
import java.util.List;

/**
 * Service which retrieves terms from a {@link TermsLookup} specification
 */
public class TermsLookupFetchService extends AbstractComponent {

    private final Client client;

    @Inject
    public TermsLookupFetchService(Client client, Settings settings) {
        super(settings);
        this.client = client;
    }

    public List<Object> fetch(TermsLookup termsLookup) {
        List<Object> terms = new ArrayList<>();
        GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id())
                .preference("_local").routing(termsLookup.routing());
        getRequest.copyContextAndHeadersFrom(SearchContext.current());
        final GetResponse getResponse = client.get(getRequest).actionGet();
        if (getResponse.isExists()) {
            List<Object> extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap());
            terms.addAll(extractedValues);
        }
        return terms;
    }
}

Node.java

@@ -31,7 +31,6 @@ import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Injector;

@@ -56,7 +55,6 @@ import org.elasticsearch.gateway.GatewayModule;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.http.HttpServer;
import org.elasticsearch.http.HttpServerModule;
import org.elasticsearch.index.search.shape.ShapeModule;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.CircuitBreakerModule;

@@ -190,7 +188,6 @@ public class Node implements Releasable {
        modules.add(new MonitorModule(settings));
        modules.add(new GatewayModule(settings));
        modules.add(new NodeClientModule());
        modules.add(new ShapeModule());
        modules.add(new PercolatorModule());
        modules.add(new ResourceWatcherModule());
        modules.add(new RepositoriesModule());

AbstractQueryTestCase.java

@@ -23,7 +23,12 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;

import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -79,10 +84,14 @@ import org.junit.BeforeClass;
import org.junit.Test;

import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

@@ -127,6 +136,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
    private static NamedWriteableRegistry namedWriteableRegistry;

    private static String[] randomTypes;
    private static ClientInvocationHandler clientInvocationHandler = new ClientInvocationHandler();

    /**
     * Setup for the whole base test class.

@@ -146,6 +156,10 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
        final TestClusterService clusterService = new TestClusterService();
        clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put(
                new IndexMetaData.Builder(index.name()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
        final Client proxy = (Client) Proxy.newProxyInstance(
                Client.class.getClassLoader(),
                new Class[]{Client.class},
                clientInvocationHandler);
        injector = new ModulesBuilder().add(
                new EnvironmentModule(new Environment(settings)),
                new SettingsModule(settings),

@@ -166,6 +180,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                new AbstractModule() {
                    @Override
                    protected void configure() {
                        bind(Client.class).toInstance(proxy);
                        Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
                        bind(ClusterService.class).toProvider(Providers.of(clusterService));
                        bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);

@@ -210,6 +225,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>

    @Before
    public void beforeTest() {
        clientInvocationHandler.delegate = this;
        //set some random types to be queried as part the search request, before each test
        randomTypes = getRandomTypes();
    }

@@ -222,6 +238,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>

    @After
    public void afterTest() {
        clientInvocationHandler.delegate = null;
        QueryShardContext.removeTypes();
        SearchContext.removeCurrent();
    }

@@ -605,4 +622,31 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
    }

    private static final List<String> TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs());

    private static class ClientInvocationHandler implements InvocationHandler {
        AbstractQueryTestCase delegate;
        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            if (method.equals(Client.class.getDeclaredMethod("get", GetRequest.class))) {
                return new PlainActionFuture<GetResponse>() {
                    @Override
                    public GetResponse get() throws InterruptedException, ExecutionException {
                        return delegate.executeGet((GetRequest) args[0]);
                    }
                };
            } else if (method.equals(Object.class.getDeclaredMethod("toString"))) {
                return "MockClient";
            }
            throw new UnsupportedOperationException("this test can't handle calls to: " + method);
        }

    }

    /**
     * Override this to handle {@link Client#get(GetRequest)} calls from parsers / builders
     */
    protected GetResponse executeGet(GetRequest getRequest) {
        throw new UnsupportedOperationException("this test can't handle GET requests");
    }

}

TemplateQueryParserTests.java

@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.AbstractModule;

@@ -54,6 +55,9 @@ import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

/**
 * Test parsing and executing a template request.

@@ -72,7 +76,11 @@ public class TemplateQueryParserTests extends ESTestCase {
                .put("name", getClass().getName())
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                .build();

        final Client proxy = (Client) Proxy.newProxyInstance(
                Client.class.getClassLoader(),
                new Class[]{Client.class}, (proxy1, method, args) -> {
                    throw new UnsupportedOperationException("client is just a dummy");
                });
        Index index = new Index("test");
        injector = new ModulesBuilder().add(
                new EnvironmentModule(new Environment(settings)),

@@ -94,6 +102,7 @@ public class TemplateQueryParserTests extends ESTestCase {
                new AbstractModule() {
                    @Override
                    protected void configure() {
                        bind(Client.class).toInstance(proxy); // not needed here
                        Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
                        bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
                        bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);

TermsQueryBuilderTests.java

@@ -25,9 +25,15 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.search.termslookup.TermsLookupFetchService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.indices.cache.query.terms.TermsLookup;
import org.hamcrest.Matchers;
import org.junit.Before;

@@ -43,12 +49,21 @@ import static org.hamcrest.Matchers.*;

public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {

    private MockTermsLookupFetchService termsLookupFetchService;
    private List<Object> randomTerms;
    private String termsPath;

    @Before
    public void mockTermsLookupFetchService() {
        termsLookupFetchService = new MockTermsLookupFetchService();
        queryParserService().setTermsLookupFetchService(termsLookupFetchService);
    public void randomTerms() {
        List<Object> randomTerms = new ArrayList<>();
        String[] strings = generateRandomStringArray(10, 10, false, true);
        for (String string : strings) {
            randomTerms.add(string);
            if (rarely()) {
                randomTerms.add(null);
            }
        }
        this.randomTerms = randomTerms;
        termsPath = randomAsciiOfLength(10).replace('.', '_');
    }

    @Override

@@ -76,7 +91,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
                randomBoolean() ? randomAsciiOfLength(10) : null,
                randomAsciiOfLength(10),
                randomAsciiOfLength(10),
                randomAsciiOfLength(10)
                termsPath
        ).routing(randomBoolean() ? randomAsciiOfLength(10) : null);
    }

@@ -94,7 +109,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
        // expected returned terms depending on whether we have a terms query or a terms lookup query
        List<Object> terms;
        if (queryBuilder.termsLookup() != null) {
            terms = termsLookupFetchService.getRandomTerms();
            terms = randomTerms;
        } else {
            terms = queryBuilder.values();
        }

@@ -242,28 +257,19 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
        assertEquals("42%", copy.minimumShouldMatch());
    }

    private static class MockTermsLookupFetchService extends TermsLookupFetchService {

        private List<Object> randomTerms = new ArrayList<>();

        MockTermsLookupFetchService() {
            super(null, Settings.Builder.EMPTY_SETTINGS);
            String[] strings = generateRandomStringArray(10, 10, false, true);
            for (String string : strings) {
                randomTerms.add(string);
                if (rarely()) {
                    randomTerms.add(null);
                }
            }
        }

        @Override
        public List<Object> fetch(TermsLookup termsLookup) {
            return randomTerms;
        }

        List<Object> getRandomTerms() {
            return randomTerms;
    @Override
    public GetResponse executeGet(GetRequest getRequest) {
        String json;
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
            builder.startObject();
            builder.array(termsPath, randomTerms);
            builder.endObject();
            json = builder.string();
        } catch (IOException ex) {
            throw new ElasticsearchException("boom", ex);
        }
        return new GetResponse(new GetResult(getRequest.index(), getRequest.type(), getRequest.id(), 0, true, new BytesArray(json), null));
    }
}
