Unguice SearchModule (#20456)

After this change SearchModule no longer subclasses AbstractModule; all wiring now
happens in `Node.java`. As a side effect, several tests no longer need a Guice injector.
Simon Willnauer 2016-09-14 10:07:53 +02:00 committed by GitHub
parent 7560101ec7
commit 89640965d2
8 changed files with 99 additions and 342 deletions
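
For context, here is a minimal sketch of the injector-free test setup this change enables. The test class name and the settings values are hypothetical; the SearchModule constructor and the getQueryParserRegistry/getSearchRequestParsers/getSearchExtRegistry accessors are the ones introduced in the diff below.

// Hypothetical test sketch; not part of the commit. It only uses constructors and
// accessors that appear in the diff below.
import static java.util.Collections.emptyList;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchExtRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.test.ESTestCase;

public class SearchModuleWiringSketchTests extends ESTestCase {

    public void testRegistriesAreAvailableWithoutGuice() {
        Settings settings = Settings.builder()
                .put("node.name", "sketch-node") // arbitrary test node name
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
                .build();
        // transportClient = false, no plugins; mirrors the test setups in this commit.
        SearchModule searchModule = new SearchModule(settings, false, emptyList());

        // Formerly obtained via injector.getInstance(...); now plain getters.
        IndicesQueriesRegistry queriesRegistry = searchModule.getQueryParserRegistry();
        SearchRequestParsers requestParsers = searchModule.getSearchRequestParsers();
        SearchExtRegistry searchExtRegistry = searchModule.getSearchExtRegistry();

        assertNotNull(queriesRegistry);
        assertNotNull(requestParsers.aggParsers);
        assertNotNull(searchExtRegistry);
    }
}

Production wiring follows the same idea: `Node.java` builds the SearchModule itself and binds these registries into Guice, as shown in the Node.java hunk below.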

View File: TransportClient.java

@ -136,7 +136,6 @@ public abstract class TransportClient extends AbstractClient {
}
modules.add(networkModule);
modules.add(b -> b.bind(ThreadPool.class).toInstance(threadPool));
modules.add(searchModule);
ActionModule actionModule = new ActionModule(false, true, settings, null, settingsModule.getClusterSettings(),
pluginsService.filterPlugins(ActionPlugin.class));
modules.add(actionModule);

View File: Node.java

@ -88,6 +88,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.indices.ttl.IndicesTTLService;
import org.elasticsearch.ingest.IngestService;
@ -111,7 +112,9 @@ import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchExtRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.snapshots.SnapshotShardsService;
@ -327,7 +330,6 @@ public class Node implements Closeable {
IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class));
modules.add(indicesModule);
SearchModule searchModule = new SearchModule(settings, false, pluginsService.filterPlugins(SearchPlugin.class));
modules.add(searchModule);
modules.add(new ActionModule(DiscoveryNode.isIngestNode(settings), false, settings,
clusterModule.getIndexNameExpressionResolver(), settingsModule.getClusterSettings(),
pluginsService.filterPlugins(ActionPlugin.class)));
@ -363,7 +365,11 @@ public class Node implements Closeable {
.map(Plugin::getCustomMetaDataUpgrader)
.collect(Collectors.toList());
final MetaDataUpgrader metaDataUpgrader = new MetaDataUpgrader(customMetaDataUpgraders);
modules.add(b -> {
b.bind(IndicesQueriesRegistry.class).toInstance(searchModule.getQueryParserRegistry());
b.bind(SearchRequestParsers.class).toInstance(searchModule.getSearchRequestParsers());
b.bind(SearchExtRegistry.class).toInstance(searchModule.getSearchExtRegistry());
b.bind(PluginsService.class).toInstance(pluginsService);
b.bind(Client.class).toInstance(client);
b.bind(NodeClient.class).toInstance(client);

View File: SearchModule.java

@ -23,7 +23,6 @@ import org.apache.lucene.search.BooleanQuery;
import org.elasticsearch.common.NamedRegistry;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Setting;
@ -286,7 +285,7 @@ import static java.util.Objects.requireNonNull;
/**
* Sets up things that can be done at search time like queries, aggregations, and suggesters.
*/
public class SearchModule extends AbstractModule {
public class SearchModule {
public static final Setting<Integer> INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count",
1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope);
@ -375,16 +374,6 @@ public class SearchModule extends AbstractModule {
return aggregatorParsers;
}
@Override
protected void configure() {
if (false == transportClient) {
bind(IndicesQueriesRegistry.class).toInstance(queryParserRegistry);
bind(SearchRequestParsers.class).toInstance(searchRequestParsers);
bind(SearchExtRegistry.class).toInstance(searchExtParserRegistry);
}
}
private void registerAggregations(List<SearchPlugin> plugins) {
registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, new AvgParser())
.addResultReader(InternalAvg::new));
@ -811,4 +800,8 @@ public class SearchModule extends AbstractModule {
public FetchPhase getFetchPhase() {
return new FetchPhase(fetchSubPhases);
}
public SearchExtRegistry getSearchExtRegistry() {
return searchExtParserRegistry;
}
}

View File: AggregatorParsingTests.java

@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.search.SearchExtRegistry;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.index.query.QueryParseContext;
@ -74,73 +75,33 @@ import static org.hamcrest.Matchers.containsString;
public class AggregatorParsingTests extends ESTestCase {
private static Injector injector;
private static Index index;
private String[] currentTypes;
private static String[] currentTypes;
protected static String[] getCurrentTypes() {
protected String[] getCurrentTypes() {
return currentTypes;
}
private static NamedWriteableRegistry namedWriteableRegistry;
protected static AggregatorParsers aggParsers;
protected static IndicesQueriesRegistry queriesRegistry;
protected static ParseFieldMatcher parseFieldMatcher;
protected AggregatorParsers aggParsers;
protected IndicesQueriesRegistry queriesRegistry;
protected ParseFieldMatcher parseFieldMatcher;
/**
* Setup for the whole base test class.
*/
@BeforeClass
public static void init() throws IOException {
@Override
public void setUp() throws Exception {
super.setUp();
// we have to prefer CURRENT since with the range of versions we support
// it's rather unlikely to get the current actually.
Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
final ThreadPool threadPool = new ThreadPool(settings);
final ClusterService clusterService = createClusterService(threadPool);
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
ScriptModule scriptModule = newTestScriptModule();
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) {
@Override
protected void configure() {
bindMapperExtension();
}
};
IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
SearchModule searchModule = new SearchModule(settings, false, emptyList());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
namedWriteableRegistry = new NamedWriteableRegistry(entries);
injector = new ModulesBuilder().add(
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
},
settingsModule, indicesModule, searchModule,
new IndexSettingsModule(index, settings),
new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toInstance(clusterService);
bind(CircuitBreakerService.class).toInstance(new NoneCircuitBreakerService());
bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
}
}).createInjector();
aggParsers = injector.getInstance(SearchRequestParsers.class).aggParsers;
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
// create some random type with some default field, those types will
// stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
@ -148,21 +109,10 @@ public class AggregatorParsingTests extends ESTestCase {
String type = randomAsciiOfLengthBetween(1, 10);
currentTypes[i] = type;
}
queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
queriesRegistry = searchModule.getQueryParserRegistry();
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
@AfterClass
public static void afterClass() throws Exception {
injector.getInstance(ClusterService.class).close();
terminate(injector.getInstance(ThreadPool.class));
injector = null;
index = null;
aggParsers = null;
currentTypes = null;
namedWriteableRegistry = null;
}
public void testTwoTypes() throws Exception {
String source = JsonXContent.contentBuilder()
.startObject()

View File: BaseAggregationTestCase.java

@ -19,46 +19,24 @@
package org.elasticsearch.search.aggregations;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
@ -66,8 +44,6 @@ import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.equalTo;
public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuilder<AB>> extends ESTestCase {
@ -78,101 +54,47 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
protected static final String DATE_FIELD_NAME = "mapped_date";
protected static final String IP_FIELD_NAME = "mapped_ip";
protected static final String OBJECT_FIELD_NAME = "mapped_object";
protected static final String[] mappedFieldNames = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME,
DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, IP_FIELD_NAME, OBJECT_FIELD_NAME};
private static Injector injector;
private static Index index;
private String[] currentTypes;
private static String[] currentTypes;
protected static String[] getCurrentTypes() {
protected String[] getCurrentTypes() {
return currentTypes;
}
private static NamedWriteableRegistry namedWriteableRegistry;
private NamedWriteableRegistry namedWriteableRegistry;
protected static AggregatorParsers aggParsers;
protected static IndicesQueriesRegistry queriesRegistry;
protected static ParseFieldMatcher parseFieldMatcher;
protected AggregatorParsers aggParsers;
protected IndicesQueriesRegistry queriesRegistry;
protected ParseFieldMatcher parseFieldMatcher;
protected abstract AB createTestAggregatorBuilder();
/**
* Setup for the whole base test class.
*/
@BeforeClass
public static void init() throws IOException {
index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
injector = buildInjector(index);
namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class);
aggParsers = injector.getInstance(SearchRequestParsers.class).aggParsers;
public void setUp() throws Exception {
super.setUp();
Settings settings = Settings.builder()
.put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
SearchModule searchModule = new SearchModule(settings, false, emptyList());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
namedWriteableRegistry = new NamedWriteableRegistry(entries);
queriesRegistry = searchModule.getQueryParserRegistry();
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
//create some random type with some default field, those types will stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {
String type = randomAsciiOfLengthBetween(1, 10);
currentTypes[i] = type;
}
queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
public static final Injector buildInjector(Index index) {
// we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually.
Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
Settings settings = Settings.builder()
.put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.build();
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
final ThreadPool threadPool = new ThreadPool(settings);
final ClusterService clusterService = createClusterService(threadPool);
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
ScriptModule scriptModule = newTestScriptModule();
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) {
@Override
protected void configure() {
bindMapperExtension();
}
};
SearchModule searchModule = new SearchModule(settings, false, emptyList());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries);
return new ModulesBuilder().add(
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
b.bind(ClusterService.class).toProvider(Providers.of(clusterService));
b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
},
settingsModule, indicesModule, searchModule, new IndexSettingsModule(index, settings)
).createInjector();
}
@AfterClass
public static void afterClass() throws Exception {
injector.getInstance(ClusterService.class).close();
terminate(injector.getInstance(ThreadPool.class));
injector = null;
index = null;
aggParsers = null;
currentTypes = null;
namedWriteableRegistry = null;
}
/**
* Generic test that creates new AggregatorFactory from the test
* AggregatorFactory and checks both for equality and asserts equality on
@ -263,24 +185,6 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
}
}
protected String[] getRandomTypes() {
String[] types;
if (currentTypes.length > 0 && randomBoolean()) {
int numberOfQueryTypes = randomIntBetween(1, currentTypes.length);
types = new String[numberOfQueryTypes];
for (int i = 0; i < numberOfQueryTypes; i++) {
types[i] = randomFrom(currentTypes);
}
} else {
if (randomBoolean()) {
types = new String[]{MetaData.ALL};
} else {
types = new String[0];
}
}
return types;
}
public String randomNumericField() {
int randomInt = randomInt(3);
switch (randomInt) {

View File: BasePipelineAggregationTestCase.java

@ -20,29 +20,32 @@
package org.elasticsearch.search.aggregations;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelineAggregationBuilder<AF>> extends ESTestCase {
@ -53,59 +56,51 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
protected static final String DATE_FIELD_NAME = "mapped_date";
private static Injector injector;
private static Index index;
private String[] currentTypes;
private static String[] currentTypes;
protected static String[] getCurrentTypes() {
protected String[] getCurrentTypes() {
return currentTypes;
}
private static NamedWriteableRegistry namedWriteableRegistry;
private NamedWriteableRegistry namedWriteableRegistry;
protected static AggregatorParsers aggParsers;
protected static ParseFieldMatcher parseFieldMatcher;
protected static IndicesQueriesRegistry queriesRegistry;
protected AggregatorParsers aggParsers;
protected IndicesQueriesRegistry queriesRegistry;
protected ParseFieldMatcher parseFieldMatcher;
protected abstract AF createTestAggregatorFactory();
/**
* Setup for the whole base test class.
*/
@BeforeClass
public static void init() throws IOException {
index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
injector = BaseAggregationTestCase.buildInjector(index);
namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class);
aggParsers = injector.getInstance(SearchRequestParsers.class).aggParsers;
public void setUp() throws Exception {
super.setUp();
Settings settings = Settings.builder()
.put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
SearchModule searchModule = new SearchModule(settings, false, emptyList());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
namedWriteableRegistry = new NamedWriteableRegistry(entries);
queriesRegistry = searchModule.getQueryParserRegistry();
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
//create some random type with some default field, those types will stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {
String type = randomAsciiOfLengthBetween(1, 10);
currentTypes[i] = type;
}
queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
@AfterClass
public static void afterClass() throws Exception {
injector.getInstance(ClusterService.class).close();
terminate(injector.getInstance(ThreadPool.class));
injector = null;
index = null;
aggParsers = null;
currentTypes = null;
namedWriteableRegistry = null;
}
/**
* Generic test that creates new AggregatorFactory from the test
* AggregatorFactory and checks both for equality and asserts equality on
* the two queries.
*/
public void testFromXContent() throws IOException {
AF testAgg = createTestAggregatorFactory();
AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder().skipResolveOrder().addPipelineAggregator(testAgg);

View File: SearchSourceBuilderTests.java

@ -53,9 +53,11 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchExtRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -93,69 +95,30 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
public class SearchSourceBuilderTests extends ESTestCase {
private static Injector injector;
private static NamedWriteableRegistry namedWriteableRegistry;
private NamedWriteableRegistry namedWriteableRegistry;
private static SearchRequestParsers searchRequestParsers;
private SearchRequestParsers searchRequestParsers;
private static Index index;
private String[] currentTypes;
private static String[] currentTypes;
private ParseFieldMatcher parseFieldMatcher;
private static ParseFieldMatcher parseFieldMatcher;
@BeforeClass
public static void init() throws IOException {
public void setUp() throws Exception {
super.setUp();
// we have to prefer CURRENT since with the range of versions we support
// it's rather unlikely to get the current actually.
Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
Settings settings = Settings.builder()
.put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
final ThreadPool threadPool = new ThreadPool(settings);
final ClusterService clusterService = createClusterService(threadPool);
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
ScriptModule scriptModule = newTestScriptModule();
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
IndicesModule indicesModule = new IndicesModule(Collections.emptyList()) {
@Override
protected void configure() {
bindMapperExtension();
}
};
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
SearchModule searchModule = new SearchModule(settings, false,
Collections.singletonList(new FetchSubPhasePluginIT.FetchTermVectorsPlugin()));
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
namedWriteableRegistry = new NamedWriteableRegistry(entries);
injector = new ModulesBuilder().add(
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
},
settingsModule, indicesModule, searchModule,
new IndexSettingsModule(index, settings),
new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of(clusterService));
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
}
}
).createInjector();
searchRequestParsers = injector.getInstance(SearchRequestParsers.class);
searchRequestParsers = searchModule.getSearchRequestParsers();
// create some random type with some default field, those types will
// stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
@ -166,17 +129,6 @@ public class SearchSourceBuilderTests extends ESTestCase {
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
@AfterClass
public static void afterClass() throws Exception {
injector.getInstance(ClusterService.class).close();
terminate(injector.getInstance(ThreadPool.class));
injector = null;
index = null;
searchRequestParsers = null;
currentTypes = null;
namedWriteableRegistry = null;
}
public static SearchSourceBuilder createSearchSourceBuilder() throws IOException {
SearchSourceBuilder builder = new SearchSourceBuilder();
if (randomBoolean()) {
@ -410,11 +362,11 @@ public class SearchSourceBuilderTests extends ESTestCase {
assertParseSearchSource(testSearchSourceBuilder, builder.bytes());
}
private static void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes) throws IOException {
private void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes) throws IOException {
assertParseSearchSource(testBuilder, searchSourceAsBytes, ParseFieldMatcher.STRICT);
}
private static void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes, ParseFieldMatcher pfm)
private void assertParseSearchSource(SearchSourceBuilder testBuilder, BytesReference searchSourceAsBytes, ParseFieldMatcher pfm)
throws IOException {
XContentParser parser = XContentFactory.xContent(searchSourceAsBytes).createParser(searchSourceAsBytes);
QueryParseContext parseContext = new QueryParseContext(searchRequestParsers.queryParsers, parser, pfm);
@ -429,7 +381,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
}
private static QueryParseContext createParseContext(XContentParser parser) {
private QueryParseContext createParseContext(XContentParser parser) {
return new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher);
}
@ -474,7 +426,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
}
//we use the streaming infra to create a copy of the builder provided as argument
protected static SearchSourceBuilder copyBuilder(SearchSourceBuilder builder) throws IOException {
protected SearchSourceBuilder copyBuilder(SearchSourceBuilder builder) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
builder.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {

View File: AbstractQueryTestCase.java

@ -39,7 +39,6 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
@ -47,10 +46,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@ -86,7 +81,6 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.mapper.MapperRegistry;
@ -101,7 +95,6 @@ import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.After;
@ -160,6 +153,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
private static String[] currentTypes;
private static String[] randomTypes;
protected static Index getIndex() {
return index;
}
@ -1015,7 +1009,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
private static class ServiceHolder implements Closeable {
private final Injector injector;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final IndexFieldDataService indexFieldDataService;
private final SearchModule searchModule;
@ -1026,18 +1019,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
private final MapperService mapperService;
private final BitsetFilterCache bitsetFilterCache;
private final ScriptService scriptService;
private final Client client;
ServiceHolder(Settings nodeSettings, Settings indexSettings,
Collection<Class<? extends Plugin>> plugins, AbstractQueryTestCase<?> testCase) throws IOException {
final ThreadPool threadPool = new ThreadPool(nodeSettings);
ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
ClusterServiceUtils.setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(
new MetaData.Builder().put(new IndexMetaData.Builder(
index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings, null);
PluginsService pluginsService = new PluginsService(nodeSettings, env.modulesFile(), env.pluginsFile(), plugins);
final Client proxy = (Client) Proxy.newProxyInstance(
client = (Client) Proxy.newProxyInstance(
Client.class.getClassLoader(),
new Class[]{Client.class},
clientInvocationHandler);
@ -1047,47 +1036,23 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(nodeSettings, scriptSettings, pluginsService.getPluginSettingsFilter());
searchModule = new SearchModule(nodeSettings, false, pluginsService.filterPlugins(SearchPlugin.class));
IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class)) {
@Override
public void configure() {
// skip services
bindMapperExtension();
}
};
IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class));
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries);
ModulesBuilder modulesBuilder = new ModulesBuilder();
for (Module pluginModule : pluginsService.createGuiceModules()) {
modulesBuilder.add(pluginModule);
}
modulesBuilder.add(
b -> {
b.bind(PluginsService.class).toInstance(pluginsService);
b.bind(Environment.class).toInstance(new Environment(nodeSettings));
b.bind(ThreadPool.class).toInstance(threadPool);
b.bind(Client.class).toInstance(proxy);
b.bind(ClusterService.class).toProvider(Providers.of(clusterService));
b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
},
settingsModule, indicesModule, searchModule, new IndexSettingsModule(index, indexSettings)
);
pluginsService.processModules(modulesBuilder);
injector = modulesBuilder.createInjector();
IndexScopedSettings indexScopedSettings = injector.getInstance(IndexScopedSettings.class);
IndexScopedSettings indexScopedSettings = settingsModule.getIndexScopedSettings();
idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings);
AnalysisModule analysisModule = new AnalysisModule(new Environment(nodeSettings), emptyList());
AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
scriptService = scriptModule.getScriptService();
similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class);
MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, this::createShardContext);
IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings, new IndexFieldDataCache.Listener() {
});
indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache,
injector.getInstance(CircuitBreakerService.class), mapperService);
new NoneCircuitBreakerService(), mapperService);
bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {
@Override
public void onCache(ShardId shardId, Accountable accountable) {
@ -1099,7 +1064,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
}
});
indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
indicesQueriesRegistry = searchModule.getQueryParserRegistry();
String geoFieldMapping = (idxSettings.getIndexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) ?
LEGACY_GEO_POINT_FIELD_MAPPING : "type=geo_point";
@ -1123,24 +1088,17 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
MapperService.MergeReason.MAPPING_UPDATE, false);
}
testCase.initializeAdditionalMappings(mapperService);
this.namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class);
this.namedWriteableRegistry = namedWriteableRegistry;
}
@Override
public void close() throws IOException {
injector.getInstance(ClusterService.class).close();
try {
terminate(injector.getInstance(ThreadPool.class));
} catch (InterruptedException e) {
IOUtils.reThrow(e);
}
}
QueryShardContext createShardContext() {
ClusterState state = ClusterState.builder(new ClusterName("_name")).build();
Client client = injector.getInstance(Client.class);
return new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService,
scriptService, indicesQueriesRegistry, client, null, state);
scriptService, indicesQueriesRegistry, this.client, null, state);
}
ScriptModule createScriptModule(List<ScriptPlugin> scriptPlugins) {