Merge branch 'master' into modularize_netty

This commit is contained in:
Ryan Ernst 2016-07-11 23:49:00 -07:00
commit 93aebbef0f
71 changed files with 722 additions and 454 deletions

View File

@ -21,6 +21,8 @@ package org.elasticsearch.client.transport;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@ -53,6 +55,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.threadpool.ExecutorBuilder;
import org.elasticsearch.threadpool.ThreadPool;
@ -138,12 +141,12 @@ public class TransportClient extends AbstractClient {
ModulesBuilder modules = new ModulesBuilder();
// plugin modules must be added here, before others or we can get crazy injection errors...
for (Module pluginModule : pluginsService.nodeModules()) {
for (Module pluginModule : pluginsService.createGuiceModules()) {
modules.add(pluginModule);
}
modules.add(new NetworkModule(networkService, settings, true, namedWriteableRegistry));
modules.add(b -> b.bind(ThreadPool.class).toInstance(threadPool));
modules.add(new SearchModule(settings, namedWriteableRegistry, true));
modules.add(new SearchModule(settings, namedWriteableRegistry, true, pluginsService.filterPlugins(SearchPlugin.class)));
ActionModule actionModule = new ActionModule(false, true, settings, null, settingsModule.getClusterSettings(),
pluginsService.filterPlugins(ActionPlugin.class));
modules.add(actionModule);
@ -154,11 +157,13 @@ public class TransportClient extends AbstractClient {
resourcesToClose.add(circuitBreakerService);
BigArrays bigArrays = new BigArrays(settings, circuitBreakerService);
resourcesToClose.add(bigArrays);
Collection<Object> pluginComponents = pluginsService.createComponenents();
modules.add(settingsModule);
modules.add((b -> {
b.bind(BigArrays.class).toInstance(bigArrays);
b.bind(PluginsService.class).toInstance(pluginsService);
b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService);
pluginComponents.stream().forEach(p -> b.bind((Class)p.getClass()).toInstance(p));
}));
Injector injector = modules.createInjector();
@ -168,9 +173,17 @@ public class TransportClient extends AbstractClient {
final TransportProxyClient proxy = new TransportProxyClient(settings, transportService, nodesService,
actionModule.getActions().values().stream().map(x -> x.getAction()).collect(Collectors.toList()));
List<LifecycleComponent> pluginLifecycleComponents = pluginComponents.stream()
.filter(p -> p instanceof LifecycleComponent)
.map(p -> (LifecycleComponent)p).collect(Collectors.toList());
pluginLifecycleComponents.addAll(pluginsService.getGuiceServiceClasses().stream()
.map(injector::getInstance).collect(Collectors.toList()));
resourcesToClose.addAll(pluginLifecycleComponents);
transportService.start();
transportService.acceptIncomingRequests();
TransportClient transportClient = new TransportClient(injector, nodesService, proxy);
TransportClient transportClient = new TransportClient(injector, pluginLifecycleComponents, nodesService, proxy);
resourcesToClose.clear();
return transportClient;
} finally {
@ -183,12 +196,15 @@ public class TransportClient extends AbstractClient {
final Injector injector;
private final List<LifecycleComponent> pluginLifecycleComponents;
private final TransportClientNodesService nodesService;
private final TransportProxyClient proxy;
private TransportClient(Injector injector, TransportClientNodesService nodesService, TransportProxyClient proxy) {
private TransportClient(Injector injector, List<LifecycleComponent> pluginLifecycleComponents,
TransportClientNodesService nodesService, TransportProxyClient proxy) {
super(injector.getInstance(Settings.class), injector.getInstance(ThreadPool.class));
this.injector = injector;
this.pluginLifecycleComponents = Collections.unmodifiableList(pluginLifecycleComponents);
this.nodesService = nodesService;
this.proxy = proxy;
}
@ -269,8 +285,8 @@ public class TransportClient extends AbstractClient {
closeables.add(nodesService);
closeables.add(injector.getInstance(TransportService.class));
for (Class<? extends LifecycleComponent> plugin : injector.getInstance(PluginsService.class).nodeServices()) {
closeables.add(injector.getInstance(plugin));
for (LifecycleComponent plugin : pluginLifecycleComponents) {
closeables.add(plugin);
}
closeables.add(() -> ThreadPool.terminate(injector.getInstance(ThreadPool.class), 10, TimeUnit.SECONDS));
closeables.add(injector.getInstance(BigArrays.class));

View File

@ -22,12 +22,13 @@ package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchModule;
import java.io.IOException;
import java.util.function.BiFunction;
@ -64,8 +65,7 @@ import java.util.function.BiFunction;
* <p>
* To write a new decay scoring function, create a new class that extends
* {@link DecayFunctionBuilder}, setup a PARSER field with this class, and
* register them both using
* {@link org.elasticsearch.search.SearchModule#registerScoreFunction(Writeable.Reader, ScoreFunctionParser, ParseField)}.
* register them in {@link SearchModule#registerScoreFunctions} or {@link SearchPlugin#getScoreFunctions}.
* See {@link GaussDecayFunctionBuilder#PARSER} for an example.
*/
public final class DecayFunctionParser<DFB extends DecayFunctionBuilder<DFB>> implements ScoreFunctionParser<DFB> {

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@ -29,7 +28,6 @@ import java.io.IOException;
public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder<ExponentialDecayFunctionBuilder> {
public static final String NAME = "exp";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
public static final ScoreFunctionParser<ExponentialDecayFunctionBuilder> PARSER = new DecayFunctionParser<>(
ExponentialDecayFunctionBuilder::new);
public static final DecayFunction EXP_DECAY_FUNCTION = new ExponentialDecayScoreFunction();

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -43,7 +42,6 @@ import java.util.Objects;
*/
public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder<FieldValueFactorFunctionBuilder> {
public static final String NAME = "field_value_factor";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
public static final FieldValueFactorFunction.Modifier DEFAULT_MODIFIER = FieldValueFactorFunction.Modifier.NONE;
public static final float DEFAULT_FACTOR = 1;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@ -28,7 +27,6 @@ import java.io.IOException;
public class LinearDecayFunctionBuilder extends DecayFunctionBuilder<LinearDecayFunctionBuilder> {
public static final String NAME = "linear";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
public static final ScoreFunctionParser<LinearDecayFunctionBuilder> PARSER = new DecayFunctionParser<>(LinearDecayFunctionBuilder::new);
public static final DecayFunction LINEAR_DECAY_FUNCTION = new LinearDecayScoreFunction();

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -40,7 +39,6 @@ import java.util.Objects;
*/
public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScoreFunctionBuilder> {
public static final String NAME = "random_score";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
private Integer seed;
public RandomScoreFunctionBuilder() {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -49,7 +48,6 @@ import java.util.Objects;
*/
public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScoreFunctionBuilder> {
public static final String NAME = "script_score";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
private final Script script;

View File

@ -98,6 +98,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.RepositoryPlugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.script.ScriptModule;
@ -132,6 +133,7 @@ import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* A node represent a node within a cluster (<tt>cluster.name</tt>). The {@link #client()} can be used
@ -182,6 +184,7 @@ public class Node implements Closeable {
private final NodeEnvironment nodeEnvironment;
private final PluginsService pluginsService;
private final NodeClient client;
private final Collection<LifecycleComponent> pluginLifecycleComponents;
/**
* Constructs a node with the given settings.
@ -276,7 +279,7 @@ public class Node implements Closeable {
ModulesBuilder modules = new ModulesBuilder();
// plugin modules must be added here, before others or we can get crazy injection errors...
for (Module pluginModule : pluginsService.nodeModules()) {
for (Module pluginModule : pluginsService.createGuiceModules()) {
modules.add(pluginModule);
}
final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool);
@ -286,7 +289,7 @@ public class Node implements Closeable {
ClusterModule clusterModule = new ClusterModule(settings, clusterService);
modules.add(clusterModule);
modules.add(new IndicesModule(namedWriteableRegistry, pluginsService.filterPlugins(MapperPlugin.class)));
modules.add(new SearchModule(settings, namedWriteableRegistry, false));
modules.add(new SearchModule(settings, namedWriteableRegistry, false, pluginsService.filterPlugins(SearchPlugin.class)));
modules.add(new ActionModule(DiscoveryNode.isIngestNode(settings), false, settings,
clusterModule.getIndexNameExpressionResolver(), settingsModule.getClusterSettings(),
pluginsService.filterPlugins(ActionPlugin.class)));
@ -300,6 +303,7 @@ public class Node implements Closeable {
resourcesToClose.add(bigArrays);
modules.add(settingsModule);
client = new NodeClient(settings, threadPool);
Collection<Object> pluginComponents = pluginsService.createComponenents();
modules.add(b -> {
b.bind(PluginsService.class).toInstance(pluginsService);
b.bind(Client.class).toInstance(client);
@ -314,10 +318,19 @@ public class Node implements Closeable {
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
b.bind(AnalysisRegistry.class).toInstance(analysisModule.getAnalysisRegistry());
b.bind(IngestService.class).toInstance(ingestService);
pluginComponents.stream().forEach(p -> b.bind((Class)p.getClass()).toInstance(p));
}
);
injector = modules.createInjector();
List<LifecycleComponent> pluginLifecycleComponents = pluginComponents.stream()
.filter(p -> p instanceof LifecycleComponent)
.map(p -> (LifecycleComponent)p).collect(Collectors.toList());
pluginLifecycleComponents.addAll(pluginsService.getGuiceServiceClasses().stream()
.map(injector::getInstance).collect(Collectors.toList()));
resourcesToClose.addAll(pluginLifecycleComponents);
this.pluginLifecycleComponents = Collections.unmodifiableList(pluginLifecycleComponents);
client.intialize(injector.getInstance(new Key<Map<GenericAction, TransportAction>>() {}));
success = true;
@ -373,9 +386,7 @@ public class Node implements Closeable {
logger.info("starting ...");
// hack around dependency injection problem (for now...)
injector.getInstance(Discovery.class).setAllocationService(injector.getInstance(AllocationService.class));
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
injector.getInstance(plugin).start();
}
pluginLifecycleComponents.forEach(LifecycleComponent::start);
injector.getInstance(MappingUpdatedAction.class).setClient(client);
injector.getInstance(IndicesService.class).start();
@ -511,9 +522,7 @@ public class Node implements Closeable {
injector.getInstance(RestController.class).stop();
injector.getInstance(TransportService.class).stop();
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
injector.getInstance(plugin).stop();
}
pluginLifecycleComponents.forEach(LifecycleComponent::stop);
// we should stop this last since it waits for resources to get released
// if we had scroll searchers etc or recovery going on we wait for to finish.
injector.getInstance(IndicesService.class).stop();
@ -577,9 +586,9 @@ public class Node implements Closeable {
toClose.add(() -> stopWatch.stop().start("transport"));
toClose.add(injector.getInstance(TransportService.class));
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
toClose.add(() -> stopWatch.stop().start("plugin(" + plugin.getName() + ")"));
toClose.add(injector.getInstance(plugin));
for (LifecycleComponent plugin : pluginLifecycleComponents) {
toClose.add(() -> stopWatch.stop().start("plugin(" + plugin.getClass().getName() + ")"));
toClose.add(plugin);
}
toClose.addAll(pluginsService.filterPlugins(Closeable.class));

View File

@ -20,6 +20,7 @@
package org.elasticsearch.plugins;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Setting;
@ -37,22 +38,40 @@ import java.util.List;
/**
* An extension point allowing to plug in custom functionality.
* <p>
* A plugin can register custom extensions to builtin behavior by implementing <tt>onModule(AnyModule)</tt>,
* and registering the extension with the given module.
* Implement any of these interfaces to extend Elasticsearch:
* <ul>
* <li>{@link ActionPlugin}
* <li>{@link AnalysisPlugin}
* <li>{@link MapperPlugin}
* <li>{@link ScriptPlugin}
* <li>{@link SearchPlugin}
* </ul>
*/
public abstract class Plugin {
/**
* Node level modules.
* Node level guice modules.
*/
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.emptyList();
}
/**
* Node level services that will be automatically started/stopped/closed.
* Node level services that will be automatically started/stopped/closed. These classes must be constructed
* by injection with Guice.
*/
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
return Collections.emptyList();
}
/**
* Returns components maintained by this plugin.
*
* Any components returned that implement {@link LifecycleComponent} will have their lifecycle managed.
* Note: To aid in the migration away from guice, all objects returned as components will be bound in guice
* to themselves.
*/
public Collection<Object> createComponents() {
return Collections.emptyList();
}

View File

@ -39,13 +39,7 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.threadpool.ExecutorBuilder;
import java.io.IOException;
@ -274,10 +268,10 @@ public class PluginsService extends AbstractComponent {
return builder.put(this.settings).build();
}
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
List<Module> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().nodeModules());
modules.addAll(plugin.v2().createGuiceModules());
}
return modules;
}
@ -290,14 +284,20 @@ public class PluginsService extends AbstractComponent {
return builders;
}
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
/** Returns all classes injected into guice by plugins which extend {@link LifecycleComponent}. */
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
List<Class<? extends LifecycleComponent>> services = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
services.addAll(plugin.v2().nodeServices());
services.addAll(plugin.v2().getGuiceServiceClasses());
}
return services;
}
/** Gets components from each plugin. This method should be called exactly once. */
public Collection<Object> createComponenents() {
return plugins.stream().flatMap(p -> p.v2().createComponents().stream()).collect(Collectors.toList());
}
public void onIndexModule(IndexModule indexModule) {
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
plugin.v2().onIndexModule(indexModule);

View File

@ -0,0 +1,177 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.suggest.Suggester;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
/**
 * Plugin for extending search time behavior.
 */
public interface SearchPlugin {
    /**
     * The new {@link ScoreFunction}s defined by this plugin.
     */
    default List<ScoreFunctionSpec<?>> getScoreFunctions() {
        return emptyList();
    }

    /**
     * The new {@link SignificanceHeuristic}s defined by this plugin. {@linkplain SignificanceHeuristic}s are used by the
     * {@link SignificantTerms} aggregation to pick which terms are significant for a given query.
     */
    default List<SearchPluginSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
        return emptyList();
    }

    /**
     * The new {@link MovAvgModel}s defined by this plugin. {@linkplain MovAvgModel}s are used by the {@link MovAvgPipelineAggregator} to
     * model trends in data.
     */
    default List<SearchPluginSpec<MovAvgModel, MovAvgModel.AbstractModelParser>> getMovingAverageModels() {
        return emptyList();
    }

    /**
     * The new {@link FetchSubPhase}s defined by this plugin.
     */
    default List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
        return emptyList();
    }

    /**
     * Get the {@link Highlighter}s defined by this plugin.
     */
    default Map<String, Highlighter> getHighlighters() {
        return emptyMap();
    }

    /**
     * The new {@link Suggester}s defined by this plugin.
     */
    default Map<String, Suggester<?>> getSuggesters() {
        return emptyMap();
    }

    /**
     * Specification of custom {@link ScoreFunction}.
     */
    // Members of an interface are implicitly public and static, so the modifiers are omitted.
    class ScoreFunctionSpec<T extends ScoreFunctionBuilder<T>> extends SearchPluginSpec<T, ScoreFunctionParser<T>> {
        public ScoreFunctionSpec(ParseField name, Reader<T> reader, ScoreFunctionParser<T> parser) {
            super(name, reader, parser);
        }

        public ScoreFunctionSpec(String name, Reader<T> reader, ScoreFunctionParser<T> parser) {
            super(name, reader, parser);
        }
    }

    /**
     * Specification of search time behavior extension like a custom {@link MovAvgModel} or {@link ScoreFunction}.
     *
     * @param <W> the type of the main {@link NamedWriteable} for this spec. All specs have this but it isn't always *for* the same thing
     *        though, usually it is some sort of builder sent from the coordinating node to the data nodes executing the behavior
     * @param <P> the type of the parser for this spec. The parser runs on the coordinating node, converting {@link XContent} into the
     *        behavior to execute
     */
    class SearchPluginSpec<W extends NamedWriteable, P> {
        private final ParseField name;
        private final Writeable.Reader<W> reader;
        private final P parser;

        /**
         * Build the spec with a {@linkplain ParseField}.
         *
         * @param name the name of the behavior as a {@linkplain ParseField}. The parser is registered under all names specified by the
         *        {@linkplain ParseField} but the reader is only registered under the {@link ParseField#getPreferredName()} so be sure that
         *        that is the name that W's {@link NamedWriteable#getWriteableName()} returns.
         * @param reader reader that reads the behavior from the internode protocol
         * @param parser parser that read the behavior from a REST request
         */
        public SearchPluginSpec(ParseField name, Writeable.Reader<W> reader, P parser) {
            this.name = name;
            this.reader = reader;
            this.parser = parser;
        }

        /**
         * Build the spec with a String.
         *
         * @param name the name of the behavior. The parser and the reader are registered under this name so be sure that that is the
         *        name that W's {@link NamedWriteable#getWriteableName()} returns.
         * @param reader reader that reads the behavior from the internode protocol
         * @param parser parser that read the behavior from a REST request
         */
        public SearchPluginSpec(String name, Writeable.Reader<W> reader, P parser) {
            this(new ParseField(name), reader, parser);
        }

        /**
         * The name of the thing being specified as a {@link ParseField}. This allows it to have deprecated names.
         */
        public ParseField getName() {
            return name;
        }

        /**
         * The reader responsible for reading the behavior from the internode protocol.
         */
        public Writeable.Reader<W> getReader() {
            return reader;
        }

        /**
         * The parser responsible for converting {@link XContent} into the behavior.
         */
        public P getParser() {
            return parser;
        }
    }

    /**
     * Context available during fetch phase construction.
     */
    class FetchPhaseConstructionContext {
        private final Map<String, Highlighter> highlighters;

        public FetchPhaseConstructionContext(Map<String, Highlighter> highlighters) {
            this.highlighters = highlighters;
        }

        public Map<String, Highlighter> getHighlighters() {
            return highlighters;
        }
    }
}

View File

@ -1,63 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.highlight.FastVectorHighlighter;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.highlight.PlainHighlighter;
import org.elasticsearch.search.highlight.PostingsHighlighter;
import java.util.HashMap;
import java.util.Map;
/**
 * An extensions point and registry for all the highlighters a node supports.
 */
public final class Highlighters {
    /** Registered highlighter implementations, keyed by the name used in search requests. */
    private final Map<String, Highlighter> highlighters = new HashMap<>();

    public Highlighters(Settings settings) {
        // The built-in highlighters are always available on every node.
        registerHighlighter("fvh", new FastVectorHighlighter(settings));
        registerHighlighter("plain", new PlainHighlighter());
        registerHighlighter("postings", new PostingsHighlighter());
    }

    /**
     * Returns the highlighter for the given key or <code>null</code> if there is no highlighter registered for that key.
     */
    public Highlighter get(String key) {
        return highlighters.get(key);
    }

    /**
     * Registers a highlighter for the given key
     * @param key the key the highlighter should be referenced by in the search request
     * @param highlighter the highlighter instance
     */
    void registerHighlighter(String key, Highlighter highlighter) {
        if (highlighter == null) {
            throw new IllegalArgumentException("Can't register null highlighter for key: [" + key + "]");
        }
        // putIfAbsent returns the previous mapping, which signals a duplicate registration.
        Highlighter previous = highlighters.putIfAbsent(key, highlighter);
        if (previous != null) {
            throw new IllegalArgumentException("Can't register the same [highlighter] more than once for [" + key + "]");
        }
    }
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search;
import org.apache.lucene.search.BooleanQuery;
import org.elasticsearch.common.NamedRegistry;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
@ -91,6 +92,10 @@ import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.WeightBuilder;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.plugins.SearchPlugin.FetchPhaseConstructionContext;
import org.elasticsearch.plugins.SearchPlugin.ScoreFunctionSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchPluginSpec;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
@ -248,8 +253,11 @@ import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase;
import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.source.FetchSourceSubPhase;
import org.elasticsearch.search.fetch.version.VersionFetchSubPhase;
import org.elasticsearch.search.highlight.FastVectorHighlighter;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.highlight.PlainHighlighter;
import org.elasticsearch.search.highlight.PostingsHighlighter;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
@ -259,21 +267,33 @@ import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggester;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Consumer;
import java.util.function.Function;
import static java.util.Collections.unmodifiableMap;
import static java.util.Objects.requireNonNull;
/**
*
* Sets up things that can be done at search time like queries, aggregations, and suggesters.
*/
public class SearchModule extends AbstractModule {
private final boolean transportClient;
private final Highlighters highlighters;
private final Suggesters suggesters;
private final Map<String, Highlighter> highlighters;
private final Map<String, Suggester<?>> suggesters;
private final ParseFieldRegistry<ScoreFunctionParser<?>> scoreFunctionParserRegistry = new ParseFieldRegistry<>("score_function");
private final IndicesQueriesRegistry queryParserRegistry = new IndicesQueriesRegistry();
private final ParseFieldRegistry<Aggregator.Parser> aggregationParserRegistry = new ParseFieldRegistry<>("aggregation");
@ -285,7 +305,7 @@ public class SearchModule extends AbstractModule {
private final ParseFieldRegistry<MovAvgModel.AbstractModelParser> movingAverageModelParserRegistry = new ParseFieldRegistry<>(
"moving_avg_model");
private final Set<FetchSubPhase> fetchSubPhases = new HashSet<>();
private final List<FetchSubPhase> fetchSubPhases = new ArrayList<>();
private final Settings settings;
private final NamedWriteableRegistry namedWriteableRegistry;
@ -295,53 +315,22 @@ public class SearchModule extends AbstractModule {
// pkg private so tests can mock
Class<? extends SearchService> searchServiceImpl = SearchService.class;
public SearchModule(Settings settings, NamedWriteableRegistry namedWriteableRegistry, boolean transportClient) {
public SearchModule(Settings settings, NamedWriteableRegistry namedWriteableRegistry, boolean transportClient,
List<SearchPlugin> plugins) {
this.settings = settings;
this.namedWriteableRegistry = namedWriteableRegistry;
this.transportClient = transportClient;
suggesters = new Suggesters(namedWriteableRegistry);
highlighters = new Highlighters(settings);
registerBuiltinScoreFunctionParsers();
suggesters = setupSuggesters(plugins);
highlighters = setupHighlighters(settings, plugins);
registerScoreFunctions(plugins);
registerBuiltinQueryParsers();
registerBuiltinRescorers();
registerBuiltinSorts();
registerBuiltinValueFormats();
registerBuiltinSignificanceHeuristics();
registerBuiltinMovingAverageModels();
registerBuiltinSubFetchPhases();
registerRescorers();
registerSorts();
registerValueFormats();
registerSignificanceHeuristics(plugins);
registerMovingAverageModels(plugins);
registerBuiltinAggregations();
}
public void registerHighlighter(String key, Highlighter highligher) {
highlighters.registerHighlighter(key, highligher);
}
public void registerSuggester(String key, Suggester<?> suggester) {
suggesters.register(key, suggester);
}
/**
* Register a new ScoreFunctionBuilder. Registration does two things:
* <ul>
* <li>Register the {@link ScoreFunctionParser} which parses XContent into a {@link ScoreFunctionBuilder} using its {@link ParseField}
* </li>
* <li>Register the {@link Writeable.Reader} which reads a stream representation of the builder under the
* {@linkplain ParseField#getPreferredName()}.</li>
* </ul>
*/
public <T extends ScoreFunctionBuilder<T>> void registerScoreFunction(Writeable.Reader<T> reader, ScoreFunctionParser<T> parser,
ParseField functionName) {
scoreFunctionParserRegistry.register(parser, functionName);
namedWriteableRegistry.register(ScoreFunctionBuilder.class, functionName.getPreferredName(), reader);
}
/**
* Register a new ValueFormat.
*/
// private for now, we can consider making it public if there are actual use cases for plugins
// to register custom value formats
private void registerValueFormat(String name, Writeable.Reader<? extends DocValueFormat> reader) {
namedWriteableRegistry.register(DocValueFormat.class, name, reader);
registerFetchSubPhases(plugins);
}
/**
@ -360,39 +349,21 @@ public class SearchModule extends AbstractModule {
namedWriteableRegistry.register(QueryBuilder.class, queryName.getPreferredName(), reader);
}
public Suggesters getSuggesters() {
return new Suggesters(suggesters);
}
public IndicesQueriesRegistry getQueryParserRegistry() {
return queryParserRegistry;
}
/**
* Registers a {@link FetchSubPhase} instance. This sub phase is executed when docuemnts are fetched for instanced to highlight
* documents.
*/
public void registerFetchSubPhase(FetchSubPhase subPhase) {
fetchSubPhases.add(Objects.requireNonNull(subPhase, "FetchSubPhase must not be null"));
}
/**
* Returns the {@link Highlighter} registry
*/
public Highlighters getHighlighters() {
public Map<String, Highlighter> getHighlighters() {
return highlighters;
}
/**
* Register a {@link SignificanceHeuristic}.
*
* @param heuristicName the name(s) at which the heuristic is parsed and streamed. The {@link ParseField#getPreferredName()} is the name
* under which it is streamed. All names work for the parser.
* @param reader reads the heuristic from a stream
* @param parser reads the heuristic from an XContentParser
*/
public void registerSignificanceHeuristic(ParseField heuristicName, Writeable.Reader<SignificanceHeuristic> reader,
SignificanceHeuristicParser parser) {
significanceHeuristicParserRegistry.register(parser, heuristicName);
namedWriteableRegistry.register(SignificanceHeuristic.class, heuristicName.getPreferredName(), reader);
}
/**
* The registry of {@link SignificanceHeuristic}s.
*/
@ -400,20 +371,6 @@ public class SearchModule extends AbstractModule {
return significanceHeuristicParserRegistry;
}
/**
* Register a {@link MovAvgModel}.
*
* @param modelName the name(s) at which the model is parsed and streamed. The {@link ParseField#getPreferredName()} is the name under
* which it is streamed. All named work for the parser.
* @param reader reads the model from a stream
* @param parser reads the model from an XContentParser
*/
public void registerMovingAverageModel(ParseField modelName, Writeable.Reader<MovAvgModel> reader,
MovAvgModel.AbstractModelParser parser) {
movingAverageModelParserRegistry.register(parser, modelName);
namedWriteableRegistry.register(MovAvgModel.class, modelName.getPreferredName(), reader);
}
/**
* The registry of {@link MovAvgModel}s.
*/
@ -517,7 +474,7 @@ public class SearchModule extends AbstractModule {
* NamedWriteableRegistry.
*/
bind(IndicesQueriesRegistry.class).toInstance(queryParserRegistry);
bind(Suggesters.class).toInstance(suggesters);
bind(Suggesters.class).toInstance(getSuggesters());
configureSearch();
configureShapes();
bind(AggregatorParsers.class).toInstance(aggregatorParsers);
@ -647,37 +604,88 @@ public class SearchModule extends AbstractModule {
}
}
private void registerBuiltinRescorers() {
private void registerRescorers() {
namedWriteableRegistry.register(RescoreBuilder.class, QueryRescorerBuilder.NAME, QueryRescorerBuilder::new);
}
private void registerBuiltinSorts() {
private void registerSorts() {
namedWriteableRegistry.register(SortBuilder.class, GeoDistanceSortBuilder.NAME, GeoDistanceSortBuilder::new);
namedWriteableRegistry.register(SortBuilder.class, ScoreSortBuilder.NAME, ScoreSortBuilder::new);
namedWriteableRegistry.register(SortBuilder.class, ScriptSortBuilder.NAME, ScriptSortBuilder::new);
namedWriteableRegistry.register(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new);
}
private void registerBuiltinScoreFunctionParsers() {
registerScoreFunction(ScriptScoreFunctionBuilder::new, ScriptScoreFunctionBuilder::fromXContent,
ScriptScoreFunctionBuilder.FUNCTION_NAME_FIELD);
registerScoreFunction(GaussDecayFunctionBuilder::new, GaussDecayFunctionBuilder.PARSER,
GaussDecayFunctionBuilder.FUNCTION_NAME_FIELD);
registerScoreFunction(LinearDecayFunctionBuilder::new, LinearDecayFunctionBuilder.PARSER,
LinearDecayFunctionBuilder.FUNCTION_NAME_FIELD);
registerScoreFunction(ExponentialDecayFunctionBuilder::new, ExponentialDecayFunctionBuilder.PARSER,
ExponentialDecayFunctionBuilder.FUNCTION_NAME_FIELD);
registerScoreFunction(RandomScoreFunctionBuilder::new, RandomScoreFunctionBuilder::fromXContent,
RandomScoreFunctionBuilder.FUNCTION_NAME_FIELD);
registerScoreFunction(FieldValueFactorFunctionBuilder::new, FieldValueFactorFunctionBuilder::fromXContent,
FieldValueFactorFunctionBuilder.FUNCTION_NAME_FIELD);
private <T> void registerFromPlugin(List<SearchPlugin> plugins, Function<SearchPlugin, List<T>> producer, Consumer<T> consumer) {
for (SearchPlugin plugin : plugins) {
for (T t : producer.apply(plugin)) {
consumer.accept(t);
}
}
}
public static void registerSmoothingModels(NamedWriteableRegistry namedWriteableRegistry) {
namedWriteableRegistry.register(SmoothingModel.class, Laplace.NAME, Laplace::new);
namedWriteableRegistry.register(SmoothingModel.class, LinearInterpolation.NAME, LinearInterpolation::new);
namedWriteableRegistry.register(SmoothingModel.class, StupidBackoff.NAME, StupidBackoff::new);
}
private Map<String, Suggester<?>> setupSuggesters(List<SearchPlugin> plugins) {
registerSmoothingModels(namedWriteableRegistry);
// Suggester<?> is weird - it is both a Parser and a reader....
NamedRegistry<Suggester<?>> suggesters = new NamedRegistry<Suggester<?>>("suggester") {
@Override
public void register(String name, Suggester<?> t) {
super.register(name, t);
namedWriteableRegistry.register(SuggestionBuilder.class, name, t);
}
};
suggesters.register("phrase", PhraseSuggester.INSTANCE);
suggesters.register("term", TermSuggester.INSTANCE);
suggesters.register("completion", CompletionSuggester.INSTANCE);
suggesters.extractAndRegister(plugins, SearchPlugin::getSuggesters);
return unmodifiableMap(suggesters.getRegistry());
}
private Map<String, Highlighter> setupHighlighters(Settings settings, List<SearchPlugin> plugins) {
NamedRegistry<Highlighter> highlighters = new NamedRegistry<>("highlighter");
highlighters.register("fvh", new FastVectorHighlighter(settings));
highlighters.register("plain", new PlainHighlighter());
highlighters.register("postings", new PostingsHighlighter());
highlighters.extractAndRegister(plugins, SearchPlugin::getHighlighters);
return unmodifiableMap(highlighters.getRegistry());
}
private void registerScoreFunctions(List<SearchPlugin> plugins) {
registerScoreFunction(new ScoreFunctionSpec<>(ScriptScoreFunctionBuilder.NAME, ScriptScoreFunctionBuilder::new,
ScriptScoreFunctionBuilder::fromXContent));
registerScoreFunction(
new ScoreFunctionSpec<>(GaussDecayFunctionBuilder.NAME, GaussDecayFunctionBuilder::new, GaussDecayFunctionBuilder.PARSER));
registerScoreFunction(new ScoreFunctionSpec<>(LinearDecayFunctionBuilder.NAME, LinearDecayFunctionBuilder::new,
LinearDecayFunctionBuilder.PARSER));
registerScoreFunction(new ScoreFunctionSpec<>(ExponentialDecayFunctionBuilder.NAME, ExponentialDecayFunctionBuilder::new,
ExponentialDecayFunctionBuilder.PARSER));
registerScoreFunction(new ScoreFunctionSpec<>(RandomScoreFunctionBuilder.NAME, RandomScoreFunctionBuilder::new,
RandomScoreFunctionBuilder::fromXContent));
registerScoreFunction(new ScoreFunctionSpec<>(FieldValueFactorFunctionBuilder.NAME, FieldValueFactorFunctionBuilder::new,
FieldValueFactorFunctionBuilder::fromXContent));
//weight doesn't have its own parser, so every function supports it out of the box.
//Can be a single function too when not associated to any other function, which is why it needs to be registered manually here.
namedWriteableRegistry.register(ScoreFunctionBuilder.class, WeightBuilder.NAME, WeightBuilder::new);
registerFromPlugin(plugins, SearchPlugin::getScoreFunctions, this::registerScoreFunction);
}
private void registerBuiltinValueFormats() {
private void registerScoreFunction(ScoreFunctionSpec<?> scoreFunction) {
scoreFunctionParserRegistry.register(scoreFunction.getParser(), scoreFunction.getName());
namedWriteableRegistry.register(ScoreFunctionBuilder.class, scoreFunction.getName().getPreferredName(), scoreFunction.getReader());
}
private void registerValueFormats() {
registerValueFormat(DocValueFormat.BOOLEAN.getWriteableName(), in -> DocValueFormat.BOOLEAN);
registerValueFormat(DocValueFormat.DateTime.NAME, DocValueFormat.DateTime::new);
registerValueFormat(DocValueFormat.Decimal.NAME, DocValueFormat.Decimal::new);
@ -686,24 +694,45 @@ public class SearchModule extends AbstractModule {
registerValueFormat(DocValueFormat.RAW.getWriteableName(), in -> DocValueFormat.RAW);
}
private void registerBuiltinSignificanceHeuristics() {
registerSignificanceHeuristic(ChiSquare.NAMES_FIELD, ChiSquare::new, ChiSquare.PARSER);
registerSignificanceHeuristic(GND.NAMES_FIELD, GND::new, GND.PARSER);
registerSignificanceHeuristic(JLHScore.NAMES_FIELD, JLHScore::new, JLHScore::parse);
registerSignificanceHeuristic(MutualInformation.NAMES_FIELD, MutualInformation::new, MutualInformation.PARSER);
registerSignificanceHeuristic(PercentageScore.NAMES_FIELD, PercentageScore::new, PercentageScore::parse);
registerSignificanceHeuristic(ScriptHeuristic.NAMES_FIELD, ScriptHeuristic::new, ScriptHeuristic::parse);
/**
* Register a new ValueFormat.
*/
private void registerValueFormat(String name, Writeable.Reader<? extends DocValueFormat> reader) {
namedWriteableRegistry.register(DocValueFormat.class, name, reader);
}
private void registerBuiltinMovingAverageModels() {
registerMovingAverageModel(SimpleModel.NAME_FIELD, SimpleModel::new, SimpleModel.PARSER);
registerMovingAverageModel(LinearModel.NAME_FIELD, LinearModel::new, LinearModel.PARSER);
registerMovingAverageModel(EwmaModel.NAME_FIELD, EwmaModel::new, EwmaModel.PARSER);
registerMovingAverageModel(HoltLinearModel.NAME_FIELD, HoltLinearModel::new, HoltLinearModel.PARSER);
registerMovingAverageModel(HoltWintersModel.NAME_FIELD, HoltWintersModel::new, HoltWintersModel.PARSER);
private void registerSignificanceHeuristics(List<SearchPlugin> plugins) {
registerSignificanceHeuristic(new SearchPluginSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
registerSignificanceHeuristic(new SearchPluginSpec<>(GND.NAME, GND::new, GND.PARSER));
registerSignificanceHeuristic(new SearchPluginSpec<>(JLHScore.NAME, JLHScore::new, JLHScore::parse));
registerSignificanceHeuristic(new SearchPluginSpec<>(MutualInformation.NAME, MutualInformation::new, MutualInformation.PARSER));
registerSignificanceHeuristic(new SearchPluginSpec<>(PercentageScore.NAME, PercentageScore::new, PercentageScore::parse));
registerSignificanceHeuristic(new SearchPluginSpec<>(ScriptHeuristic.NAME, ScriptHeuristic::new, ScriptHeuristic::parse));
registerFromPlugin(plugins, SearchPlugin::getSignificanceHeuristics, this::registerSignificanceHeuristic);
}
private void registerBuiltinSubFetchPhases() {
private void registerSignificanceHeuristic(SearchPluginSpec<SignificanceHeuristic, SignificanceHeuristicParser> heuristic) {
significanceHeuristicParserRegistry.register(heuristic.getParser(), heuristic.getName());
namedWriteableRegistry.register(SignificanceHeuristic.class, heuristic.getName().getPreferredName(), heuristic.getReader());
}
private void registerMovingAverageModels(List<SearchPlugin> plugins) {
registerMovingAverageModel(new SearchPluginSpec<>(SimpleModel.NAME, SimpleModel::new, SimpleModel.PARSER));
registerMovingAverageModel(new SearchPluginSpec<>(LinearModel.NAME, LinearModel::new, LinearModel.PARSER));
registerMovingAverageModel(new SearchPluginSpec<>(EwmaModel.NAME, EwmaModel::new, EwmaModel.PARSER));
registerMovingAverageModel(new SearchPluginSpec<>(HoltLinearModel.NAME, HoltLinearModel::new, HoltLinearModel.PARSER));
registerMovingAverageModel(new SearchPluginSpec<>(HoltWintersModel.NAME, HoltWintersModel::new, HoltWintersModel.PARSER));
registerFromPlugin(plugins, SearchPlugin::getMovingAverageModels, this::registerMovingAverageModel);
}
private void registerMovingAverageModel(SearchPluginSpec<MovAvgModel, MovAvgModel.AbstractModelParser> movAvgModel) {
movingAverageModelParserRegistry.register(movAvgModel.getParser(), movAvgModel.getName());
namedWriteableRegistry.register(MovAvgModel.class, movAvgModel.getName().getPreferredName(), movAvgModel.getReader());
}
private void registerFetchSubPhases(List<SearchPlugin> plugins) {
registerFetchSubPhase(new ExplainFetchSubPhase());
registerFetchSubPhase(new FieldDataFieldsFetchSubPhase());
registerFetchSubPhase(new ScriptFieldsFetchSubPhase());
@ -712,6 +741,17 @@ public class SearchModule extends AbstractModule {
registerFetchSubPhase(new MatchedQueriesFetchSubPhase());
registerFetchSubPhase(new HighlightPhase(settings, highlighters));
registerFetchSubPhase(new ParentFieldSubFetchPhase());
FetchPhaseConstructionContext context = new FetchPhaseConstructionContext(highlighters);
registerFromPlugin(plugins, p -> p.getFetchSubPhases(context), this::registerFetchSubPhase);
}
private void registerFetchSubPhase(FetchSubPhase subPhase) {
Class<?> subPhaseClass = subPhase.getClass();
if (fetchSubPhases.stream().anyMatch(p -> p.getClass().equals(subPhaseClass))) {
throw new IllegalArgumentException("FetchSubPhase [" + subPhaseClass + "] already registered");
}
fetchSubPhases.add(requireNonNull(subPhase, "FetchSubPhase must not be null"));
}
private void registerBuiltinQueryParsers() {
@ -806,8 +846,4 @@ public class SearchModule extends AbstractModule {
BucketSelectorPipelineAggregator.registerStreams();
SerialDiffPipelineAggregator.registerStreams();
}
public Suggesters getSuggesters() {
return suggesters;
}
}

View File

@ -21,14 +21,13 @@
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
public class ChiSquare extends NXYSignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("chi_square");
public static final String NAME = "chi_square";
public ChiSquare(boolean includeNegatives, boolean backgroundIsSuperset) {
super(includeNegatives, backgroundIsSuperset);
@ -51,7 +50,7 @@ public class ChiSquare extends NXYSignificanceHeuristic {
@Override
public int hashCode() {
int result = NAMES_FIELD.getPreferredName().hashCode();
int result = NAME.hashCode();
result = 31 * result + super.hashCode();
return result;
}
@ -74,12 +73,12 @@ public class ChiSquare extends NXYSignificanceHeuristic {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
super.build(builder);
builder.endObject();
return builder;
@ -100,7 +99,7 @@ public class ChiSquare extends NXYSignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
super.build(builder);
builder.endObject();
return builder;

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -33,7 +32,7 @@ import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
public class GND extends NXYSignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("gnd");
public static final String NAME = "gnd";
public GND(boolean backgroundIsSuperset) {
super(true, backgroundIsSuperset);
@ -61,7 +60,7 @@ public class GND extends NXYSignificanceHeuristic {
@Override
public int hashCode() {
int result = NAMES_FIELD.getPreferredName().hashCode();
int result = NAME.hashCode();
result = 31 * result + super.hashCode();
return result;
}
@ -96,12 +95,12 @@ public class GND extends NXYSignificanceHeuristic {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
builder.field(BACKGROUND_IS_SUPERSET.getPreferredName(), backgroundIsSuperset);
builder.endObject();
return builder;
@ -140,7 +139,7 @@ public class GND extends NXYSignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
builder.field(BACKGROUND_IS_SUPERSET.getPreferredName(), backgroundIsSuperset);
builder.endObject();
return builder;

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -33,7 +32,7 @@ import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
public class JLHScore extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("jlh");
public static final String NAME = "jlh";
public JLHScore() {
}
@ -51,7 +50,7 @@ public class JLHScore extends SignificanceHeuristic {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
/**
@ -101,7 +100,7 @@ public class JLHScore extends SignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
builder.startObject(NAME).endObject();
return builder;
}
@ -133,7 +132,7 @@ public class JLHScore extends SignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
builder.startObject(NAME).endObject();
return builder;
}
}

View File

@ -21,14 +21,13 @@
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
public class MutualInformation extends NXYSignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("mutual_information");
public static final String NAME = "mutual_information";
private static final double log2 = Math.log(2.0);
@ -54,7 +53,7 @@ public class MutualInformation extends NXYSignificanceHeuristic {
@Override
public int hashCode() {
int result = NAMES_FIELD.getPreferredName().hashCode();
int result = NAME.hashCode();
result = 31 * result + super.hashCode();
return result;
}
@ -108,18 +107,18 @@ public class MutualInformation extends NXYSignificanceHeuristic {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
super.build(builder);
builder.endObject();
return builder;
}
public static SignificanceHeuristicParser PARSER = new NXYParser() {
public static final SignificanceHeuristicParser PARSER = new NXYParser() {
@Override
protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
return new MutualInformation(includeNegatives, backgroundIsSuperset);
@ -134,7 +133,7 @@ public class MutualInformation extends NXYSignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
super.build(builder);
builder.endObject();
return builder;

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -33,7 +32,7 @@ import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
public class PercentageScore extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("percentage");
public static final String NAME = "percentage";
public PercentageScore() {
}
@ -48,12 +47,12 @@ public class PercentageScore extends SignificanceHeuristic {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
builder.startObject(NAME).endObject();
return builder;
}
@ -97,7 +96,7 @@ public class PercentageScore extends SignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
builder.startObject(NAME).endObject();
return builder;
}
}

View File

@ -22,8 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -48,7 +46,7 @@ import java.util.Map;
import java.util.Objects;
public class ScriptHeuristic extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("script_heuristic");
public static final String NAME = "script_heuristic";
private final LongAccessor subsetSizeHolder;
private final LongAccessor supersetSizeHolder;
@ -123,12 +121,12 @@ public class ScriptHeuristic extends SignificanceHeuristic {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
builder.field(ScriptField.SCRIPT.getPreferredName());
script.toXContent(builder, builderParams);
builder.endObject();
@ -205,7 +203,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName());
builder.startObject(NAME);
builder.field(ScriptField.SCRIPT.getPreferredName());
script.toXContent(builder, builderParams);
builder.endObject();

View File

@ -19,12 +19,15 @@
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.internal.SearchContext;
/**
* Heuristic for that {@link SignificantTerms} uses to pick out significant terms.
*/
public abstract class SignificanceHeuristic implements NamedWriteable, ToXContent {
/**
* @param subsetFreq The frequency of the term in the selected sample

View File

@ -20,7 +20,6 @@
package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -39,7 +38,6 @@ import java.util.Objects;
*/
public class EwmaModel extends MovAvgModel {
public static final String NAME = "ewma";
public static final ParseField NAME_FIELD = new ParseField(NAME);
public static final double DEFAULT_ALPHA = 0.3;
@ -120,7 +118,7 @@ public class EwmaModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
builder.startObject(MovAvgPipelineAggregationBuilder.SETTINGS.getPreferredName());
builder.field("alpha", alpha);
builder.endObject();
@ -174,7 +172,7 @@ public class EwmaModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
builder.startObject(MovAvgPipelineAggregationBuilder.SETTINGS.getPreferredName());
builder.field("alpha", alpha);

View File

@ -20,7 +20,6 @@
package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -38,7 +37,6 @@ import java.util.Objects;
*/
public class HoltLinearModel extends MovAvgModel {
public static final String NAME = "holt";
public static final ParseField NAME_FIELD = new ParseField(NAME);
public static final double DEFAULT_ALPHA = 0.3;
public static final double DEFAULT_BETA = 0.1;
@ -183,7 +181,7 @@ public class HoltLinearModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
builder.startObject(MovAvgPipelineAggregationBuilder.SETTINGS.getPreferredName());
builder.field("alpha", alpha);
builder.field("beta", beta);
@ -255,7 +253,7 @@ public class HoltLinearModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
builder.startObject(MovAvgPipelineAggregationBuilder.SETTINGS.getPreferredName());
builder.field("alpha", alpha);
builder.field("beta", beta);

View File

@ -44,7 +44,6 @@ import java.util.Objects;
*/
public class HoltWintersModel extends MovAvgModel {
public static final String NAME = "holt_winters";
public static final ParseField NAME_FIELD = new ParseField(NAME);
public static final double DEFAULT_ALPHA = 0.3;
public static final double DEFAULT_BETA = 0.1;
@ -366,7 +365,7 @@ public class HoltWintersModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
builder.startObject(MovAvgPipelineAggregationBuilder.SETTINGS.getPreferredName());
builder.field("alpha", alpha);
builder.field("beta", beta);
@ -495,7 +494,7 @@ public class HoltWintersModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
builder.startObject(MovAvgPipelineAggregationBuilder.SETTINGS.getPreferredName());
builder.field("alpha", alpha);
builder.field("beta", beta);

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -40,7 +39,6 @@ import java.util.Map;
*/
public class LinearModel extends MovAvgModel {
public static final String NAME = "linear";
public static final ParseField NAME_FIELD = new ParseField("linear");
public LinearModel() {
}
@ -102,7 +100,7 @@ public class LinearModel extends MovAvgModel {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
return builder;
}
@ -118,7 +116,7 @@ public class LinearModel extends MovAvgModel {
public static class LinearModelBuilder implements MovAvgModelBuilder {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
return builder;
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -38,7 +37,6 @@ import java.util.Map;
*/
public class SimpleModel extends MovAvgModel {
public static final String NAME = "simple";
public static final ParseField NAME_FIELD = new ParseField(NAME);
public SimpleModel() {
}
@ -111,7 +109,7 @@ public class SimpleModel extends MovAvgModel {
public static class SimpleModelBuilder implements MovAvgModelBuilder {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME_FIELD.getPreferredName());
builder.field(MovAvgPipelineAggregationBuilder.MODEL.getPreferredName(), NAME);
return builder;
}

View File

@ -72,7 +72,7 @@ public class FetchPhase implements SearchPhase {
private final FetchSubPhase[] fetchSubPhases;
public FetchPhase(Set<FetchSubPhase> fetchSubPhases) {
public FetchPhase(List<FetchSubPhase> fetchSubPhases) {
this.fetchSubPhases = fetchSubPhases.toArray(new FetchSubPhase[fetchSubPhases.size() + 1]);
this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsFetchSubPhase(this);
}

View File

@ -22,7 +22,6 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
@ -32,7 +31,7 @@ import java.util.HashMap;
import java.util.Map;
/**
*
* Sub phase within the fetch phase used to fetch things *about* the documents highlghting.
*/
public interface FetchSubPhase {

View File

@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.search.Highlighters;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
@ -43,9 +42,9 @@ import java.util.Map;
public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
private static final List<String> STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain");
private final Highlighters highlighters;
private final Map<String, Highlighter> highlighters;
public HighlightPhase(Settings settings, Highlighters highlighters) {
public HighlightPhase(Settings settings, Map<String, Highlighter> highlighters) {
super(settings);
this.highlighters = highlighters;
}

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.highlight;
import org.elasticsearch.index.mapper.FieldMapper;
/**
*
* Highlights a search result.
*/
public interface Highlighter {

View File

@ -192,19 +192,23 @@ public class DefaultSearchContext extends SearchContext {
if (hasOnlySuggest() ) {
return;
}
if (scrollContext == null) {
long from = from() == -1 ? 0 : from();
long size = size() == -1 ? 10 : size();
long resultWindow = from + size;
int maxResultWindow = indexService.getIndexSettings().getMaxResultWindow();
if (resultWindow > maxResultWindow) {
if (scrollContext == null) {
throw new QueryPhaseExecutionException(this,
"Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was ["
+ resultWindow + "]. See the scroll api for a more efficient way to request large data sets. "
+ "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey()
+ "] index level setting.");
}
throw new QueryPhaseExecutionException(this,
"Batch size is too large, size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow
+ "]. Scroll batch sizes cost as much memory as result windows so they are controlled by the ["
+ IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level setting.");
}
if (rescore != null) {
int maxWindow = indexService.getIndexSettings().getMaxRescoreWindow();

View File

@ -18,43 +18,16 @@
*/
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggester;
import java.util.HashMap;
import java.util.Map;
/**
*
* Registry of Suggesters. This is only its own class to make Guice happy.
*/
public final class Suggesters {
private final Map<String, Suggester<?>> suggesters = new HashMap<>();
private final NamedWriteableRegistry namedWriteableRegistry;
private final Map<String, Suggester<?>> suggesters;
public Suggesters(NamedWriteableRegistry namedWriteableRegistry) {
this.namedWriteableRegistry = namedWriteableRegistry;
register("phrase", PhraseSuggester.INSTANCE);
register("term", TermSuggester.INSTANCE);
register("completion", CompletionSuggester.INSTANCE);
// Builtin smoothing models
namedWriteableRegistry.register(SmoothingModel.class, Laplace.NAME, Laplace::new);
namedWriteableRegistry.register(SmoothingModel.class, LinearInterpolation.NAME, LinearInterpolation::new);
namedWriteableRegistry.register(SmoothingModel.class, StupidBackoff.NAME, StupidBackoff::new);
}
public void register(String key, Suggester<?> suggester) {
if (suggesters.containsKey(key)) {
throw new IllegalArgumentException("Can't register the same [suggester] more than once for [" + key + "]");
}
suggesters.put(key, suggester);
namedWriteableRegistry.register(SuggestionBuilder.class, key, suggester);
public Suggesters(Map<String, Suggester<?>> suggesters) {
this.suggesters = suggesters;
}
public Suggester<?> getSuggester(String suggesterName) {

View File

@ -579,7 +579,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
public static class TestPlugin extends Plugin {
@Override
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
List<Class<? extends LifecycleComponent>> services = new ArrayList<>(1);
services.add(MasterAwareService.class);
return services;

View File

@ -60,7 +60,7 @@ public class SettingsListenerIT extends ESIntegTestCase {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.<Module>singletonList(new SettingsListenerModule(service));
}
}

View File

@ -30,6 +30,8 @@ import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import static java.util.Collections.emptyList;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -67,7 +69,7 @@ public class InnerHitBuilderTests extends ESTestCase {
@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry();
}
@AfterClass

View File

@ -25,7 +25,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint;
@ -37,11 +36,7 @@ import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
@ -50,21 +45,24 @@ import org.elasticsearch.index.query.RandomQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matcher;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@ -728,7 +726,6 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
*/
static class RandomScoreFunctionBuilderWithFixedSeed extends RandomScoreFunctionBuilder {
public static final String NAME = "random_with_fixed_seed";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
public RandomScoreFunctionBuilderWithFixedSeed() {
}
@ -760,11 +757,11 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
}
}
public static class TestPlugin extends Plugin {
public void onModule(SearchModule module) {
module.registerScoreFunction(RandomScoreFunctionBuilderWithFixedSeed::new,
RandomScoreFunctionBuilderWithFixedSeed::fromXContent, RandomScoreFunctionBuilderWithFixedSeed.FUNCTION_NAME_FIELD);
}
public static class TestPlugin extends Plugin implements SearchPlugin {
@Override
public List<ScoreFunctionSpec<?>> getScoreFunctions() {
return singletonList(new ScoreFunctionSpec<>(RandomScoreFunctionBuilderWithFixedSeed.NAME,
RandomScoreFunctionBuilderWithFixedSeed::new, RandomScoreFunctionBuilderWithFixedSeed::fromXContent));
}
}
}

View File

@ -27,19 +27,36 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
import org.elasticsearch.search.highlight.CustomHighlighter;
import org.elasticsearch.search.highlight.FastVectorHighlighter;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.highlight.PlainHighlighter;
import org.elasticsearch.search.highlight.PostingsHighlighter;
import org.elasticsearch.search.suggest.CustomSuggester;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.term.TermSuggester;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
@ -47,55 +64,98 @@ import static org.hamcrest.Matchers.notNullValue;
public class SearchModuleTests extends ModuleTestCase {
public void testDoubleRegister() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false);
try {
module.registerHighlighter("fvh", new PlainHighlighter());
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Can't register the same [highlighter] more than once for [fvh]");
SearchPlugin registersDupeHighlighter = new SearchPlugin() {
@Override
public Map<String, Highlighter> getHighlighters() {
return singletonMap("plain", new PlainHighlighter());
}
};
expectThrows(IllegalArgumentException.class,
() -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(registersDupeHighlighter)));
try {
module.registerSuggester("term", PhraseSuggester.INSTANCE);
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [term]");
SearchPlugin registersDupeSuggester = new SearchPlugin() {
@Override
public Map<String,org.elasticsearch.search.suggest.Suggester<?>> getSuggesters() {
return singletonMap("term", TermSuggester.INSTANCE);
}
};
expectThrows(IllegalArgumentException.class,
() -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(registersDupeSuggester)));
SearchPlugin registersDupeScoreFunction = new SearchPlugin() {
@Override
public List<ScoreFunctionSpec<?>> getScoreFunctions() {
return singletonList(new ScoreFunctionSpec<>(GaussDecayFunctionBuilder.NAME, GaussDecayFunctionBuilder::new,
GaussDecayFunctionBuilder.PARSER));
}
};
expectThrows(IllegalArgumentException.class,
() -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(registersDupeScoreFunction)));
public void testRegisterSuggester() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false);
module.registerSuggester("custom", CustomSuggester.INSTANCE);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> module.registerSuggester("custom", CustomSuggester.INSTANCE));
assertEquals("Can't register the same [suggester] more than once for [custom]", e.getMessage());
SearchPlugin registersDupeSignificanceHeuristic = new SearchPlugin() {
@Override
public List<SearchPluginSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
return singletonList(new SearchPluginSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
}
};
expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
singletonList(registersDupeSignificanceHeuristic)));
public void testRegisterHighlighter() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false);
CustomHighlighter customHighlighter = new CustomHighlighter();
module.registerHighlighter("custom", customHighlighter);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
() -> module.registerHighlighter("custom", new CustomHighlighter()));
assertEquals("Can't register the same [highlighter] more than once for [custom]", exception.getMessage());
exception = expectThrows(IllegalArgumentException.class,
() -> module.registerHighlighter("custom", null));
assertEquals("Can't register null highlighter for key: [custom]", exception.getMessage());
Highlighters highlighters = module.getHighlighters();
assertEquals(highlighters.get("fvh").getClass(), FastVectorHighlighter.class);
assertEquals(highlighters.get("plain").getClass(), PlainHighlighter.class);
assertEquals(highlighters.get("postings").getClass(), PostingsHighlighter.class);
assertSame(highlighters.get("custom"), customHighlighter);
SearchPlugin registersDupeMovAvgModel = new SearchPlugin() {
@Override
public List<SearchPluginSpec<MovAvgModel, MovAvgModel.AbstractModelParser>> getMovingAverageModels() {
return singletonList(new SearchPluginSpec<>(SimpleModel.NAME, SimpleModel::new, SimpleModel.PARSER));
}
};
expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
singletonList(registersDupeMovAvgModel)));
public void testRegisterQueryParserDuplicate() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false);
SearchPlugin registersDupeFetchSubPhase = new SearchPlugin() {
@Override
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return singletonList(new ExplainFetchSubPhase());
}
};
expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
singletonList(registersDupeFetchSubPhase)));
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> module
.registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD));
assertThat(e.getMessage(), containsString("] already registered for [query][term] while trying to register [org.elasticsearch."));
}
public void testRegisterSuggester() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(new SearchPlugin() {
@Override
public Map<String, Suggester<?>> getSuggesters() {
return singletonMap("custom", CustomSuggester.INSTANCE);
}
}));
assertSame(TermSuggester.INSTANCE, module.getSuggesters().getSuggester("term"));
assertSame(PhraseSuggester.INSTANCE, module.getSuggesters().getSuggester("phrase"));
assertSame(CompletionSuggester.INSTANCE, module.getSuggesters().getSuggester("completion"));
assertSame(CustomSuggester.INSTANCE, module.getSuggesters().getSuggester("custom"));
}
public void testRegisterHighlighter() {
CustomHighlighter customHighlighter = new CustomHighlighter();
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(new SearchPlugin() {
@Override
public Map<String, Highlighter> getHighlighters() {
return singletonMap("custom", customHighlighter);
}
}));
Map<String, Highlighter> highlighters = module.getHighlighters();
assertEquals(FastVectorHighlighter.class, highlighters.get("fvh").getClass());
assertEquals(PlainHighlighter.class, highlighters.get("plain").getClass());
assertEquals(PostingsHighlighter.class, highlighters.get("postings").getClass());
assertSame(highlighters.get("custom"), customHighlighter);
}
public void testRegisteredQueries() throws IOException {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false);
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList());
List<String> allSupportedQueries = new ArrayList<>();
Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES);
Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES);

View File

@ -62,6 +62,7 @@ import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.containsString;
@ -119,7 +120,7 @@ public class AggregatorParsingTests extends ESTestCase {
protected void configure() {
bindMapperExtension();
}
}, new SearchModule(settings, namedWriteableRegistry, false) {
}, new SearchModule(settings, namedWriteableRegistry, false, emptyList()) {
@Override
protected void configureSearch() {
// Skip me

View File

@ -63,6 +63,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.equalTo;
@ -151,7 +152,7 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
bindMapperExtension();
}
},
new SearchModule(settings, namedWriteableRegistry, false) {
new SearchModule(settings, namedWriteableRegistry, false, emptyList()) {
@Override
protected void configureSearch() {
// Skip me

View File

@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -31,11 +30,10 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams;
@ -47,6 +45,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.test.ESIntegTestCase;
@ -62,6 +61,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import static java.util.Collections.singletonList;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
@ -167,20 +167,22 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
}
}
public static class CustomSignificanceHeuristicPlugin extends Plugin implements ScriptPlugin {
public void onModule(SearchModule searchModule) {
searchModule.registerSignificanceHeuristic(SimpleHeuristic.NAMES_FIELD, SimpleHeuristic::new, SimpleHeuristic::parse);
public static class CustomSignificanceHeuristicPlugin extends Plugin implements ScriptPlugin, SearchPlugin {
@Override
public List<SearchPluginSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
return singletonList(new SearchPluginSpec<SignificanceHeuristic, SignificanceHeuristicParser>(SimpleHeuristic.NAME,
SimpleHeuristic::new, SimpleHeuristic::parse));
}
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Arrays.asList(new NativeSignificanceScoreScriptNoParams.Factory(), new NativeSignificanceScoreScriptWithParams.Factory());
return Arrays.asList(new NativeSignificanceScoreScriptNoParams.Factory(),
new NativeSignificanceScoreScriptWithParams.Factory());
}
}
public static class SimpleHeuristic extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("simple");
public static final String NAME = "simple";
public SimpleHeuristic() {
}
@ -199,12 +201,12 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
builder.startObject(NAME).endObject();
return builder;
}

View File

@ -62,6 +62,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.equalTo;
@ -106,7 +107,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
StreamInput in = new InputStreamStreamInput(inBuffer);
NamedWriteableRegistry registry = new NamedWriteableRegistry();
new SearchModule(Settings.EMPTY, registry, false); // populates the registry through side effects
new SearchModule(Settings.EMPTY, registry, false, emptyList()); // populates the registry through side effects
in = new NamedWriteableAwareStreamInput(in, registry);
in.setVersion(version);
sigTerms[1].readFrom(in);
@ -202,7 +203,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
// 1. The output of the builders can actually be parsed
// 2. The parser does not swallow parameters after a significance heuristic was defined
public void testBuilderAndParser() throws Exception {
SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false);
SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList());
ParseFieldRegistry<SignificanceHeuristicParser> heuristicParserMapper = searchModule.getSignificanceHeuristicParserRegistry();
SearchContext searchContext = new SignificantTermsTestSearchContext();

View File

@ -87,6 +87,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.CoreMatchers.containsString;
@ -145,7 +146,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
bindMapperExtension();
}
},
new SearchModule(settings, namedWriteableRegistry, false) {
new SearchModule(settings, namedWriteableRegistry, false, emptyList()) {
@Override
protected void configureSearch() {
// Skip me

View File

@ -32,11 +32,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESIntegTestCase;
@ -47,8 +46,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -99,9 +100,10 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
equalTo(1));
}
public static class FetchTermVectorsPlugin extends Plugin {
public void onModule(SearchModule searchModule) {
searchModule.registerFetchSubPhase(new TermVectorsFetchSubPhase());
public static class FetchTermVectorsPlugin extends Plugin implements SearchPlugin {
@Override
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return singletonList(new TermVectorsFetchSubPhase());
}
}

View File

@ -23,7 +23,6 @@ import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@ -32,8 +31,8 @@ import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder;
import org.elasticsearch.index.query.functionscore.DecayFunctionParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@ -41,7 +40,9 @@ import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import static java.util.Collections.singletonList;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -94,16 +95,16 @@ public class FunctionScorePluginIT extends ESIntegTestCase {
}
public static class CustomDistanceScorePlugin extends Plugin {
public void onModule(SearchModule scoreModule) {
scoreModule.registerScoreFunction(CustomDistanceScoreBuilder::new, CustomDistanceScoreBuilder.PARSER,
CustomDistanceScoreBuilder.FUNCTION_NAME_FIELD);
public static class CustomDistanceScorePlugin extends Plugin implements SearchPlugin {
@Override
public List<ScoreFunctionSpec<?>> getScoreFunctions() {
return singletonList(new ScoreFunctionSpec<>(CustomDistanceScoreBuilder.NAME, CustomDistanceScoreBuilder::new,
CustomDistanceScoreBuilder.PARSER));
}
}
public static class CustomDistanceScoreBuilder extends DecayFunctionBuilder<CustomDistanceScoreBuilder> {
public static final String NAME = "linear_mult";
public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME);
public static final ScoreFunctionParser<CustomDistanceScoreBuilder> PARSER = new DecayFunctionParser<>(
CustomDistanceScoreBuilder::new);

View File

@ -20,11 +20,15 @@
package org.elasticsearch.search.highlight;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.plugins.SearchPlugin;
public class CustomHighlighterPlugin extends Plugin {
import java.util.Map;
public void onModule(SearchModule highlightModule) {
highlightModule.registerHighlighter("test-custom", new CustomHighlighter());
import static java.util.Collections.singletonMap;
public class CustomHighlighterPlugin extends Plugin implements SearchPlugin {
@Override
public Map<String, Highlighter> getHighlighters() {
return singletonMap("test-custom", new CustomHighlighter());
}
}

View File

@ -35,7 +35,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHigh
import static org.hamcrest.Matchers.equalTo;
/**
*
* Integration test for highlighters registered by a plugin.
*/
@ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
public class CustomHighlighterSearchIT extends ESIntegTestCase {

View File

@ -68,6 +68,7 @@ import java.util.TreeSet;
import java.util.function.BiConsumer;
import java.util.function.Function;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -83,7 +84,7 @@ public class HighlightBuilderTests extends ESTestCase {
@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry();
}
@AfterClass

View File

@ -55,6 +55,7 @@ import org.junit.BeforeClass;
import java.io.IOException;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -70,7 +71,7 @@ public class QueryRescoreBuilderTests extends ESTestCase {
@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry();
}
@AfterClass

View File

@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.search.RestClearScrollAction;
@ -68,7 +69,7 @@ import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
/**
*
* Tests for scrolling.
*/
public class SearchScrollIT extends ESIntegTestCase {
public void testSimpleScrollQueryThenFetch() throws Exception {
@ -419,11 +420,20 @@ public class SearchScrollIT extends ESIntegTestCase {
assertThrows(internalCluster().transportClient().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND);
}
/**
* Tests that we use an optimization shrinking the batch to the size of the shard. Thus the Integer.MAX_VALUE window doesn't OOM us.
*/
public void testDeepScrollingDoesNotBlowUp() throws Exception {
client().prepareIndex("index", "type", "1")
.setSource("field", "value")
.setRefreshPolicy(IMMEDIATE)
.execute().get();
/*
* Disable the max result window setting for this test because it'll reject the search's unreasonable batch size. We want
* unreasonable batch sizes to just OOM.
*/
client().admin().indices().prepareUpdateSettings("index")
.setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE)).get();
for (SearchType searchType : SearchType.values()) {
SearchRequestBuilder builder = client().prepareSearch("index")

View File

@ -75,6 +75,7 @@ import java.nio.file.Path;
import java.util.Collections;
import java.util.Map;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -106,7 +107,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
};
namedWriteableRegistry = new NamedWriteableRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry();
}
@AfterClass

View File

@ -41,6 +41,8 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static java.util.Collections.emptyList;
public class SortBuilderTests extends ESTestCase {
private static final int NUMBER_OF_RUNS = 20;
@ -52,7 +54,7 @@ public class SortBuilderTests extends ESTestCase {
@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry();
}
@AfterClass

View File

@ -39,6 +39,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -56,7 +58,7 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
@BeforeClass
public static void init() throws IOException {
namedWriteableRegistry = new NamedWriteableRegistry();
SearchModule searchModule = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false);
SearchModule searchModule = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList());
queriesRegistry = searchModule.getQueryParserRegistry();
suggesters = searchModule.getSuggesters();
parseFieldMatcher = ParseFieldMatcher.STRICT;

View File

@ -32,7 +32,7 @@ import java.util.Map;
public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestionsContext> {
public static CustomSuggester INSTANCE = new CustomSuggester();
public static final CustomSuggester INSTANCE = new CustomSuggester();
// This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
@Override

View File

@ -19,12 +19,15 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.plugins.SearchPlugin;
public class CustomSuggesterPlugin extends Plugin {
import java.util.Map;
public void onModule(SearchModule searchModule) {
searchModule.registerSuggester("custom", CustomSuggester.INSTANCE);
import static java.util.Collections.singletonMap;
public class CustomSuggesterPlugin extends Plugin implements SearchPlugin {
@Override
public Map<String, Suggester<?>> getSuggesters() {
return singletonMap("custom", CustomSuggester.INSTANCE);
}
}

View File

@ -51,7 +51,7 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
/**
*
* Integration test for registering a custom suggester.
*/
@ClusterScope(scope= Scope.SUITE, numDataNodes =1)
public class CustomSuggesterSearchIT extends ESIntegTestCase {

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -30,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.completion.CompletionSuggesterBuilderTests;
import org.elasticsearch.search.suggest.completion.WritableTestCase;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilderTests;
@ -40,6 +42,8 @@ import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Map.Entry;
import static java.util.Collections.emptyList;
public class SuggestBuilderTests extends WritableTestCase<SuggestBuilder> {
private static NamedWriteableRegistry namedWriteableRegistry;
@ -51,7 +55,7 @@ public class SuggestBuilderTests extends WritableTestCase<SuggestBuilder> {
@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
suggesters = new Suggesters(namedWriteableRegistry);
suggesters = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getSuggesters();
}
@AfterClass

View File

@ -44,7 +44,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -67,7 +67,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
public static void init() {
if (namedWriteableRegistry == null) {
namedWriteableRegistry = new NamedWriteableRegistry();
new Suggesters(namedWriteableRegistry);
SearchModule.registerSmoothingModels(namedWriteableRegistry);
}
}

View File

@ -141,6 +141,12 @@ remove their `onModule(ActionModule)` implementation.
Plugins that register custom `RestHandler`s should implement `ActionPlugin` and
remove their `onModule(NetworkModule)` implementation.
==== SearchPlugin
Plugins that register custom search time behavior (`Suggester`, `ScoreFunction`,
`FetchSubPhase`, `Highlighter`, etc) should implement `SearchPlugin` and remove
their `onModule(SearchModule)` implementation.
==== Mapper-Size plugin
The metadata field `_size` is not accessible in aggregations, scripts and when

View File

@ -118,7 +118,7 @@ public class TemplateQueryParserTests extends ESTestCase {
b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
},
settingsModule,
new SearchModule(settings, new NamedWriteableRegistry(), false) {
new SearchModule(settings, new NamedWriteableRegistry(), false, emptyList()) {
@Override
protected void configureSearch() {
// skip so we don't need transport

View File

@ -33,9 +33,9 @@ import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.Highlighters;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
@ -44,6 +44,7 @@ import org.elasticsearch.search.internal.SubSearchContext;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* Highlighting in the case of the percolate query is a bit different, because the PercolateQuery itself doesn't get highlighted,
@ -51,7 +52,7 @@ import java.util.List;
*/
public final class PercolatorHighlightSubFetchPhase extends HighlightPhase {
public PercolatorHighlightSubFetchPhase(Settings settings, Highlighters highlighters) {
public PercolatorHighlightSubFetchPhase(Settings settings, Map<String, Highlighter> highlighters) {
super(settings, highlighters);
}

View File

@ -27,15 +27,19 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.fetch.FetchSubPhase;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlugin {
import static java.util.Collections.singletonList;
public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlugin, SearchPlugin {
private final Settings settings;
@ -56,7 +60,11 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlug
public void onModule(SearchModule module) {
module.registerQuery(PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent, PercolateQueryBuilder.QUERY_NAME_FIELD);
module.registerFetchSubPhase(new PercolatorHighlightSubFetchPhase(settings, module.getHighlighters()));
}
@Override
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return singletonList(new PercolatorHighlightSubFetchPhase(settings, context.getHighlighters()));
}
@Override

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.Highlighters;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
@ -38,6 +37,7 @@ import org.mockito.Mockito;
import java.util.Arrays;
import java.util.Collections;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
@ -49,7 +49,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
"", ctx -> null, new BytesArray("{}"), new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()
);
PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(Settings.EMPTY,
new Highlighters(Settings.EMPTY));
emptyMap());
SearchContext searchContext = Mockito.mock(SearchContext.class);
Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList()));
Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());

View File

@ -51,7 +51,7 @@ public class AzureDiscoveryPlugin extends Plugin {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.singletonList((Module) new AzureDiscoveryModule(settings));
}

View File

@ -86,7 +86,7 @@ public class Ec2DiscoveryPlugin extends Plugin {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
Collection<Module> modules = new ArrayList<>();
modules.add(new Ec2Module());
return modules;
@ -94,7 +94,7 @@ public class Ec2DiscoveryPlugin extends Plugin {
@Override
@SuppressWarnings("rawtypes") // Supertype uses rawtype
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
Collection<Class<? extends LifecycleComponent>> services = new ArrayList<>();
services.add(AwsEc2ServiceImpl.class);
return services;

View File

@ -70,13 +70,13 @@ public class GceDiscoveryPlugin extends Plugin {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.singletonList(new GceModule());
}
@Override
@SuppressWarnings("rawtypes") // Supertype uses raw type
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
return Collections.singletonList(GceModule.getComputeServiceImpl());
}

View File

@ -44,13 +44,13 @@ public class JvmExamplePlugin extends Plugin {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.<Module>singletonList(new ConfiguredExampleModule());
}
@Override
@SuppressWarnings("rawtypes") // Plugin use a rawtype
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
Collection<Class<? extends LifecycleComponent>> services = new ArrayList<>();
return services;
}

View File

@ -292,7 +292,7 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase {
public static class ActionLoggingPlugin extends Plugin implements ActionPlugin {
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.<Module>singletonList(new ActionLoggingModule());
}

View File

@ -10,13 +10,22 @@ setup:
indices.refresh: {}
---
"Request window limits":
"Request window limits without scroll":
- do:
catch: /Result window is too large, from \+ size must be less than or equal to[:] \[10000\] but was \[10010\]/
catch: /Result window is too large, from \+ size must be less than or equal to[:] \[10000\] but was \[10010\]\. See the scroll api for a more efficient way to request large data sets\./
search:
index: test_1
from: 10000
---
"Request window limits with scroll":
- do:
catch: /Batch size is too large, size must be less than or equal to[:] \[10000\] but was \[10010\]\. Scroll batch sizes cost as much memory as result windows so they are controlled by the \[index.max_result_window\] index level setting\./
search:
index: test_1
scroll: 5m
from: 10000
---
"Rescore window limits":
- do:

View File

@ -49,7 +49,7 @@ public class MockEngineFactoryPlugin extends Plugin {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.singleton(new MockEngineReaderModule());
}

View File

@ -94,6 +94,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.Script.ScriptParseException;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
@ -866,14 +867,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
scriptSettings.addAll(pluginsService.getPluginSettings());
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, pluginsService.getPluginSettingsFilter());
searchModule = new SearchModule(settings, namedWriteableRegistry, false) {
searchModule = new SearchModule(settings, namedWriteableRegistry, false, pluginsService.filterPlugins(SearchPlugin.class)) {
@Override
protected void configureSearch() {
// Skip me
}
};
ModulesBuilder modulesBuilder = new ModulesBuilder();
for (Module pluginModule : pluginsService.nodeModules()) {
for (Module pluginModule : pluginsService.createGuiceModules()) {
modulesBuilder.add(pluginModule);
}
modulesBuilder.add(

View File

@ -70,7 +70,7 @@ public final class MockIndexEventListener {
}
@Override
public Collection<Module> nodeModules() {
public Collection<Module> createGuiceModules() {
return Collections.singleton(binder -> binder.bind(TestEventListener.class).toInstance(listener));
}
}

View File

@ -76,6 +76,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Set;
import static java.util.Collections.emptyList;
import static org.apache.lucene.util.LuceneTestCase.random;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
@ -627,7 +628,7 @@ public class ElasticsearchAssertions {
registry = ESIntegTestCase.internalCluster().getInstance(NamedWriteableRegistry.class);
} else {
registry = new NamedWriteableRegistry();
new SearchModule(Settings.EMPTY, registry, false);
new SearchModule(Settings.EMPTY, registry, false, emptyList());
}
assertVersionSerializable(version, streamable, registry);
}