Merge branch 'master' into mapper_plugin_api

Ryan Ernst, 2016-06-27 11:19:59 -07:00, commit 33ccc5aead
150 changed files with 1968 additions and 1343 deletions

View File: TestReportLogger.groovy

@@ -28,12 +28,6 @@ import org.gradle.api.logging.LogLevel
 import org.gradle.api.logging.Logger
 import org.junit.runner.Description
-import javax.sound.sampled.AudioSystem
-import javax.sound.sampled.Clip
-import javax.sound.sampled.Line
-import javax.sound.sampled.LineEvent
-import javax.sound.sampled.LineListener
-import java.util.concurrent.atomic.AtomicBoolean
 import java.util.concurrent.atomic.AtomicInteger
 import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDescription
@@ -123,36 +117,9 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
             formatTime(e.getCurrentTime()) + ", stalled for " +
             formatDurationInSeconds(e.getNoEventDuration()) + " at: " +
             (e.getDescription() == null ? "<unknown>" : formatDescription(e.getDescription())))
-        try {
-            playBeat();
-        } catch (Exception nosound) { /* handling exceptions with style */ }
         slowTestsFound = true
     }

-    void playBeat() throws Exception {
-        Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class));
-        final AtomicBoolean stop = new AtomicBoolean();
-        clip.addLineListener(new LineListener() {
-            @Override
-            public void update(LineEvent event) {
-                if (event.getType() == LineEvent.Type.STOP) {
-                    stop.set(true);
-                }
-            }
-        });
-        InputStream stream = getClass().getResourceAsStream("/beat.wav");
-        try {
-            clip.open(AudioSystem.getAudioInputStream(stream));
-            clip.start();
-            while (!stop.get()) {
-                Thread.sleep(20);
-            }
-            clip.close();
-        } finally {
-            stream.close();
-        }
-    }
-
     @Subscribe
     void onQuit(AggregatedQuitEvent e) throws IOException {
         if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) {

Binary file not shown. (This is the /beat.wav resource referenced by the removed playBeat() code above.)

View File: checkstyle_suppressions.xml

@@ -477,8 +477,6 @@
 <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndexingMemoryController.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesService.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesWarmer.java" checks="LineLength" />
-<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analysis[/\\]AnalysisModule.java" checks="LineLength" />
-<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analysis[/\\]HunspellService.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analysis[/\\]PreBuiltCacheFactory.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analysis[/\\]PreBuiltTokenFilters.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]breaker[/\\]HierarchyCircuitBreakerService.java" checks="LineLength" />
@@ -879,9 +877,6 @@
 <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLogTests.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicySettingsTests.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]SearchSlowLogTests.java" checks="LineLength" />
-<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisModuleTests.java" checks="LineLength" />
-<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisServiceTests.java" checks="LineLength" />
-<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]CompoundAnalysisTests.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]NGramTokenizerFactoryTests.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PatternCaptureTokenFilterTests.java" checks="LineLength" />
 <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PreBuiltAnalyzerProviderFactoryTests.java" checks="LineLength" />
@@ -1186,8 +1181,6 @@
 <suppress files="plugins[/\\]analysis-kuromoji[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]JapaneseStopTokenFilterFactory.java" checks="LineLength" />
 <suppress files="plugins[/\\]analysis-kuromoji[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]KuromojiAnalysisTests.java" checks="LineLength" />
 <suppress files="plugins[/\\]analysis-phonetic[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PhoneticTokenFilterFactory.java" checks="LineLength" />
-<suppress files="plugins[/\\]analysis-smartcn[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]SimpleSmartChineseAnalysisTests.java" checks="LineLength" />
-<suppress files="plugins[/\\]analysis-stempel[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PolishAnalysisTests.java" checks="LineLength" />
 <suppress files="plugins[/\\]discovery-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]azure[/\\]AbstractAzureTestCase.java" checks="LineLength" />
 <suppress files="plugins[/\\]discovery-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]azure[/\\]AzureMinimumMasterNodesTests.java" checks="LineLength" />
 <suppress files="plugins[/\\]discovery-azure[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]azure[/\\]AzureSimpleTests.java" checks="LineLength" />

View File: SettingsUpdater.java

@@ -77,7 +77,7 @@ final class SettingsUpdater {
         Settings settings = build.metaData().settings();
         // now we try to apply things and if they are invalid we fail
         // this dryRun will validate & parse settings but won't actually apply them.
-        clusterSettings.dryRun(settings);
+        clusterSettings.validateUpdate(settings);
         return build;
     }

View File: SearchRequestBuilder.java

@@ -252,8 +252,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
     /**
      * Sets no fields to be loaded, resulting in only id and type to be returned per field.
      */
-    public SearchRequestBuilder setNoStoredFields() {
-        sourceBuilder().noStoredFields();
+    public SearchRequestBuilder setNoFields() {
+        sourceBuilder().noFields();
         return this;
     }
@@ -289,23 +289,13 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
         return this;
     }

-    /**
-     * Adds a docvalue based field to load and return. The field does not have to be stored,
-     * but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name The field to get from the docvalue
-     */
-    public SearchRequestBuilder addDocValueField(String name) {
-        sourceBuilder().docValueField(name);
-        return this;
-    }
-
     /**
-     * Adds a stored field to load and return (note, it must be stored) as part of the search request.
+     * Adds a field to load and return (note, it must be stored) as part of the search request.
      * If none are specified, the source of the document will be return.
      */
-    public SearchRequestBuilder addStoredField(String field) {
-        sourceBuilder().storedField(field);
+    public SearchRequestBuilder addField(String field) {
+        sourceBuilder().field(field);
         return this;
     }
@@ -314,15 +304,12 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
      * but its recommended to use non analyzed or numeric fields.
      *
      * @param name The field to get from the field data cache
-     * @deprecated Use {@link SearchRequestBuilder#addDocValueField(String)} instead.
      */
-    @Deprecated
     public SearchRequestBuilder addFieldDataField(String name) {
-        sourceBuilder().docValueField(name);
+        sourceBuilder().fieldDataField(name);
         return this;
     }

     /**
      * Adds a script based field to load and return. The field does not have to be stored,
      * but its recommended to use non analyzed or numeric fields.
@@ -379,24 +366,12 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
         return this;
     }

-    /**
-     * Sets the stored fields to load and return as part of the search request. If none
-     * are specified, the source of the document will be returned.
-     *
-     * @deprecated Use {@link SearchRequestBuilder#storedFields(String...)} instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder fields(String... fields) {
-        sourceBuilder().storedFields(Arrays.asList(fields));
-        return this;
-    }
-
     /**
      * Sets the fields to load and return as part of the search request. If none
      * are specified, the source of the document will be returned.
      */
-    public SearchRequestBuilder storedFields(String... fields) {
-        sourceBuilder().storedFields(Arrays.asList(fields));
+    public SearchRequestBuilder fields(String... fields) {
+        sourceBuilder().fields(Arrays.asList(fields));
         return this;
     }
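For context on what this hunk restores: a minimal caller-side sketch of the builder as it looks after the merge (the index and field names are invented for illustration):

    SearchResponse response = client.prepareSearch("my_index")
            .fields("title", "date")          // stored fields again; the branch's storedFields(...) spelling is gone
            .addFieldDataField("rating")      // field-data backed; replaces the branch's addDocValueField(...)
            .get();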

View File: BaseNodesRequest.java

@@ -21,7 +21,8 @@ package org.elasticsearch.action.support.nodes;

 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.common.Strings;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
@@ -33,10 +34,24 @@ import java.io.IOException;
  */
 public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>> extends ActionRequest<Request> {

-    public static String[] ALL_NODES = Strings.EMPTY_ARRAY;
-
+    /**
+     * the list of nodesIds that will be used to resolve this request and {@link #concreteNodes}
+     * will be populated. Note that if {@link #concreteNodes} is not null, it will be used and nodeIds
+     * will be ignored.
+     *
+     * See {@link DiscoveryNodes#resolveNodes} for a full description of the options.
+     *
+     * TODO: once we stop using the transport client as a gateway to the cluster, we can get rid of this and resolve it to concrete nodes
+     * in the rest layer
+     **/
     private String[] nodesIds;

+    /**
+     * once {@link #nodesIds} are resolved this will contain the concrete nodes that are part of this request. If set, {@link #nodesIds}
+     * will be ignored and this will be used.
+     * */
+    private DiscoveryNode[] concreteNodes;
+
     private TimeValue timeout;

     protected BaseNodesRequest() {
@@ -47,6 +62,11 @@ public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>
         this.nodesIds = nodesIds;
     }

+    protected BaseNodesRequest(DiscoveryNode... concreteNodes) {
+        this.nodesIds = null;
+        this.concreteNodes = concreteNodes;
+    }
+
     public final String[] nodesIds() {
         return nodesIds;
     }
@@ -72,6 +92,13 @@ public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>
         this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout");
         return (Request) this;
     }

+    public DiscoveryNode[] concreteNodes() {
+        return concreteNodes;
+    }
+
+    public void setConcreteNodes(DiscoveryNode[] concreteNodes) {
+        this.concreteNodes = concreteNodes;
+    }
+
     @Override
     public ActionRequestValidationException validate() {
@@ -82,6 +109,7 @@ public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         nodesIds = in.readStringArray();
+        concreteNodes = in.readOptionalArray(DiscoveryNode::new, DiscoveryNode[]::new);
         timeout = in.readOptionalWriteable(TimeValue::new);
     }
@@ -89,6 +117,7 @@ public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeStringArrayNullable(nodesIds);
+        out.writeOptionalArray(concreteNodes);
         out.writeOptionalWriteable(timeout);
     }
 }
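A small sketch of the two construction paths the new concreteNodes field enables; the subclass is hypothetical, not part of this commit:

    public class NodesStatsLikeRequest extends BaseNodesRequest<NodesStatsLikeRequest> {
        // pattern path: nodesIds is kept and resolved later against the cluster state
        public NodesStatsLikeRequest(String... nodesIds) {
            super(nodesIds);
        }
        // concrete path: nodes are fixed up front, nodesIds stays null and is ignored
        public NodesStatsLikeRequest(DiscoveryNode... nodes) {
            super(nodes);
        }
    }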

View File: TransportNodesAction.java

@@ -27,7 +27,6 @@ import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
@@ -42,6 +41,7 @@ import org.elasticsearch.transport.TransportRequestOptions;
 import org.elasticsearch.transport.TransportService;

 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -148,20 +148,19 @@ public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest
     protected abstract boolean accumulateExceptions();

-    protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
-        return nodesIds;
-    }
-
-    protected String[] resolveNodes(NodesRequest request, ClusterState clusterState) {
-        return clusterState.nodes().resolveNodesIds(request.nodesIds());
+    /**
+     * resolve node ids to concrete nodes of the incoming request
+     **/
+    protected void resolveRequest(NodesRequest request, ClusterState clusterState) {
+        assert request.concreteNodes() == null : "request concreteNodes shouldn't be set";
+        String[] nodesIds = clusterState.nodes().resolveNodes(request.nodesIds());
+        request.setConcreteNodes(Arrays.stream(nodesIds).map(clusterState.nodes()::get).toArray(DiscoveryNode[]::new));
     }

     class AsyncAction {

         private final NodesRequest request;
-        private final String[] nodesIds;
-        private final DiscoveryNode[] nodes;
         private final ActionListener<NodesResponse> listener;
         private final AtomicReferenceArray<Object> responses;
         private final AtomicInteger counter = new AtomicInteger();
@@ -171,24 +170,18 @@
             this.task = task;
             this.request = request;
             this.listener = listener;
-            ClusterState clusterState = clusterService.state();
-            nodesIds = filterNodeIds(clusterState.nodes(), resolveNodes(request, clusterState));
-            this.nodes = new DiscoveryNode[nodesIds.length];
-            for (int i = 0; i < nodesIds.length; i++) {
-                this.nodes[i] = clusterState.nodes().get(nodesIds[i]);
+            if (request.concreteNodes() == null) {
+                resolveRequest(request, clusterService.state());
+                assert request.concreteNodes() != null;
             }
-            this.responses = new AtomicReferenceArray<>(this.nodesIds.length);
+            this.responses = new AtomicReferenceArray<>(request.concreteNodes().length);
         }

         void start() {
-            if (nodesIds.length == 0) {
+            final DiscoveryNode[] nodes = request.concreteNodes();
+            if (nodes.length == 0) {
                 // nothing to notify
-                threadPool.generic().execute(new Runnable() {
-                    @Override
-                    public void run() {
-                        listener.onResponse(newResponse(request, responses));
-                    }
-                });
+                threadPool.generic().execute(() -> listener.onResponse(newResponse(request, responses)));
                 return;
             }
             TransportRequestOptions.Builder builder = TransportRequestOptions.builder();
@@ -196,10 +189,10 @@
                 builder.withTimeout(request.timeout());
             }
             builder.withCompress(transportCompress());
-            for (int i = 0; i < nodesIds.length; i++) {
-                final String nodeId = nodesIds[i];
+            for (int i = 0; i < nodes.length; i++) {
                 final int idx = i;
                 final DiscoveryNode node = nodes[i];
+                final String nodeId = node.getId();
                 try {
                     if (node == null) {
                         onFailure(idx, nodeId, new NoSuchNodeException(nodeId));
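The two deleted hooks (filterNodeIds and the overridable resolveNodes) collapse into the single resolveRequest override point. A hedged sketch of a subclass that needs to keep the caller's ids verbatim, as TransportNodesListGatewayStartedShards used to do via its resolveNodes override; the request class name is illustrative:

    @Override
    protected void resolveRequest(MyNodesRequest request, ClusterState clusterState) {
        // skip the default resolveNodes() expansion and map the given ids directly;
        // unknown ids become null entries that surface later as NoSuchNodeException
        request.setConcreteNodes(Arrays.stream(request.nodesIds())
                .map(clusterState.nodes()::get)
                .toArray(DiscoveryNode[]::new));
    }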

View File: TransportTasksAction.java

@@ -124,7 +124,7 @@ public abstract class TransportTasksAction<
         if (request.getTaskId().isSet()) {
             return new String[]{request.getTaskId().getNodeId()};
         } else {
-            return clusterState.nodes().resolveNodesIds(request.getNodesIds());
+            return clusterState.nodes().resolveNodes(request.getNodesIds());
         }
     }

View File: MetaDataUpdateSettingsService.java

@@ -19,6 +19,7 @@
 package org.elasticsearch.cluster.metadata;

+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest;
@@ -43,7 +44,10 @@ import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.Index;
+import org.elasticsearch.index.NodeServicesProvider;
+import org.elasticsearch.indices.IndicesService;

+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -61,17 +65,20 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
     private final AllocationService allocationService;

-    private final IndexNameExpressionResolver indexNameExpressionResolver;
     private final IndexScopedSettings indexScopedSettings;
+    private final IndicesService indicesService;
+    private final NodeServicesProvider nodeServiceProvider;

     @Inject
-    public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, IndexScopedSettings indexScopedSettings, IndexNameExpressionResolver indexNameExpressionResolver) {
+    public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService,
+                                         IndexScopedSettings indexScopedSettings, IndicesService indicesService, NodeServicesProvider nodeServicesProvider) {
         super(settings);
         this.clusterService = clusterService;
-        this.indexNameExpressionResolver = indexNameExpressionResolver;
         this.clusterService.add(this);
         this.allocationService = allocationService;
         this.indexScopedSettings = indexScopedSettings;
+        this.indicesService = indicesService;
+        this.nodeServiceProvider = nodeServicesProvider;
     }

     @Override
@@ -266,11 +273,15 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                 // now, reroute in case things change that require it (like number of replicas)
                 RoutingAllocation.Result routingResult = allocationService.reroute(updatedState, "settings update");
                 updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build();
-                for (Index index : openIndices) {
-                    indexScopedSettings.dryRun(updatedState.metaData().getIndexSafe(index).getSettings());
-                }
-                for (Index index : closeIndices) {
-                    indexScopedSettings.dryRun(updatedState.metaData().getIndexSafe(index).getSettings());
+                try {
+                    for (Index index : openIndices) {
+                        indicesService.verifyIndexMetadata(nodeServiceProvider, updatedState.getMetaData().getIndexSafe(index));
+                    }
+                    for (Index index : closeIndices) {
+                        indicesService.verifyIndexMetadata(nodeServiceProvider, updatedState.getMetaData().getIndexSafe(index));
+                    }
+                } catch (IOException ex) {
+                    ExceptionsHelper.convertToElastic(ex);
                 }
                 return updatedState;
             }

View File: DiscoveryNodes.java

@@ -245,7 +245,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
      * @throws IllegalArgumentException if more than one node matches the request or no nodes have been resolved
      */
     public DiscoveryNode resolveNode(String node) {
-        String[] resolvedNodeIds = resolveNodesIds(node);
+        String[] resolvedNodeIds = resolveNodes(node);
         if (resolvedNodeIds.length > 1) {
             throw new IllegalArgumentException("resolved [" + node + "] into [" + resolvedNodeIds.length + "] nodes, where expected to be resolved to a single node");
         }
@@ -255,17 +255,25 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
         return nodes.get(resolvedNodeIds[0]);
     }

-    public String[] resolveNodesIds(String... nodesIds) {
-        if (isAllNodes(nodesIds)) {
+    /**
+     * resolves a set of node "descriptions" to concrete and existing node ids. "descriptions" can be (resolved in this order):
+     * - "_local" or "_master" for the relevant nodes
+     * - a node id
+     * - a wild card pattern that will be matched against node names
+     * - a "attr:value" pattern, where attr can be a node role (master, data, ingest etc.) in which case the value can be true of false
+     *   or a generic node attribute name in which case value will be treated as a wildcard and matched against the node attribute values.
+     */
+    public String[] resolveNodes(String... nodes) {
+        if (isAllNodes(nodes)) {
             int index = 0;
-            nodesIds = new String[nodes.size()];
+            nodes = new String[this.nodes.size()];
             for (DiscoveryNode node : this) {
-                nodesIds[index++] = node.getId();
+                nodes[index++] = node.getId();
             }
-            return nodesIds;
+            return nodes;
         } else {
-            ObjectHashSet<String> resolvedNodesIds = new ObjectHashSet<>(nodesIds.length);
-            for (String nodeId : nodesIds) {
+            ObjectHashSet<String> resolvedNodesIds = new ObjectHashSet<>(nodes.length);
+            for (String nodeId : nodes) {
                 if (nodeId.equals("_local")) {
                     String localNodeId = getLocalNodeId();
                     if (localNodeId != null) {
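A short usage sketch of the renamed method, following the resolution order the new javadoc lists (node names and attribute values are invented):

    DiscoveryNodes nodes = clusterState.nodes();
    String[] all     = nodes.resolveNodes();                           // no descriptions: every node id
    String[] special = nodes.resolveNodes("_local", "_master");        // special names
    String[] byName  = nodes.resolveNodes("data-node-*");              // wildcard against node names
    String[] byAttr  = nodes.resolveNodes("master:false", "rack:r1*"); // role:true/false or attribute:wildcard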

View File: IndexShardRoutingTable.java

@@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing;

 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.Randomness;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -357,7 +356,7 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
      */
     public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String[] nodeAttributes, DiscoveryNodes discoveryNodes) {
         ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
-        Set<String> selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodesIds(nodeAttributes));
+        Set<String> selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodes(nodeAttributes));
         int seed = shuffler.nextSeed();
         for (ShardRouting shardRouting : shuffler.shuffle(activeShards, seed)) {
             if (selectedNodes.contains(shardRouting.currentNodeId())) {

View File: StreamInput.java

@@ -57,6 +57,7 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.IntFunction;
 import java.util.function.Supplier;

 import static org.elasticsearch.ElasticsearchException.readException;
@@ -608,6 +609,19 @@ public abstract class StreamInput extends InputStream {
         return bytes;
     }

+    public <T> T[] readArray(Writeable.Reader<T> reader, IntFunction<T[]> arraySupplier) throws IOException {
+        int length = readVInt();
+        T[] values = arraySupplier.apply(length);
+        for (int i = 0; i < length; i++) {
+            values[i] = reader.read(this);
+        }
+        return values;
+    }
+
+    public <T> T[] readOptionalArray(Writeable.Reader<T> reader, IntFunction<T[]> arraySupplier) throws IOException {
+        return readBoolean() ? readArray(reader, arraySupplier) : null;
+    }
+
     /**
      * Serializes a potential null value.
      */
@@ -782,7 +796,7 @@
     /**
      * Reads a list of objects
      */
-    public <T> List<T> readList(StreamInputReader<T> reader) throws IOException {
+    public <T> List<T> readList(Writeable.Reader<T> reader) throws IOException {
         int count = readVInt();
         List<T> builder = new ArrayList<>(count);
         for (int i=0; i<count; i++) {

View File: StreamInputReader.java (deleted)

@@ -1,33 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.io.stream;
-
-import java.io.IOException;
-
-/**
- * Defines a method for reading a list of objects from StreamInput.
- *
- * It can be used in {@link StreamInput#readList(StreamInputReader)} for reading
- * lists of immutable objects that implement StreamInput accepting constructors.
- */
-@FunctionalInterface
-public interface StreamInputReader<T> {
-    T read(StreamInput t) throws IOException;
-}

View File: StreamOutput.java

@@ -594,6 +594,22 @@ public abstract class StreamOutput extends OutputStream {
         }
     }

+    public <T extends Writeable> void writeArray(T[] array) throws IOException {
+        writeVInt(array.length);
+        for (T value: array) {
+            value.writeTo(this);
+        }
+    }
+
+    public <T extends Writeable> void writeOptionalArray(@Nullable T[] array) throws IOException {
+        if (array == null) {
+            writeBoolean(false);
+        } else {
+            writeBoolean(true);
+            writeArray(array);
+        }
+    }
+
     /**
      * Serializes a potential null value.
      */
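These writers pair with the readArray/readOptionalArray counterparts added to StreamInput above. A round-trip sketch, assuming the usual BytesStreamOutput test idiom (the exact accessor for turning the written bytes back into a StreamInput varies across versions):

    BytesStreamOutput out = new BytesStreamOutput();
    out.writeOptionalArray(nodes);   // DiscoveryNode[] or null; a presence boolean is written first
    StreamInput in = out.bytes().streamInput();
    DiscoveryNode[] back = in.readOptionalArray(DiscoveryNode::new, DiscoveryNode[]::new);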

View File: NetworkModule.java

@@ -238,7 +238,8 @@ public class NetworkModule extends AbstractModule {
         RestIndexAction.class,
         RestGetAction.class,
         RestGetSourceAction.class,
-        RestHeadAction.class,
+        RestHeadAction.Document.class,
+        RestHeadAction.Source.class,
         RestMultiGetAction.class,
         RestDeleteAction.class,
         org.elasticsearch.rest.action.count.RestCountAction.class,

View File: AbstractScopedSettings.java

@@ -115,18 +115,18 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
     }

     /**
-     * Applies the given settings to all listeners and rolls back the result after application. This
+     * Validates the given settings by running it through all update listeners without applying it. This
      * method will not change any settings but will fail if any of the settings can't be applied.
      */
-    public synchronized Settings dryRun(Settings settings) {
+    public synchronized Settings validateUpdate(Settings settings) {
         final Settings current = Settings.builder().put(this.settings).put(settings).build();
         final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build();
         List<RuntimeException> exceptions = new ArrayList<>();
         for (SettingUpdater<?> settingUpdater : settingUpdaters) {
             try {
-                if (settingUpdater.hasChanged(current, previous)) {
-                    settingUpdater.getValue(current, previous);
-                }
+                // ensure running this through the updater / dynamic validator
+                // don't check if the value has changed we wanna test this anyways
+                settingUpdater.getValue(current, previous);
             } catch (RuntimeException ex) {
                 exceptions.add(ex);
                 logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater);
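The behavioral change here: every registered updater now runs against the merged settings, so a validator fires even when its own value did not change. A minimal sketch (the key is just one example of a dynamic cluster setting):

    Settings proposed = Settings.builder()
            .put("cluster.routing.allocation.enable", "none")
            .build();
    // throws the collected RuntimeException if any updater rejects the merged view
    clusterSettings.validateUpdate(proposed);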

View File: AsyncShardFetch.java

@@ -60,7 +60,7 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
      * An action that lists the relevant shard data that needs to be fetched.
      */
     public interface Lister<NodesResponse extends BaseNodesResponse<NodeResponse>, NodeResponse extends BaseNodeResponse> {
-        void list(ShardId shardId, String[] nodesIds, ActionListener<NodesResponse> listener);
+        void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener<NodesResponse> listener);
     }

     protected final ESLogger logger;
@@ -116,12 +116,9 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
             for (NodeEntry<T> nodeEntry : nodesToFetch) {
                 nodeEntry.markAsFetching();
             }
-            String[] nodesIds = new String[nodesToFetch.size()];
-            int index = 0;
-            for (NodeEntry<T> nodeEntry : nodesToFetch) {
-                nodesIds[index++] = nodeEntry.getNodeId();
-            }
-            asyncFetch(shardId, nodesIds);
+            DiscoveryNode[] discoNodesToFetch = nodesToFetch.stream().map(NodeEntry::getNodeId).map(nodes::get)
+                .toArray(DiscoveryNode[]::new);
+            asyncFetch(shardId, discoNodesToFetch);
         }

         // if we are still fetching, return null to indicate it
@@ -187,7 +184,7 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
             if (nodeEntry.isFailed()) {
                 logger.trace("{} node {} has failed for [{}] (failure [{}])", shardId, nodeEntry.getNodeId(), type, nodeEntry.getFailure());
             } else {
-                logger.trace("{} marking {} as done for [{}]", shardId, nodeEntry.getNodeId(), type);
+                logger.trace("{} marking {} as done for [{}], result is [{}]", shardId, nodeEntry.getNodeId(), type, response);
                 nodeEntry.doneFetching(response);
             }
         }
@@ -268,9 +265,9 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
      * Async fetches data for the provided shard with the set of nodes that need to be fetched from.
      */
     // visible for testing
-    void asyncFetch(final ShardId shardId, final String[] nodesIds) {
-        logger.trace("{} fetching [{}] from {}", shardId, type, nodesIds);
-        action.list(shardId, nodesIds, new ActionListener<BaseNodesResponse<T>>() {
+    void asyncFetch(final ShardId shardId, final DiscoveryNode[] nodes) {
+        logger.trace("{} fetching [{}] from {}", shardId, type, nodes);
+        action.list(shardId, nodes, new ActionListener<BaseNodesResponse<T>>() {
             @Override
             public void onResponse(BaseNodesResponse<T> response) {
                 processAsyncFetch(shardId, response.getNodes(), response.failures());
@@ -278,9 +275,9 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
             @Override
             public void onFailure(Throwable e) {
-                List<FailedNodeException> failures = new ArrayList<>(nodesIds.length);
-                for (String nodeId : nodesIds) {
-                    failures.add(new FailedNodeException(nodeId, "total failure in fetching", e));
+                List<FailedNodeException> failures = new ArrayList<>(nodes.length);
+                for (final DiscoveryNode node: nodes) {
+                    failures.add(new FailedNodeException(node.getId(), "total failure in fetching", e));
                 }
                 processAsyncFetch(shardId, null, failures);
             }

View File: ReplicaShardAllocator.java

@@ -38,13 +38,13 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.store.StoreFileMetaData;
 import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;

 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;

 /**
  */
@@ -91,7 +91,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
         ShardRouting primaryShard = allocation.routingNodes().activePrimary(shard.shardId());
         assert primaryShard != null : "the replica shard can be allocated on at least one node, so there must be an active primary";
         TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryStore = findStore(primaryShard, allocation, shardStores);
-        if (primaryStore == null || primaryStore.allocated() == false) {
+        if (primaryStore == null) {
             // if we can't find the primary data, it is probably because the primary shard is corrupted (and listing failed)
             // just let the recovery find it out, no need to do anything about it for the initializing shard
             logger.trace("{}: no primary shard store found or allocated, letting actual allocation figure it out", shard);
@@ -102,8 +102,15 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
         if (matchingNodes.getNodeWithHighestMatch() != null) {
             DiscoveryNode currentNode = allocation.nodes().get(shard.currentNodeId());
             DiscoveryNode nodeWithHighestMatch = matchingNodes.getNodeWithHighestMatch();
+            // current node will not be in matchingNodes as it is filtered away by SameShardAllocationDecider
+            final String currentSyncId;
+            if (shardStores.getData().containsKey(currentNode)) {
+                currentSyncId = shardStores.getData().get(currentNode).storeFilesMetaData().syncId();
+            } else {
+                currentSyncId = null;
+            }
             if (currentNode.equals(nodeWithHighestMatch) == false
-                && matchingNodes.isNodeMatchBySyncID(currentNode) == false
+                && Objects.equals(currentSyncId, primaryStore.syncId()) == false
                 && matchingNodes.isNodeMatchBySyncID(nodeWithHighestMatch) == true) {
                 // we found a better match that has a full sync id match, the existing allocation is not fully synced
                 // so we found a better one, cancel this one
@@ -160,7 +167,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
         ShardRouting primaryShard = routingNodes.activePrimary(shard.shardId());
         assert primaryShard != null : "the replica shard can be allocated on at least one node, so there must be an active primary";
         TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryStore = findStore(primaryShard, allocation, shardStores);
-        if (primaryStore == null || primaryStore.allocated() == false) {
+        if (primaryStore == null) {
             // if we can't find the primary data, it is probably because the primary shard is corrupted (and listing failed)
             // we want to let the replica be allocated in order to expose the actual problem with the primary that the replica
             // will try and recover from
@@ -257,8 +264,8 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
         for (Map.Entry<DiscoveryNode, TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> nodeStoreEntry : data.getData().entrySet()) {
             DiscoveryNode discoNode = nodeStoreEntry.getKey();
             TransportNodesListShardStoreMetaData.StoreFilesMetaData storeFilesMetaData = nodeStoreEntry.getValue().storeFilesMetaData();
-            if (storeFilesMetaData == null) {
-                // already allocated on that node...
+            // we don't have any files at all, it is an empty index
+            if (storeFilesMetaData.isEmpty()) {
                 continue;
             }
@@ -275,16 +282,6 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
                 continue;
             }

-            // if it is already allocated, we can't assign to it... (and it might be primary as well)
-            if (storeFilesMetaData.allocated()) {
-                continue;
-            }
-
-            // we don't have any files at all, it is an empty index
-            if (storeFilesMetaData.iterator().hasNext() == false) {
-                continue;
-            }
-
             String primarySyncId = primaryStore.syncId();
             String replicaSyncId = storeFilesMetaData.syncId();
             // see if we have a sync id we can make use of

View File: TransportNodesListGatewayStartedShards.java

@@ -29,7 +29,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest;
 import org.elasticsearch.action.support.nodes.BaseNodesResponse;
 import org.elasticsearch.action.support.nodes.TransportNodesAction;
 import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -81,16 +80,9 @@ public class TransportNodesListGatewayStartedShards extends
     }

     @Override
-    public void list(ShardId shardId, String[] nodesIds,
+    public void list(ShardId shardId, DiscoveryNode[] nodes,
                      ActionListener<NodesGatewayStartedShards> listener) {
-        execute(new Request(shardId, nodesIds), listener);
-    }
-
-    @Override
-    protected String[] resolveNodes(Request request, ClusterState clusterState) {
-        // default implementation may filter out non existent nodes. it's important to keep exactly the ids
-        // we were given for accounting on the caller
-        return request.nodesIds();
+        execute(new Request(shardId, nodes), listener);
     }

     @Override
@@ -177,8 +169,8 @@ public class TransportNodesListGatewayStartedShards extends
         public Request() {
         }

-        public Request(ShardId shardId, String[] nodesIds) {
-            super(nodesIds);
+        public Request(ShardId shardId, DiscoveryNode[] nodes) {
+            super(nodes);
             this.shardId = shardId;
         }

View File: IndexModule.java

@@ -126,6 +126,17 @@ public final class IndexModule {
         indexSettings.getScopedSettings().addSettingsUpdateConsumer(setting, consumer);
     }

+    /**
+     * Adds a Setting, it's consumer and validator for this index.
+     */
+    public <T> void addSettingsUpdateConsumer(Setting<T> setting, Consumer<T> consumer, Consumer<T> validator) {
+        ensureNotFrozen();
+        if (setting == null) {
+            throw new IllegalArgumentException("setting must not be null");
+        }
+        indexSettings.getScopedSettings().addSettingsUpdateConsumer(setting, consumer, validator);
+    }
+
     /**
      * Returns the index {@link Settings} for this index
      */
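A hedged sketch of registering through the new overload; the setting and its limit are invented for illustration. The validator runs during settings validation, before the consumer, so a rejected value is never applied:

    public static final Setting<Integer> QUEUE_SIZE_SETTING =
            Setting.intSetting("index.myplugin.queue_size", 16, Setting.Property.Dynamic, Setting.Property.IndexScope);

    // in the plugin's onIndexModule(IndexModule indexModule):
    indexModule.addSettingsUpdateConsumer(QUEUE_SIZE_SETTING,
            newSize -> this.queueSize = newSize,   // consumer: apply the accepted value
            newSize -> {                           // validator: reject out-of-range updates
                if (newSize <= 0 || newSize > 1024) {
                    throw new IllegalArgumentException("queue_size must be in (0, 1024]");
                }
            });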

View File

@@ -275,6 +275,7 @@ public final class IndexSettings {
         scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval);
         scopedSettings.addSettingsUpdateConsumer(MAX_REFRESH_LISTENERS_PER_SHARD, this::setMaxRefreshListeners);
         scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_SCROLL, this::setMaxSlicesPerScroll);
     }
@@ -545,5 +546,5 @@ public final class IndexSettings {
         this.maxSlicesPerScroll = value;
     }

-    IndexScopedSettings getScopedSettings() { return scopedSettings;}
+    public IndexScopedSettings getScopedSettings() { return scopedSettings;}
 }

View File: AnalysisRegistry.java

@ -23,14 +23,11 @@ import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
import org.elasticsearch.indices.analysis.PreBuiltCharFilters; import org.elasticsearch.indices.analysis.PreBuiltCharFilters;
import org.elasticsearch.indices.analysis.PreBuiltTokenFilters; import org.elasticsearch.indices.analysis.PreBuiltTokenFilters;
@ -45,48 +42,32 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static java.util.Collections.unmodifiableMap;
/** /**
* An internal registry for tokenizer, token filter, char filter and analyzer. * An internal registry for tokenizer, token filter, char filter and analyzer.
* This class exists per node and allows to create per-index {@link AnalysisService} via {@link #build(IndexSettings)} * This class exists per node and allows to create per-index {@link AnalysisService} via {@link #build(IndexSettings)}
*/ */
public final class AnalysisRegistry implements Closeable { public final class AnalysisRegistry implements Closeable {
private final Map<String, AnalysisModule.AnalysisProvider<CharFilterFactory>> charFilters; private final PrebuiltAnalysis prebuiltAnalysis = new PrebuiltAnalysis();
private final Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters;
private final Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizers;
private final Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider>> analyzers;
private final Map<String, Analyzer> cachedAnalyzer = new ConcurrentHashMap<>(); private final Map<String, Analyzer> cachedAnalyzer = new ConcurrentHashMap<>();
private final PrebuiltAnalysis prebuiltAnalysis;
private final HunspellService hunspellService;
private final Environment environment; private final Environment environment;
private final Map<String, AnalysisProvider<CharFilterFactory>> charFilters;
private final Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters;
private final Map<String, AnalysisProvider<TokenizerFactory>> tokenizers;
private final Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers;
public AnalysisRegistry(HunspellService hunspellService, Environment environment) { public AnalysisRegistry(Environment environment,
this(hunspellService, environment, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); Map<String, AnalysisProvider<CharFilterFactory>> charFilters,
} Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters,
Map<String, AnalysisProvider<TokenizerFactory>> tokenizers,
public AnalysisRegistry(HunspellService hunspellService, Environment environment, Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers) {
Map<String, AnalysisModule.AnalysisProvider<CharFilterFactory>> charFilters,
Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters,
Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizers,
Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider>> analyzers) {
prebuiltAnalysis = new PrebuiltAnalysis();
this.hunspellService = hunspellService;
this.environment = environment; this.environment = environment;
final Map<String, AnalysisModule.AnalysisProvider<CharFilterFactory>> charFilterBuilder = new HashMap<>(charFilters); this.charFilters = unmodifiableMap(charFilters);
final Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilterBuilder = new HashMap<>(tokenFilters); this.tokenFilters = unmodifiableMap(tokenFilters);
final Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizerBuilder = new HashMap<>(tokenizers); this.tokenizers = unmodifiableMap(tokenizers);
final Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider>> analyzerBuilder= new HashMap<>(analyzers); this.analyzers = unmodifiableMap(analyzers);
registerBuiltInAnalyzer(analyzerBuilder);
registerBuiltInCharFilter(charFilterBuilder);
registerBuiltInTokenizer(tokenizerBuilder);
registerBuiltInTokenFilters(tokenFilterBuilder);
this.tokenFilters = Collections.unmodifiableMap(tokenFilterBuilder);
this.tokenizers = Collections.unmodifiableMap(tokenizerBuilder);
this.charFilters = Collections.unmodifiableMap(charFilterBuilder);
this.analyzers = Collections.unmodifiableMap(analyzerBuilder);
}
public HunspellService getHunspellService() {
return hunspellService;
} }
@@ -114,9 +95,9 @@ public final class AnalysisRegistry implements Closeable {
     /**
      * Returns a registered {@link Analyzer} provider by name or <code>null</code> if the analyzer was not registered
      */
     public Analyzer getAnalyzer(String analyzer) throws IOException {
-        AnalysisModule.AnalysisProvider<AnalyzerProvider> analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer);
+        AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer);
         if (analyzerProvider == null) {
-            AnalysisModule.AnalysisProvider<AnalyzerProvider> provider = analyzers.get(analyzer);
+            AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> provider = analyzers.get(analyzer);
             return provider == null ? null : cachedAnalyzer.computeIfAbsent(analyzer, (key) -> {
                 try {
                     return provider.get(environment, key).get();
@@ -157,7 +138,8 @@ public final class AnalysisRegistry implements Closeable {
          */
         tokenFilters.put("synonym", requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, tokenizerFactories, name, settings)));
         final Map<String, TokenFilterFactory> tokenFilterFactories = buildMapping(false, "tokenfilter", indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories);
-        final Map<String, AnalyzerProvider> analyzierFactories = buildMapping(true, "analyzer", indexSettings, analyzersSettings, analyzers, prebuiltAnalysis.analyzerProviderFactories);
+        final Map<String, AnalyzerProvider<?>> analyzierFactories = buildMapping(true, "analyzer", indexSettings, analyzersSettings,
+            analyzers, prebuiltAnalysis.analyzerProviderFactories);
         return new AnalysisService(indexSettings, analyzierFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories);
@@ -175,140 +157,9 @@ public final class AnalysisRegistry implements Closeable {
         };
     }

-    private void registerBuiltInCharFilter(Map<String, AnalysisModule.AnalysisProvider<CharFilterFactory>> charFilters) {
-        charFilters.put("html_strip", HtmlStripCharFilterFactory::new);
-        charFilters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new));
-        charFilters.put("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new));
-    }
-
-    private void registerBuiltInTokenizer(Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizers) {
-        tokenizers.put("standard", StandardTokenizerFactory::new);
-        tokenizers.put("uax_url_email", UAX29URLEmailTokenizerFactory::new);
-        tokenizers.put("path_hierarchy", PathHierarchyTokenizerFactory::new);
-        tokenizers.put("PathHierarchy", PathHierarchyTokenizerFactory::new);
-        tokenizers.put("keyword", KeywordTokenizerFactory::new);
-        tokenizers.put("letter", LetterTokenizerFactory::new);
-        tokenizers.put("lowercase", LowerCaseTokenizerFactory::new);
-        tokenizers.put("whitespace", WhitespaceTokenizerFactory::new);
-        tokenizers.put("nGram", NGramTokenizerFactory::new);
-        tokenizers.put("ngram", NGramTokenizerFactory::new);
-        tokenizers.put("edgeNGram", EdgeNGramTokenizerFactory::new);
-        tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new);
-        tokenizers.put("pattern", PatternTokenizerFactory::new);
-        tokenizers.put("classic", ClassicTokenizerFactory::new);
-        tokenizers.put("thai", ThaiTokenizerFactory::new);
-    }
-
-    private void registerBuiltInTokenFilters(Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters) {
-        tokenFilters.put("stop", StopTokenFilterFactory::new);
-        tokenFilters.put("reverse", ReverseTokenFilterFactory::new);
-        tokenFilters.put("asciifolding", ASCIIFoldingTokenFilterFactory::new);
-        tokenFilters.put("length", LengthTokenFilterFactory::new);
-        tokenFilters.put("lowercase", LowerCaseTokenFilterFactory::new);
-        tokenFilters.put("uppercase", UpperCaseTokenFilterFactory::new);
-        tokenFilters.put("porter_stem", PorterStemTokenFilterFactory::new);
-        tokenFilters.put("kstem", KStemTokenFilterFactory::new);
-        tokenFilters.put("standard", StandardTokenFilterFactory::new);
-        tokenFilters.put("nGram", NGramTokenFilterFactory::new);
-        tokenFilters.put("ngram", NGramTokenFilterFactory::new);
-        tokenFilters.put("edgeNGram", EdgeNGramTokenFilterFactory::new);
-        tokenFilters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
-        tokenFilters.put("shingle", ShingleTokenFilterFactory::new);
-        tokenFilters.put("unique", UniqueTokenFilterFactory::new);
-        tokenFilters.put("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new));
-        tokenFilters.put("trim", TrimTokenFilterFactory::new);
-        tokenFilters.put("limit", LimitTokenCountFilterFactory::new);
-        tokenFilters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new));
-        tokenFilters.put("snowball", SnowballTokenFilterFactory::new);
-        tokenFilters.put("stemmer", StemmerTokenFilterFactory::new);
-        tokenFilters.put("word_delimiter", WordDelimiterTokenFilterFactory::new);
-        tokenFilters.put("delimited_payload_filter", DelimitedPayloadTokenFilterFactory::new);
-        tokenFilters.put("elision", ElisionTokenFilterFactory::new);
-        tokenFilters.put("keep", requriesAnalysisSettings(KeepWordFilterFactory::new));
-        tokenFilters.put("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new));
-        tokenFilters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
-        tokenFilters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new));
-        tokenFilters.put("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
-        tokenFilters.put("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
-        tokenFilters.put("arabic_stem", ArabicStemTokenFilterFactory::new);
-        tokenFilters.put("brazilian_stem", BrazilianStemTokenFilterFactory::new);
-        tokenFilters.put("czech_stem", CzechStemTokenFilterFactory::new);
-        tokenFilters.put("dutch_stem", DutchStemTokenFilterFactory::new);
-        tokenFilters.put("french_stem", FrenchStemTokenFilterFactory::new);
-        tokenFilters.put("german_stem", GermanStemTokenFilterFactory::new);
-        tokenFilters.put("russian_stem", RussianStemTokenFilterFactory::new);
-        tokenFilters.put("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
-        tokenFilters.put("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
-        tokenFilters.put("arabic_normalization", ArabicNormalizationFilterFactory::new);
-        tokenFilters.put("german_normalization", GermanNormalizationFilterFactory::new);
-        tokenFilters.put("hindi_normalization", HindiNormalizationFilterFactory::new);
-        tokenFilters.put("indic_normalization", IndicNormalizationFilterFactory::new);
-        tokenFilters.put("sorani_normalization", SoraniNormalizationFilterFactory::new);
-        tokenFilters.put("persian_normalization", PersianNormalizationFilterFactory::new);
-        tokenFilters.put("scandinavian_normalization", ScandinavianNormalizationFilterFactory::new);
-        tokenFilters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new);
-        tokenFilters.put("serbian_normalization", SerbianNormalizationFilterFactory::new);
-        if (hunspellService != null) {
-            tokenFilters.put("hunspell", requriesAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService)));
-        }
-        tokenFilters.put("cjk_bigram", CJKBigramFilterFactory::new);
-        tokenFilters.put("cjk_width", CJKWidthFilterFactory::new);
-        tokenFilters.put("apostrophe", ApostropheFilterFactory::new);
-        tokenFilters.put("classic", ClassicFilterFactory::new);
-        tokenFilters.put("decimal_digit", DecimalDigitFilterFactory::new);
-        tokenFilters.put("fingerprint", FingerprintTokenFilterFactory::new);
-    }
-
-    private void registerBuiltInAnalyzer(Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider>> analyzers) {
-        analyzers.put("default", StandardAnalyzerProvider::new);
-        analyzers.put("standard", StandardAnalyzerProvider::new);
-        analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
-        analyzers.put("simple", SimpleAnalyzerProvider::new);
-        analyzers.put("stop", StopAnalyzerProvider::new);
-        analyzers.put("whitespace", WhitespaceAnalyzerProvider::new);
-        analyzers.put("keyword", KeywordAnalyzerProvider::new);
-        analyzers.put("pattern", PatternAnalyzerProvider::new);
-        analyzers.put("snowball", SnowballAnalyzerProvider::new);
-        analyzers.put("arabic", ArabicAnalyzerProvider::new);
-        analyzers.put("armenian", ArmenianAnalyzerProvider::new);
-        analyzers.put("basque", BasqueAnalyzerProvider::new);
-        analyzers.put("brazilian", BrazilianAnalyzerProvider::new);
-        analyzers.put("bulgarian", BulgarianAnalyzerProvider::new);
-        analyzers.put("catalan", CatalanAnalyzerProvider::new);
-        analyzers.put("chinese", ChineseAnalyzerProvider::new);
-        analyzers.put("cjk", CjkAnalyzerProvider::new);
-        analyzers.put("czech", CzechAnalyzerProvider::new);
-        analyzers.put("danish", DanishAnalyzerProvider::new);
-        analyzers.put("dutch", DutchAnalyzerProvider::new);
-        analyzers.put("english", EnglishAnalyzerProvider::new);
-        analyzers.put("finnish", FinnishAnalyzerProvider::new);
-        analyzers.put("french", FrenchAnalyzerProvider::new);
-        analyzers.put("galician", GalicianAnalyzerProvider::new);
-        analyzers.put("german", GermanAnalyzerProvider::new);
-        analyzers.put("greek", GreekAnalyzerProvider::new);
-        analyzers.put("hindi", HindiAnalyzerProvider::new);
-        analyzers.put("hungarian", HungarianAnalyzerProvider::new);
-        analyzers.put("indonesian", IndonesianAnalyzerProvider::new);
-        analyzers.put("irish", IrishAnalyzerProvider::new);
-        analyzers.put("italian", ItalianAnalyzerProvider::new);
-        analyzers.put("latvian", LatvianAnalyzerProvider::new);
-        analyzers.put("lithuanian", LithuanianAnalyzerProvider::new);
-        analyzers.put("norwegian", NorwegianAnalyzerProvider::new);
-        analyzers.put("persian", PersianAnalyzerProvider::new);
-        analyzers.put("portuguese", PortugueseAnalyzerProvider::new);
-        analyzers.put("romanian", RomanianAnalyzerProvider::new);
-        analyzers.put("russian", RussianAnalyzerProvider::new);
-        analyzers.put("sorani", SoraniAnalyzerProvider::new);
-        analyzers.put("spanish", SpanishAnalyzerProvider::new);
-        analyzers.put("swedish", SwedishAnalyzerProvider::new);
-        analyzers.put("turkish", TurkishAnalyzerProvider::new);
-        analyzers.put("thai", ThaiAnalyzerProvider::new);
-        analyzers.put("fingerprint", FingerprintAnalyzerProvider::new);
-    }
-
-    private <T> Map<String, T> buildMapping(boolean analyzer, String toBuild, IndexSettings settings, Map<String, Settings> settingsMap, Map<String, AnalysisModule.AnalysisProvider<T>> providerMap, Map<String, AnalysisModule.AnalysisProvider<T>> defaultInstance) throws IOException {
+    private <T> Map<String, T> buildMapping(boolean analyzer, String toBuild, IndexSettings settings, Map<String, Settings> settingsMap,
+                                            Map<String, AnalysisModule.AnalysisProvider<T>> providerMap, Map<String, AnalysisModule.AnalysisProvider<T>> defaultInstance)
+        throws IOException {
         Settings defaultSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, settings.getIndexVersionCreated()).build();
         Map<String, T> factories = new HashMap<>();
         for (Map.Entry<String, Settings> entry : settingsMap.entrySet()) {
@@ -383,7 +234,7 @@ public final class AnalysisRegistry implements Closeable {
     private static class PrebuiltAnalysis implements Closeable {

-        final Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider>> analyzerProviderFactories;
+        final Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<?>>> analyzerProviderFactories;
         final Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizerFactories;
         final Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilterFactories;
         final Map<String, AnalysisModule.AnalysisProvider<CharFilterFactory>> charFilterFactories;
@@ -446,7 +297,7 @@ public final class AnalysisRegistry implements Closeable {
            return tokenizerFactories.get(name);
        }

-        public AnalysisModule.AnalysisProvider<AnalyzerProvider> getAnalyzerProvider(String name) {
+        public AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> getAnalyzerProvider(String name) {
            return analyzerProviderFactories.get(name);
        }

View File (AnalysisService.java)

@@ -48,7 +48,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
     private final NamedAnalyzer defaultSearchQuoteAnalyzer;

     public AnalysisService(IndexSettings indexSettings,
-                           Map<String, AnalyzerProvider> analyzerProviders,
+                           Map<String, AnalyzerProvider<?>> analyzerProviders,
                            Map<String, TokenizerFactory> tokenizerFactoryFactories,
                            Map<String, CharFilterFactory> charFilterFactoryFactories,
                            Map<String, TokenFilterFactory> tokenFilterFactoryFactories) {
@@ -69,8 +69,8 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
         }
         Map<String, NamedAnalyzer> analyzers = new HashMap<>();
-        for (Map.Entry<String, AnalyzerProvider> entry : analyzerProviders.entrySet()) {
-            AnalyzerProvider analyzerFactory = entry.getValue();
+        for (Map.Entry<String, AnalyzerProvider<?>> entry : analyzerProviders.entrySet()) {
+            AnalyzerProvider<?> analyzerFactory = entry.getValue();
             String name = entry.getKey();
             /*
              * Lucene defaults positionIncrementGap to 0 in all analyzers but

View File (PreBuiltAnalyzerProviderFactory.java)

@@ -32,7 +32,7 @@ import java.io.IOException;
 /**
  *
  */
-public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider<AnalyzerProvider> {
+public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> {

     private final PreBuiltAnalyzerProvider analyzerProvider;
@@ -40,7 +40,7 @@ public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider
         analyzerProvider = new PreBuiltAnalyzerProvider(name, scope, analyzer);
     }

-    public AnalyzerProvider create(String name, Settings settings) {
+    public AnalyzerProvider<?> create(String name, Settings settings) {
         Version indexVersion = Version.indexCreated(settings);
         if (!Version.CURRENT.equals(indexVersion)) {
             PreBuiltAnalyzers preBuiltAnalyzers = PreBuiltAnalyzers.getOrDefault(name, null);
@@ -54,7 +54,8 @@ public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider
     }

     @Override
-    public AnalyzerProvider get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
+    public AnalyzerProvider<?> get(IndexSettings indexSettings, Environment environment, String name, Settings settings)
+        throws IOException {
         return create(name, settings);
     }

View File (GeoShapeQueryBuilder.java)

@@ -378,6 +378,10 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder>
         if (!response.isExists()) {
             throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found");
         }
+        if (response.isSourceEmpty()) {
+            throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() +
+                "] source disabled");
+        }

         String[] pathElements = path.split("\\.");
         int currentPathSlot = 0;
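
The added check closes a gap: a shape document can exist while its index has _source disabled, and in that case the shape bytes cannot be read back. A small illustrative guard with the same two-step validation (plain Java, stand-in parameters rather than the real GetResponse):

    final class ShapeFetchGuard {
        static void ensureShapeReadable(boolean exists, boolean sourceEmpty, String id, String type) {
            if (!exists) {
                throw new IllegalArgumentException("Shape with ID [" + id + "] in type [" + type + "] not found");
            }
            if (sourceEmpty) {
                // the shape index has _source disabled, so there is nothing to deserialize
                throw new IllegalArgumentException("Shape with ID [" + id + "] in type [" + type + "] source disabled");
            }
        }
    }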

View File (InnerHitBuilder.java)

@@ -71,14 +71,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         PARSER.declareBoolean(InnerHitBuilder::setExplain, SearchSourceBuilder.EXPLAIN_FIELD);
         PARSER.declareBoolean(InnerHitBuilder::setVersion, SearchSourceBuilder.VERSION_FIELD);
         PARSER.declareBoolean(InnerHitBuilder::setTrackScores, SearchSourceBuilder.TRACK_SCORES_FIELD);
-        PARSER.declareStringArray(InnerHitBuilder::setStoredFieldNames, SearchSourceBuilder.STORED_FIELDS_FIELD);
-        PARSER.declareField((p, i, c) -> {
-            throw new ParsingException(p.getTokenLocation(), "The field [" +
-                SearchSourceBuilder.FIELDS_FIELD + "] is not longer supported, please use [" +
-                SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " +
-                "if the field is not stored");
-        }, SearchSourceBuilder.FIELDS_FIELD, ObjectParser.ValueType.STRING_ARRAY);
-        PARSER.declareStringArray(InnerHitBuilder::setDocValueFields, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD);
+        PARSER.declareStringArray(InnerHitBuilder::setFieldNames, SearchSourceBuilder.FIELDS_FIELD);
+        PARSER.declareStringArray(InnerHitBuilder::setFieldDataFields, SearchSourceBuilder.FIELDDATA_FIELDS_FIELD);
         PARSER.declareField((p, i, c) -> {
             try {
                 Set<ScriptField> scriptFields = new HashSet<>();
@@ -137,10 +131,10 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
     private boolean version;
     private boolean trackScores;
-    private List<String> storedFieldNames;
+    private List<String> fieldNames;
     private QueryBuilder query = DEFAULT_INNER_HIT_QUERY;
     private List<SortBuilder<?>> sorts;
-    private List<String> docValueFields;
+    private List<String> fieldDataFields;
     private Set<ScriptField> scriptFields;
     private HighlightBuilder highlightBuilder;
     private FetchSourceContext fetchSourceContext;
@@ -149,6 +143,46 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
     public InnerHitBuilder() {
     }

+    /**
+     * Read from a stream.
+     */
+    public InnerHitBuilder(StreamInput in) throws IOException {
+        name = in.readOptionalString();
+        nestedPath = in.readOptionalString();
+        parentChildType = in.readOptionalString();
+        from = in.readVInt();
+        size = in.readVInt();
+        explain = in.readBoolean();
+        version = in.readBoolean();
+        trackScores = in.readBoolean();
+        fieldNames = (List<String>) in.readGenericValue();
+        fieldDataFields = (List<String>) in.readGenericValue();
+        if (in.readBoolean()) {
+            int size = in.readVInt();
+            scriptFields = new HashSet<>(size);
+            for (int i = 0; i < size; i++) {
+                scriptFields.add(new ScriptField(in));
+            }
+        }
+        fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
+        if (in.readBoolean()) {
+            int size = in.readVInt();
+            sorts = new ArrayList<>(size);
+            for (int i = 0; i < size; i++) {
+                sorts.add(in.readNamedWriteable(SortBuilder.class));
+            }
+        }
+        highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
+        query = in.readNamedWriteable(QueryBuilder.class);
+        if (in.readBoolean()) {
+            int size = in.readVInt();
+            childInnerHits = new HashMap<>(size);
+            for (int i = 0; i < size; i++) {
+                childInnerHits.put(in.readString(), new InnerHitBuilder(in));
+            }
+        }
+    }
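
The constructor reads fields in exactly the order writeTo writes them; optional collections travel as a presence flag followed by a count and the elements. A self-contained sketch of that framing with plain java.io streams (the real classes are StreamInput/StreamOutput, which this deliberately does not use):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    final class OptionalListCodec {
        // presence flag + size + elements, the same shape as the ctor/writeTo pair above
        static void writeOptionalList(DataOutputStream out, List<String> list) throws IOException {
            out.writeBoolean(list != null);
            if (list != null) {
                out.writeInt(list.size());
                for (String s : list) {
                    out.writeUTF(s);
                }
            }
        }

        static List<String> readOptionalList(DataInputStream in) throws IOException {
            if (!in.readBoolean()) {
                return null; // absent collection round-trips as null
            }
            int size = in.readInt();
            List<String> list = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                list.add(in.readUTF());
            }
            return list;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            writeOptionalList(new DataOutputStream(bytes), Arrays.asList("a", "b"));
            System.out.println(readOptionalList(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))));
        }
    }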
     private InnerHitBuilder(InnerHitBuilder other) {
         name = other.name;
         from = other.from;
@@ -156,11 +190,11 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         explain = other.explain;
         version = other.version;
         trackScores = other.trackScores;
-        if (other.storedFieldNames != null) {
-            storedFieldNames = new ArrayList<>(other.storedFieldNames);
+        if (other.fieldNames != null) {
+            fieldNames = new ArrayList<>(other.fieldNames);
         }
-        if (other.docValueFields != null) {
-            docValueFields = new ArrayList<>(other.docValueFields);
+        if (other.fieldDataFields != null) {
+            fieldDataFields = new ArrayList<>(other.fieldDataFields);
         }
         if (other.scriptFields != null) {
             scriptFields = new HashSet<>(other.scriptFields);
@@ -198,46 +232,6 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         }
     }

-    /**
-     * Read from a stream.
-     */
-    public InnerHitBuilder(StreamInput in) throws IOException {
-        name = in.readOptionalString();
-        nestedPath = in.readOptionalString();
-        parentChildType = in.readOptionalString();
-        from = in.readVInt();
-        size = in.readVInt();
-        explain = in.readBoolean();
-        version = in.readBoolean();
-        trackScores = in.readBoolean();
-        storedFieldNames = (List<String>) in.readGenericValue();
-        docValueFields = (List<String>) in.readGenericValue();
-        if (in.readBoolean()) {
-            int size = in.readVInt();
-            scriptFields = new HashSet<>(size);
-            for (int i = 0; i < size; i++) {
-                scriptFields.add(new ScriptField(in));
-            }
-        }
-        fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
-        if (in.readBoolean()) {
-            int size = in.readVInt();
-            sorts = new ArrayList<>(size);
-            for (int i = 0; i < size; i++) {
-                sorts.add(in.readNamedWriteable(SortBuilder.class));
-            }
-        }
-        highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
-        query = in.readNamedWriteable(QueryBuilder.class);
-        if (in.readBoolean()) {
-            int size = in.readVInt();
-            childInnerHits = new HashMap<>(size);
-            for (int i = 0; i < size; i++) {
-                childInnerHits.put(in.readString(), new InnerHitBuilder(in));
-            }
-        }
-    }
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(name);
@@ -248,8 +242,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         out.writeBoolean(explain);
         out.writeBoolean(version);
         out.writeBoolean(trackScores);
-        out.writeGenericValue(storedFieldNames);
-        out.writeGenericValue(docValueFields);
+        out.writeGenericValue(fieldNames);
+        out.writeGenericValue(fieldDataFields);
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {
@@ -340,103 +334,29 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         return this;
     }

-    /**
-     * Gets the stored fields to load and return.
-     *
-     * @deprecated Use {@link InnerHitBuilder#getStoredFieldNames()} instead.
-     */
-    @Deprecated
     public List<String> getFieldNames() {
-        return storedFieldNames;
+        return fieldNames;
     }

-    /**
-     * Sets the stored fields to load and return. If none
-     * are specified, the source of the document will be returned.
-     *
-     * @deprecated Use {@link InnerHitBuilder#setStoredFieldNames(List)} instead.
-     */
-    @Deprecated
     public InnerHitBuilder setFieldNames(List<String> fieldNames) {
-        this.storedFieldNames = fieldNames;
+        this.fieldNames = fieldNames;
         return this;
     }

-    /**
-     * Gets the stored fields to load and return.
-     */
-    public List<String> getStoredFieldNames() {
-        return storedFieldNames;
-    }
-
-    /**
-     * Sets the stored fields to load and return. If none
-     * are specified, the source of the document will be returned.
-     */
-    public InnerHitBuilder setStoredFieldNames(List<String> fieldNames) {
-        this.storedFieldNames = fieldNames;
-        return this;
-    }
-
-    /**
-     * Gets the docvalue fields.
-     *
-     * @deprecated Use {@link InnerHitBuilder#getDocValueFields()} instead.
-     */
-    @Deprecated
     public List<String> getFieldDataFields() {
-        return docValueFields;
+        return fieldDataFields;
     }

-    /**
-     * Sets the stored fields to load from the docvalue and return.
-     *
-     * @deprecated Use {@link InnerHitBuilder#setDocValueFields(List)} instead.
-     */
-    @Deprecated
     public InnerHitBuilder setFieldDataFields(List<String> fieldDataFields) {
-        this.docValueFields = fieldDataFields;
+        this.fieldDataFields = fieldDataFields;
         return this;
     }

-    /**
-     * Adds a field to load from the docvalue and return.
-     *
-     * @deprecated Use {@link InnerHitBuilder#addDocValueField(String)} instead.
-     */
-    @Deprecated
     public InnerHitBuilder addFieldDataField(String field) {
-        if (docValueFields == null) {
-            docValueFields = new ArrayList<>();
+        if (fieldDataFields == null) {
+            fieldDataFields = new ArrayList<>();
         }
-        docValueFields.add(field);
+        fieldDataFields.add(field);
         return this;
     }
-
-    /**
-     * Gets the docvalue fields.
-     */
-    public List<String> getDocValueFields() {
-        return docValueFields;
-    }
-
-    /**
-     * Sets the stored fields to load from the docvalue and return.
-     */
-    public InnerHitBuilder setDocValueFields(List<String> docValueFields) {
-        this.docValueFields = docValueFields;
-        return this;
-    }
-
-    /**
-     * Adds a field to load from the docvalue and return.
-     */
-    public InnerHitBuilder addDocValueField(String field) {
-        if (docValueFields == null) {
-            docValueFields = new ArrayList<>();
-        }
-        docValueFields.add(field);
-        return this;
-    }
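
For reference, the restored accessors chain in the usual builder style; an illustrative fragment, not a complete program (the field names "user", "title" and "rating" are made up):

    InnerHitBuilder innerHit = new InnerHitBuilder()
        .setFieldNames(Arrays.asList("user", "title"))  // the classic "fields" option
        .addFieldDataField("rating");                   // the "fielddata_fields" option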
@@ -564,19 +484,19 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         innerHitsContext.explain(explain);
         innerHitsContext.version(version);
         innerHitsContext.trackScores(trackScores);
-        if (storedFieldNames != null) {
-            if (storedFieldNames.isEmpty()) {
+        if (fieldNames != null) {
+            if (fieldNames.isEmpty()) {
                 innerHitsContext.emptyFieldNames();
             } else {
-                for (String fieldName : storedFieldNames) {
+                for (String fieldName : fieldNames) {
                     innerHitsContext.fieldNames().add(fieldName);
                 }
             }
         }
-        if (docValueFields != null) {
+        if (fieldDataFields != null) {
             FieldDataFieldsContext fieldDataFieldsContext = innerHitsContext
                 .getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY);
-            for (String field : docValueFields) {
+            for (String field : fieldDataFields) {
                 fieldDataFieldsContext.add(new FieldDataFieldsContext.FieldDataField(field));
             }
             fieldDataFieldsContext.setHitExecutionNeeded(true);
@@ -633,20 +553,20 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
         if (fetchSourceContext != null) {
             builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext, params);
         }
-        if (storedFieldNames != null) {
-            if (storedFieldNames.size() == 1) {
-                builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0));
+        if (fieldNames != null) {
+            if (fieldNames.size() == 1) {
+                builder.field(SearchSourceBuilder.FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
             } else {
-                builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName());
-                for (String fieldName : storedFieldNames) {
+                builder.startArray(SearchSourceBuilder.FIELDS_FIELD.getPreferredName());
+                for (String fieldName : fieldNames) {
                     builder.value(fieldName);
                 }
                 builder.endArray();
             }
         }
-        if (docValueFields != null) {
-            builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
-            for (String fieldDataField : docValueFields) {
+        if (fieldDataFields != null) {
+            builder.startArray(SearchSourceBuilder.FIELDDATA_FIELDS_FIELD.getPreferredName());
+            for (String fieldDataField : fieldDataFields) {
                 builder.value(fieldDataField);
             }
             builder.endArray();
@@ -693,8 +613,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
                 Objects.equals(explain, that.explain) &&
                 Objects.equals(version, that.version) &&
                 Objects.equals(trackScores, that.trackScores) &&
-                Objects.equals(storedFieldNames, that.storedFieldNames) &&
-                Objects.equals(docValueFields, that.docValueFields) &&
+                Objects.equals(fieldNames, that.fieldNames) &&
+                Objects.equals(fieldDataFields, that.fieldDataFields) &&
                 Objects.equals(scriptFields, that.scriptFields) &&
                 Objects.equals(fetchSourceContext, that.fetchSourceContext) &&
                 Objects.equals(sorts, that.sorts) &&
@@ -705,8 +625,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeable
     @Override
     public int hashCode() {
-        return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, storedFieldNames,
-            docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits);
+        return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, fieldNames,
+            fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits);
     }

     public static InnerHitBuilder fromXContent(QueryParseContext context) throws IOException {

View File (TermsQueryBuilder.java)

@@ -328,7 +328,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
         GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id())
                 .preference("_local").routing(termsLookup.routing());
         final GetResponse getResponse = client.get(getRequest).actionGet();
-        if (getResponse.isExists()) {
+        if (getResponse.isSourceEmpty() == false) { // extract terms only if the doc source exists
             List<Object> extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap());
             terms.addAll(extractedValues);
         }
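
Note the semantics of the new condition: the source is also absent when the lookup document does not exist, so the single isSourceEmpty() check covers both the missing-document and source-disabled cases. A self-contained sketch of the guard:

    import java.util.Collections;
    import java.util.List;

    final class TermsLookupGuard {
        // sourceValues stands in for the _source of the lookup document; null means the
        // doc is missing or its source is disabled (mirroring isSourceEmpty() above)
        static List<Object> extractTerms(List<Object> sourceValues) {
            if (sourceValues == null) {
                return Collections.emptyList(); // nothing to extract, the terms query matches nothing
            }
            return sourceValues;
        }
    }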

View File (IndicesService.java)

@@ -425,12 +425,13 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService>
             // this will also fail if some plugin fails etc. which is nice since we can verify that early
             final IndexService service = createIndexService("metadata verification", nodeServicesProvider,
                 metaData, indicesQueryCache, indicesFieldDataCache, Collections.emptyList());
+            closeables.add(() -> service.close("metadata verification", false));
             for (ObjectCursor<MappingMetaData> typeMapping : metaData.getMappings().values()) {
                 // don't apply the default mapping, it has been applied when the mapping was created
                 service.mapperService().merge(typeMapping.value.type(), typeMapping.value.source(),
                     MapperService.MergeReason.MAPPING_RECOVERY, true);
             }
-            closeables.add(() -> service.close("metadata verification", false));
+            service.getIndexSettings().getScopedSettings().validateUpdate(metaData.getSettings());
         } finally {
             IOUtils.close(closeables);
         }
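
The reorder above matters for cleanup: the close callback is now registered immediately after the IndexService is created, so a failure in the mapping merges or in the newly added validateUpdate call still closes the service through the finally block. The general shape of that rule, as a self-contained sketch:

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    final class RegisterBeforeRisk {
        static void run(Closeable resource, Runnable riskyWork) throws IOException {
            List<Closeable> closeables = new ArrayList<>();
            try {
                closeables.add(resource); // register cleanup before any call that can throw
                riskyWork.run();
            } finally {
                // simplified stand-in for IOUtils.close(closeables)
                for (Closeable c : closeables) {
                    c.close();
                }
            }
        }
    }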

View File (AnalysisModule.java)

@@ -19,49 +19,141 @@
 package org.elasticsearch.indices.analysis;

-import org.apache.lucene.analysis.hunspell.Dictionary;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.ASCIIFoldingTokenFilterFactory;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.analysis.AnalyzerProvider;
+import org.elasticsearch.index.analysis.ApostropheFilterFactory;
+import org.elasticsearch.index.analysis.ArabicAnalyzerProvider;
+import org.elasticsearch.index.analysis.ArabicNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.ArabicStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.ArmenianAnalyzerProvider;
+import org.elasticsearch.index.analysis.BasqueAnalyzerProvider;
+import org.elasticsearch.index.analysis.BrazilianAnalyzerProvider;
+import org.elasticsearch.index.analysis.BrazilianStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.BulgarianAnalyzerProvider;
+import org.elasticsearch.index.analysis.CJKBigramFilterFactory;
+import org.elasticsearch.index.analysis.CJKWidthFilterFactory;
+import org.elasticsearch.index.analysis.CatalanAnalyzerProvider;
 import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.ChineseAnalyzerProvider;
+import org.elasticsearch.index.analysis.CjkAnalyzerProvider;
+import org.elasticsearch.index.analysis.ClassicFilterFactory;
+import org.elasticsearch.index.analysis.ClassicTokenizerFactory;
+import org.elasticsearch.index.analysis.CommonGramsTokenFilterFactory;
+import org.elasticsearch.index.analysis.CzechAnalyzerProvider;
+import org.elasticsearch.index.analysis.CzechStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.DanishAnalyzerProvider;
+import org.elasticsearch.index.analysis.DecimalDigitFilterFactory;
+import org.elasticsearch.index.analysis.DelimitedPayloadTokenFilterFactory;
+import org.elasticsearch.index.analysis.DutchAnalyzerProvider;
+import org.elasticsearch.index.analysis.DutchStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.EdgeNGramTokenFilterFactory;
+import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory;
+import org.elasticsearch.index.analysis.ElisionTokenFilterFactory;
+import org.elasticsearch.index.analysis.EnglishAnalyzerProvider;
+import org.elasticsearch.index.analysis.FingerprintAnalyzerProvider;
+import org.elasticsearch.index.analysis.FingerprintTokenFilterFactory;
+import org.elasticsearch.index.analysis.FinnishAnalyzerProvider;
+import org.elasticsearch.index.analysis.FrenchAnalyzerProvider;
+import org.elasticsearch.index.analysis.FrenchStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.GalicianAnalyzerProvider;
+import org.elasticsearch.index.analysis.GermanAnalyzerProvider;
+import org.elasticsearch.index.analysis.GermanNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.GermanStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.GreekAnalyzerProvider;
+import org.elasticsearch.index.analysis.HindiAnalyzerProvider;
+import org.elasticsearch.index.analysis.HindiNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.HtmlStripCharFilterFactory;
+import org.elasticsearch.index.analysis.HungarianAnalyzerProvider;
+import org.elasticsearch.index.analysis.HunspellTokenFilterFactory;
+import org.elasticsearch.index.analysis.IndicNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.IndonesianAnalyzerProvider;
+import org.elasticsearch.index.analysis.IrishAnalyzerProvider;
+import org.elasticsearch.index.analysis.ItalianAnalyzerProvider;
+import org.elasticsearch.index.analysis.KStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.KeepTypesFilterFactory;
+import org.elasticsearch.index.analysis.KeepWordFilterFactory;
+import org.elasticsearch.index.analysis.KeywordAnalyzerProvider;
+import org.elasticsearch.index.analysis.KeywordMarkerTokenFilterFactory;
+import org.elasticsearch.index.analysis.KeywordTokenizerFactory;
+import org.elasticsearch.index.analysis.LatvianAnalyzerProvider;
+import org.elasticsearch.index.analysis.LengthTokenFilterFactory;
+import org.elasticsearch.index.analysis.LetterTokenizerFactory;
+import org.elasticsearch.index.analysis.LimitTokenCountFilterFactory;
+import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider;
+import org.elasticsearch.index.analysis.LowerCaseTokenFilterFactory;
+import org.elasticsearch.index.analysis.LowerCaseTokenizerFactory;
+import org.elasticsearch.index.analysis.MappingCharFilterFactory;
+import org.elasticsearch.index.analysis.NGramTokenFilterFactory;
+import org.elasticsearch.index.analysis.NGramTokenizerFactory;
+import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider;
+import org.elasticsearch.index.analysis.PathHierarchyTokenizerFactory;
+import org.elasticsearch.index.analysis.PatternAnalyzerProvider;
+import org.elasticsearch.index.analysis.PatternCaptureGroupTokenFilterFactory;
+import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
+import org.elasticsearch.index.analysis.PatternReplaceTokenFilterFactory;
+import org.elasticsearch.index.analysis.PatternTokenizerFactory;
+import org.elasticsearch.index.analysis.PersianAnalyzerProvider;
+import org.elasticsearch.index.analysis.PersianNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.PorterStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.PortugueseAnalyzerProvider;
+import org.elasticsearch.index.analysis.ReverseTokenFilterFactory;
+import org.elasticsearch.index.analysis.RomanianAnalyzerProvider;
+import org.elasticsearch.index.analysis.RussianAnalyzerProvider;
+import org.elasticsearch.index.analysis.RussianStemTokenFilterFactory;
+import org.elasticsearch.index.analysis.ScandinavianFoldingFilterFactory;
+import org.elasticsearch.index.analysis.ScandinavianNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.SerbianNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
+import org.elasticsearch.index.analysis.SimpleAnalyzerProvider;
+import org.elasticsearch.index.analysis.SnowballAnalyzerProvider;
+import org.elasticsearch.index.analysis.SnowballTokenFilterFactory;
+import org.elasticsearch.index.analysis.SoraniAnalyzerProvider;
+import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory;
+import org.elasticsearch.index.analysis.SpanishAnalyzerProvider;
+import org.elasticsearch.index.analysis.StandardAnalyzerProvider;
+import org.elasticsearch.index.analysis.StandardHtmlStripAnalyzerProvider;
+import org.elasticsearch.index.analysis.StandardTokenFilterFactory;
+import org.elasticsearch.index.analysis.StandardTokenizerFactory;
+import org.elasticsearch.index.analysis.StemmerOverrideTokenFilterFactory;
+import org.elasticsearch.index.analysis.StemmerTokenFilterFactory;
+import org.elasticsearch.index.analysis.StopAnalyzerProvider;
+import org.elasticsearch.index.analysis.StopTokenFilterFactory;
+import org.elasticsearch.index.analysis.SwedishAnalyzerProvider;
+import org.elasticsearch.index.analysis.ThaiAnalyzerProvider;
+import org.elasticsearch.index.analysis.ThaiTokenizerFactory;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.analysis.TokenizerFactory;
+import org.elasticsearch.index.analysis.TrimTokenFilterFactory;
+import org.elasticsearch.index.analysis.TruncateTokenFilterFactory;
+import org.elasticsearch.index.analysis.TurkishAnalyzerProvider;
+import org.elasticsearch.index.analysis.UAX29URLEmailTokenizerFactory;
+import org.elasticsearch.index.analysis.UniqueTokenFilterFactory;
+import org.elasticsearch.index.analysis.UpperCaseTokenFilterFactory;
+import org.elasticsearch.index.analysis.WhitespaceAnalyzerProvider;
+import org.elasticsearch.index.analysis.WhitespaceTokenizerFactory;
+import org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactory;
+import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
+import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory;
+import org.elasticsearch.plugins.AnalysisPlugin;

 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.function.Function;

+import static java.util.Objects.requireNonNull;

 /**
- * The AnalysisModule is the main extension point for node and index level analysis components. The lucene classes
- * {@link org.apache.lucene.analysis.Analyzer}, {@link org.apache.lucene.analysis.TokenFilter}, {@link org.apache.lucene.analysis.Tokenizer}
- * and {@link org.apache.lucene.analysis.CharFilter} can be extended in plugins and registered on node startup when the analysis module
- * gets loaded. Since elasticsearch needs to create multiple instances for different configurations dedicated factories need to be provided for
- * each of the components:
- * <ul>
- * <li> {@link org.apache.lucene.analysis.Analyzer} can be exposed via {@link AnalyzerProvider} and registered on {@link #registerAnalyzer(String, AnalysisProvider)}</li>
- * <li> {@link org.apache.lucene.analysis.TokenFilter} can be exposed via {@link TokenFilterFactory} and registered on {@link #registerTokenFilter(String, AnalysisProvider)}</li>
- * <li> {@link org.apache.lucene.analysis.Tokenizer} can be exposed via {@link TokenizerFactory} and registered on {@link #registerTokenizer(String, AnalysisProvider)}</li>
- * <li> {@link org.apache.lucene.analysis.CharFilter} can be exposed via {@link CharFilterFactory} and registered on {@link #registerCharFilter(String, AnalysisProvider)}</li>
- * </ul>
- *
- * The {@link org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider} is only a functional interface that allows to register factory constructors directly like the plugin example below:
- * <pre>
- * public class MyAnalysisPlugin extends Plugin {
- *     public void onModule(AnalysisModule module) {
- *         module.registerAnalyzer("my-analyzer-name", MyAnalyzer::new);
- *     }
- * }
- * </pre>
+ * Sets up {@link AnalysisRegistry}.
  */
-public final class AnalysisModule extends AbstractModule {
+public final class AnalysisModule {
     static {
         Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
@@ -71,108 +163,195 @@ public final class AnalysisModule extends AbstractModule {
         NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY);
     }

     private static final IndexSettings NA_INDEX_SETTINGS;

-    private final Environment environment;
-    private final Map<String, AnalysisProvider<CharFilterFactory>> charFilters = new HashMap<>();
-    private final Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>();
-    private final Map<String, AnalysisProvider<TokenizerFactory>> tokenizers = new HashMap<>();
-    private final Map<String, AnalysisProvider<AnalyzerProvider>> analyzers = new HashMap<>();
-    private final Map<String, org.apache.lucene.analysis.hunspell.Dictionary> knownDictionaries = new HashMap<>();
+    private final HunspellService hunspellService;
+    private final AnalysisRegistry analysisRegistry;

-    /**
-     * Creates a new AnalysisModule
-     */
-    public AnalysisModule(Environment environment) {
-        this.environment = environment;
+    public AnalysisModule(Environment environment, List<AnalysisPlugin> plugins) throws IOException {
+        NamedRegistry<AnalysisProvider<CharFilterFactory>> charFilters = setupCharFilters(plugins);
+        NamedRegistry<org.apache.lucene.analysis.hunspell.Dictionary> hunspellDictionaries = setupHunspellDictionaries(plugins);
+        hunspellService = new HunspellService(environment.settings(), environment, hunspellDictionaries.registry);
+        NamedRegistry<AnalysisProvider<TokenFilterFactory>> tokenFilters = setupTokenFilters(plugins, hunspellService);
+        NamedRegistry<AnalysisProvider<TokenizerFactory>> tokenizers = setupTokenizers(plugins);
+        NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> analyzers = setupAnalyzers(plugins);
+        analysisRegistry = new AnalysisRegistry(environment, charFilters.registry, tokenFilters.registry,
+            tokenizers.registry, analyzers.registry);
     }
+
+    HunspellService getHunspellService() {
+        return hunspellService;
+    }
+
+    public AnalysisRegistry getAnalysisRegistry() {
+        return analysisRegistry;
+    }
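
With the Guice configure()/bind() step gone, callers construct the module directly and pull the registry from it. An illustrative wiring fragment, not verbatim node code; the pluginsService.filterPlugins call is an assumption about how AnalysisPlugin instances are handed over:

    AnalysisModule analysisModule = new AnalysisModule(environment, pluginsService.filterPlugins(AnalysisPlugin.class));
    AnalysisRegistry analysisRegistry = analysisModule.getAnalysisRegistry();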
+    private NamedRegistry<AnalysisProvider<CharFilterFactory>> setupCharFilters(List<AnalysisPlugin> plugins) {
+        NamedRegistry<AnalysisProvider<CharFilterFactory>> charFilters = new NamedRegistry<>("char_filter");
+        charFilters.register("html_strip", HtmlStripCharFilterFactory::new);
+        charFilters.register("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new));
+        charFilters.register("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new));
+        charFilters.registerPlugins(plugins, AnalysisPlugin::getCharFilters);
+        return charFilters;
+    }
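
Under the new pull-based contract a plugin no longer mutates the module via onModule; it returns its components from an interface method and registerPlugins collects them. A hypothetical plugin sketch (MyCharFilterFactory is assumed to exist with a constructor matching AnalysisProvider's get(IndexSettings, Environment, String, Settings) shape):

    import java.util.Collections;
    import java.util.Map;

    import org.elasticsearch.index.analysis.CharFilterFactory;
    import org.elasticsearch.indices.analysis.AnalysisModule;
    import org.elasticsearch.plugins.AnalysisPlugin;
    import org.elasticsearch.plugins.Plugin;

    public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
        @Override
        public Map<String, AnalysisModule.AnalysisProvider<CharFilterFactory>> getCharFilters() {
            // the key is the name users reference in their analysis settings
            return Collections.singletonMap("my_char_filter", MyCharFilterFactory::new);
        }
    }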
+    public NamedRegistry<org.apache.lucene.analysis.hunspell.Dictionary> setupHunspellDictionaries(List<AnalysisPlugin> plugins) {
+        NamedRegistry<org.apache.lucene.analysis.hunspell.Dictionary> hunspellDictionaries = new NamedRegistry<>("dictionary");
+        hunspellDictionaries.registerPlugins(plugins, AnalysisPlugin::getHunspellDictionaries);
+        return hunspellDictionaries;
+    }
+
+    private NamedRegistry<AnalysisProvider<TokenFilterFactory>> setupTokenFilters(List<AnalysisPlugin> plugins,
+            HunspellService hunspellService) {
+        NamedRegistry<AnalysisProvider<TokenFilterFactory>> tokenFilters = new NamedRegistry<>("token_filter");
+        tokenFilters.register("stop", StopTokenFilterFactory::new);
+        tokenFilters.register("reverse", ReverseTokenFilterFactory::new);
+        tokenFilters.register("asciifolding", ASCIIFoldingTokenFilterFactory::new);
+        tokenFilters.register("length", LengthTokenFilterFactory::new);
+        tokenFilters.register("lowercase", LowerCaseTokenFilterFactory::new);
+        tokenFilters.register("uppercase", UpperCaseTokenFilterFactory::new);
+        tokenFilters.register("porter_stem", PorterStemTokenFilterFactory::new);
+        tokenFilters.register("kstem", KStemTokenFilterFactory::new);
+        tokenFilters.register("standard", StandardTokenFilterFactory::new);
+        tokenFilters.register("nGram", NGramTokenFilterFactory::new);
+        tokenFilters.register("ngram", NGramTokenFilterFactory::new);
+        tokenFilters.register("edgeNGram", EdgeNGramTokenFilterFactory::new);
+        tokenFilters.register("edge_ngram", EdgeNGramTokenFilterFactory::new);
+        tokenFilters.register("shingle", ShingleTokenFilterFactory::new);
+        tokenFilters.register("unique", UniqueTokenFilterFactory::new);
+        tokenFilters.register("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new));
+        tokenFilters.register("trim", TrimTokenFilterFactory::new);
+        tokenFilters.register("limit", LimitTokenCountFilterFactory::new);
+        tokenFilters.register("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new));
+        tokenFilters.register("snowball", SnowballTokenFilterFactory::new);
+        tokenFilters.register("stemmer", StemmerTokenFilterFactory::new);
+        tokenFilters.register("word_delimiter", WordDelimiterTokenFilterFactory::new);
+        tokenFilters.register("delimited_payload_filter", DelimitedPayloadTokenFilterFactory::new);
+        tokenFilters.register("elision", ElisionTokenFilterFactory::new);
+        tokenFilters.register("keep", requriesAnalysisSettings(KeepWordFilterFactory::new));
+        tokenFilters.register("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new));
+        tokenFilters.register("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
+        tokenFilters.register("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new));
+        tokenFilters.register("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
+        tokenFilters.register("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
+        tokenFilters.register("arabic_stem", ArabicStemTokenFilterFactory::new);
+        tokenFilters.register("brazilian_stem", BrazilianStemTokenFilterFactory::new);
+        tokenFilters.register("czech_stem", CzechStemTokenFilterFactory::new);
+        tokenFilters.register("dutch_stem", DutchStemTokenFilterFactory::new);
+        tokenFilters.register("french_stem", FrenchStemTokenFilterFactory::new);
+        tokenFilters.register("german_stem", GermanStemTokenFilterFactory::new);
+        tokenFilters.register("russian_stem", RussianStemTokenFilterFactory::new);
+        tokenFilters.register("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
+        tokenFilters.register("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
+        tokenFilters.register("arabic_normalization", ArabicNormalizationFilterFactory::new);
+        tokenFilters.register("german_normalization", GermanNormalizationFilterFactory::new);
+        tokenFilters.register("hindi_normalization", HindiNormalizationFilterFactory::new);
+        tokenFilters.register("indic_normalization", IndicNormalizationFilterFactory::new);
+        tokenFilters.register("sorani_normalization", SoraniNormalizationFilterFactory::new);
+        tokenFilters.register("persian_normalization", PersianNormalizationFilterFactory::new);
+        tokenFilters.register("scandinavian_normalization", ScandinavianNormalizationFilterFactory::new);
+        tokenFilters.register("scandinavian_folding", ScandinavianFoldingFilterFactory::new);
+        tokenFilters.register("serbian_normalization", SerbianNormalizationFilterFactory::new);
+        tokenFilters.register("hunspell", requriesAnalysisSettings(
+            (indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService)));
+        tokenFilters.register("cjk_bigram", CJKBigramFilterFactory::new);
+        tokenFilters.register("cjk_width", CJKWidthFilterFactory::new);
+        tokenFilters.register("apostrophe", ApostropheFilterFactory::new);
+        tokenFilters.register("classic", ClassicFilterFactory::new);
+        tokenFilters.register("decimal_digit", DecimalDigitFilterFactory::new);
+        tokenFilters.register("fingerprint", FingerprintTokenFilterFactory::new);
+        tokenFilters.registerPlugins(plugins, AnalysisPlugin::getTokenFilters);
+        return tokenFilters;
+    }
+    private NamedRegistry<AnalysisProvider<TokenizerFactory>> setupTokenizers(List<AnalysisPlugin> plugins) {
+        NamedRegistry<AnalysisProvider<TokenizerFactory>> tokenizers = new NamedRegistry<>("tokenizer");
+        tokenizers.register("standard", StandardTokenizerFactory::new);
+        tokenizers.register("uax_url_email", UAX29URLEmailTokenizerFactory::new);
+        tokenizers.register("path_hierarchy", PathHierarchyTokenizerFactory::new);
+        tokenizers.register("PathHierarchy", PathHierarchyTokenizerFactory::new);
+        tokenizers.register("keyword", KeywordTokenizerFactory::new);
+        tokenizers.register("letter", LetterTokenizerFactory::new);
+        tokenizers.register("lowercase", LowerCaseTokenizerFactory::new);
+        tokenizers.register("whitespace", WhitespaceTokenizerFactory::new);
+        tokenizers.register("nGram", NGramTokenizerFactory::new);
+        tokenizers.register("ngram", NGramTokenizerFactory::new);
+        tokenizers.register("edgeNGram", EdgeNGramTokenizerFactory::new);
+        tokenizers.register("edge_ngram", EdgeNGramTokenizerFactory::new);
+        tokenizers.register("pattern", PatternTokenizerFactory::new);
+        tokenizers.register("classic", ClassicTokenizerFactory::new);
+        tokenizers.register("thai", ThaiTokenizerFactory::new);
+        tokenizers.registerPlugins(plugins, AnalysisPlugin::getTokenizers);
+        return tokenizers;
+    }
+
+    private NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> setupAnalyzers(List<AnalysisPlugin> plugins) {
+        NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> analyzers = new NamedRegistry<>("analyzer");
+        analyzers.register("default", StandardAnalyzerProvider::new);
+        analyzers.register("standard", StandardAnalyzerProvider::new);
+        analyzers.register("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
+        analyzers.register("simple", SimpleAnalyzerProvider::new);
+        analyzers.register("stop", StopAnalyzerProvider::new);
+        analyzers.register("whitespace", WhitespaceAnalyzerProvider::new);
+        analyzers.register("keyword", KeywordAnalyzerProvider::new);
+        analyzers.register("pattern", PatternAnalyzerProvider::new);
+        analyzers.register("snowball", SnowballAnalyzerProvider::new);
+        analyzers.register("arabic", ArabicAnalyzerProvider::new);
+        analyzers.register("armenian", ArmenianAnalyzerProvider::new);
+        analyzers.register("basque", BasqueAnalyzerProvider::new);
+        analyzers.register("brazilian", BrazilianAnalyzerProvider::new);
+        analyzers.register("bulgarian", BulgarianAnalyzerProvider::new);
+        analyzers.register("catalan", CatalanAnalyzerProvider::new);
+        analyzers.register("chinese", ChineseAnalyzerProvider::new);
+        analyzers.register("cjk", CjkAnalyzerProvider::new);
+        analyzers.register("czech", CzechAnalyzerProvider::new);
+        analyzers.register("danish", DanishAnalyzerProvider::new);
+        analyzers.register("dutch", DutchAnalyzerProvider::new);
+        analyzers.register("english", EnglishAnalyzerProvider::new);
+        analyzers.register("finnish", FinnishAnalyzerProvider::new);
+        analyzers.register("french", FrenchAnalyzerProvider::new);
+        analyzers.register("galician", GalicianAnalyzerProvider::new);
+        analyzers.register("german", GermanAnalyzerProvider::new);
+        analyzers.register("greek", GreekAnalyzerProvider::new);
+        analyzers.register("hindi", HindiAnalyzerProvider::new);
+        analyzers.register("hungarian", HungarianAnalyzerProvider::new);
+        analyzers.register("indonesian", IndonesianAnalyzerProvider::new);
+        analyzers.register("irish", IrishAnalyzerProvider::new);
+        analyzers.register("italian", ItalianAnalyzerProvider::new);
+        analyzers.register("latvian", LatvianAnalyzerProvider::new);
+        analyzers.register("lithuanian", LithuanianAnalyzerProvider::new);
+        analyzers.register("norwegian", NorwegianAnalyzerProvider::new);
+        analyzers.register("persian", PersianAnalyzerProvider::new);
+        analyzers.register("portuguese", PortugueseAnalyzerProvider::new);
+        analyzers.register("romanian", RomanianAnalyzerProvider::new);
+        analyzers.register("russian", RussianAnalyzerProvider::new);
+        analyzers.register("sorani", SoraniAnalyzerProvider::new);
+        analyzers.register("spanish", SpanishAnalyzerProvider::new);
+        analyzers.register("swedish", SwedishAnalyzerProvider::new);
+        analyzers.register("turkish", TurkishAnalyzerProvider::new);
+        analyzers.register("thai", ThaiAnalyzerProvider::new);
+        analyzers.register("fingerprint", FingerprintAnalyzerProvider::new);
+        analyzers.registerPlugins(plugins, AnalysisPlugin::getAnalyzers);
+        return analyzers;
+    }
private static <T> AnalysisModule.AnalysisProvider<T> requriesAnalysisSettings(AnalysisModule.AnalysisProvider<T> provider) {
return new AnalysisModule.AnalysisProvider<T>() {
@Override
public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
return provider.get(indexSettings, environment, name, settings);
}
@Override
public boolean requiresAnalysisSettings() {
return true;
}
};
} }
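For context on how this wrapper is meant to be used: a component that can only be built from per-index analysis settings (a synonym filter needing a file path, say) is registered wrapped, so the settings-free lookup overload below rejects it instead of constructing it half-configured. A minimal sketch, assuming a hypothetical SynonymTokenFilterFactory with the standard four-argument constructor:

    // Hypothetical registration; requiresAnalysisSettings(...) flips the
    // requiresAnalysisSettings() flag so get(Environment, String) throws
    // instead of building the factory without its mandatory settings.
    tokenFilters.register("synonym", requiresAnalysisSettings(SynonymTokenFilterFactory::new));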
/** /**
* Registers a new {@link AnalysisProvider} to create * The basic factory interface for analysis components.
* {@link CharFilterFactory} instances per node as well as per index.
*/
public void registerCharFilter(String name, AnalysisProvider<CharFilterFactory> charFilter) {
if (charFilter == null) {
throw new IllegalArgumentException("char_filter provider must not be null");
}
if (charFilters.putIfAbsent(name, charFilter) != null) {
throw new IllegalArgumentException("char_filter provider for name " + name + " already registered");
}
}
/**
* Registers a new {@link AnalysisProvider} to create
* {@link TokenFilterFactory} instances per node as well as per index.
*/
public void registerTokenFilter(String name, AnalysisProvider<TokenFilterFactory> tokenFilter) {
if (tokenFilter == null) {
throw new IllegalArgumentException("token_filter provider must not be null");
}
if (tokenFilters.putIfAbsent(name, tokenFilter) != null) {
throw new IllegalArgumentException("token_filter provider for name " + name + " already registered");
}
}
/**
* Registers a new {@link AnalysisProvider} to create
* {@link TokenizerFactory} instances per node as well as per index.
*/
public void registerTokenizer(String name, AnalysisProvider<TokenizerFactory> tokenizer) {
if (tokenizer == null) {
throw new IllegalArgumentException("tokenizer provider must not be null");
}
if (tokenizers.putIfAbsent(name, tokenizer) != null) {
throw new IllegalArgumentException("tokenizer provider for name " + name + " already registered");
}
}
/**
* Registers a new {@link AnalysisProvider} to create
* {@link AnalyzerProvider} instances per node as well as per index.
*/
public void registerAnalyzer(String name, AnalysisProvider<AnalyzerProvider> analyzer) {
if (analyzer == null) {
throw new IllegalArgumentException("analyzer provider must not be null");
}
if (analyzers.putIfAbsent(name, analyzer) != null) {
throw new IllegalArgumentException("analyzer provider for name " + name + " already registered");
}
}
/**
* Registers a new hunspell {@link Dictionary} that can be referenced by the given name in
* hunspell analysis configuration.
*/
public void registerHunspellDictionary(String name, Dictionary dictionary) {
if (knownDictionaries.putIfAbsent(name, dictionary) != null) {
throw new IllegalArgumentException("dictionary for [" + name + "] is already registered");
}
}
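These register methods are the old-style extension surface reached through Plugin#onModule(AnalysisModule), the pattern that worked before this merge turned that hook into a final no-op (see the Plugin change further down). A minimal sketch of the legacy pattern, with hypothetical names:

    public class MyLegacyAnalysisPlugin extends Plugin {
        public void onModule(AnalysisModule module) {
            // duplicate names trip the putIfAbsent checks above with IllegalArgumentException
            module.registerTokenFilter("my_filter", MyTokenFilterFactory::new);
        }
    }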
@Override
protected void configure() {
try {
AnalysisRegistry registry = buildRegistry();
bind(HunspellService.class).toInstance(registry.getHunspellService());
bind(AnalysisRegistry.class).toInstance(registry);
} catch (IOException e) {
throw new ElasticsearchException("failed to load hunspell service", e);
}
}
/**
* Builds an {@link AnalysisRegistry} from the current configuration.
*/
public AnalysisRegistry buildRegistry() throws IOException {
return new AnalysisRegistry(new HunspellService(environment.settings(), environment, knownDictionaries), environment, charFilters, tokenFilters, tokenizers, analyzers);
}
/**
* AnalysisProvider is the basic factory interface for registering analysis components like:
* <ul>
* <li>{@link TokenizerFactory} - see {@link AnalysisModule#registerTokenizer(String, AnalysisProvider)}</li>
* <li>{@link CharFilterFactory} - see {@link AnalysisModule#registerCharFilter(String, AnalysisProvider)}</li>
* <li>{@link AnalyzerProvider} - see {@link AnalysisModule#registerAnalyzer(String, AnalysisProvider)}</li>
* <li>{@link TokenFilterFactory} - see {@link AnalysisModule#registerTokenFilter(String, AnalysisProvider)}</li>
* </ul>
*/ */
public interface AnalysisProvider<T> { public interface AnalysisProvider<T> {
@ -195,7 +374,8 @@ public final class AnalysisModule extends AbstractModule {
* @param name the name of the analysis component * @param name the name of the analysis component
* @return a new provider instance * @return a new provider instance
* @throws IOException if an {@link IOException} occurs * @throws IOException if an {@link IOException} occurs
* @throws IllegalArgumentException if the provider requires analysis settings, i.e. if {@link #requiresAnalysisSettings()} returns <code>true</code> * @throws IllegalArgumentException if the provider requires analysis settings, i.e. if {@link #requiresAnalysisSettings()} returns
* <code>true</code>
*/ */
default T get(Environment environment, String name) throws IOException { default T get(Environment environment, String name) throws IOException {
if (requiresAnalysisSettings()) { if (requiresAnalysisSettings()) {
@ -212,4 +392,29 @@ public final class AnalysisModule extends AbstractModule {
return false; return false;
} }
} }
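Most registrations satisfy AnalysisProvider with a constructor reference, but it can also be implemented directly when construction needs extra logic. A hedged sketch, with MyTokenizerFactory hypothetical:

    AnalysisProvider<TokenizerFactory> provider = new AnalysisProvider<TokenizerFactory>() {
        @Override
        public TokenizerFactory get(IndexSettings indexSettings, Environment environment,
                                    String name, Settings settings) throws IOException {
            // room to validate or transform the per-index settings first
            return new MyTokenizerFactory(indexSettings, environment, name, settings);
        }
    };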
private static class NamedRegistry<T> {
private final Map<String, T> registry = new HashMap<>();
private final String targetName;
public NamedRegistry(String targetName) {
this.targetName = targetName;
}
private void register(String name, T t) {
requireNonNull(name, "name is required");
requireNonNull(t, targetName + " is required");
if (registry.putIfAbsent(name, t) != null) {
throw new IllegalArgumentException(targetName + " for name " + name + " already registered");
}
}
private <P> void registerPlugins(List<P> plugins, Function<P, Map<String, T>> lookup) {
for (P plugin : plugins) {
for (Map.Entry<String, T> entry : lookup.apply(plugin).entrySet()) {
register(entry.getKey(), entry.getValue());
}
}
}
}
} }
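To make the NamedRegistry semantics concrete: names are first-come-first-served across core and plugin registrations. A toy sketch (String stands in for a real provider type):

    NamedRegistry<String> registry = new NamedRegistry<>("token_filter");
    registry.register("lowercase", "core-provider");
    // a plugin contributing the same name via registerPlugins(...) would throw:
    // IllegalArgumentException: token_filter for name lowercase already registered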
View File
@ -52,7 +52,8 @@ import java.util.function.Function;
* The following settings can be set for each dictionary: * The following settings can be set for each dictionary:
* <ul> * <ul>
* <li>{@code ignore_case} - If true, dictionary matching will be case insensitive (defaults to {@code false})</li> * <li>{@code ignore_case} - If true, dictionary matching will be case insensitive (defaults to {@code false})</li>
* <li>{@code strict_affix_parsing} - Determines whether errors while reading an affix rules file will cause an exception or simply be ignored (defaults to {@code true})</li> * <li>{@code strict_affix_parsing} - Determines whether errors while reading an affix rules file will cause an exception or simply be ignored
* (defaults to {@code true})</li>
* </ul> * </ul>
* <p> * <p>
* These settings can either be configured as node level configuration, such as: * These settings can either be configured as node level configuration, such as:
@ -86,7 +87,8 @@ public class HunspellService extends AbstractComponent {
private final Path hunspellDir; private final Path hunspellDir;
private final Function<String, Dictionary> loadingFunction; private final Function<String, Dictionary> loadingFunction;
public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries) throws IOException { public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries)
throws IOException {
super(settings); super(settings);
this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries);
this.hunspellDir = resolveHunspellDirectory(env); this.hunspellDir = resolveHunspellDirectory(env);
@ -166,7 +168,7 @@ public class HunspellService extends AbstractComponent {
// merging node settings with hunspell dictionary specific settings // merging node settings with hunspell dictionary specific settings
Settings dictSettings = HUNSPELL_DICTIONARY_OPTIONS.get(nodeSettings); Settings dictSettings = HUNSPELL_DICTIONARY_OPTIONS.get(nodeSettings);
nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale)); nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale + "."));
boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase); boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase);
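The getByPrefix(locale + ".") fix above is subtle: dictionary options live under per-locale keys, and without the trailing dot a locale such as "en" would also capture keys belonging to "en_US". A hedged sketch of node settings exercising these options, with key names assumed from the javadoc above:

    // Assumed per-dictionary keys under indices.analysis.hunspell.dictionary.<locale>.*
    Settings nodeSettings = Settings.builder()
            .put("indices.analysis.hunspell.dictionary.en_US.ignore_case", true)
            .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false)
            .build();
    // getByPrefix("en_US.") strips exactly this dictionary's prefix, leaving
    // ignore_case / strict_affix_parsing for the getAsBoolean(...) call above.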
View File
@ -29,7 +29,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
@ -56,18 +55,17 @@ import org.elasticsearch.transport.TransportService;
import java.io.IOException; import java.io.IOException;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
/** /**
* *
*/ */
public class TransportNodesListShardStoreMetaData extends TransportNodesAction<TransportNodesListShardStoreMetaData.Request, public class TransportNodesListShardStoreMetaData extends TransportNodesAction<TransportNodesListShardStoreMetaData.Request,
TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData, TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData,
TransportNodesListShardStoreMetaData.NodeRequest, TransportNodesListShardStoreMetaData.NodeRequest,
TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData>
implements AsyncShardFetch.Lister<TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData, implements AsyncShardFetch.Lister<TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData,
TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> { TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> {
public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store"; public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store";
@ -81,21 +79,14 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
IndicesService indicesService, NodeEnvironment nodeEnv, ActionFilters actionFilters, IndicesService indicesService, NodeEnvironment nodeEnv, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver) { IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
Request::new, NodeRequest::new, ThreadPool.Names.FETCH_SHARD_STORE, NodeStoreFilesMetaData.class); Request::new, NodeRequest::new, ThreadPool.Names.FETCH_SHARD_STORE, NodeStoreFilesMetaData.class);
this.indicesService = indicesService; this.indicesService = indicesService;
this.nodeEnv = nodeEnv; this.nodeEnv = nodeEnv;
} }
@Override @Override
public void list(ShardId shardId, String[] nodesIds, ActionListener<NodesStoreFilesMetaData> listener) { public void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener<NodesStoreFilesMetaData> listener) {
execute(new Request(shardId, false, nodesIds), listener); execute(new Request(shardId, nodes), listener);
}
@Override
protected String[] resolveNodes(Request request, ClusterState clusterState) {
// default implementation may filter out non existent nodes. it's important to keep exactly the ids
// we were given for accounting on the caller
return request.nodesIds();
} }
@Override @Override
@ -116,19 +107,6 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
@Override @Override
protected NodeStoreFilesMetaData nodeOperation(NodeRequest request) { protected NodeStoreFilesMetaData nodeOperation(NodeRequest request) {
if (request.unallocated) {
IndexService indexService = indicesService.indexService(request.shardId.getIndex());
if (indexService == null) {
return new NodeStoreFilesMetaData(clusterService.localNode(), null);
}
if (!indexService.hasShard(request.shardId.id())) {
return new NodeStoreFilesMetaData(clusterService.localNode(), null);
}
}
IndexMetaData metaData = clusterService.state().metaData().index(request.shardId.getIndex());
if (metaData == null) {
return new NodeStoreFilesMetaData(clusterService.localNode(), null);
}
try { try {
return new NodeStoreFilesMetaData(clusterService.localNode(), listStoreMetaData(request.shardId)); return new NodeStoreFilesMetaData(clusterService.localNode(), listStoreMetaData(request.shardId));
} catch (IOException e) { } catch (IOException e) {
@ -149,7 +127,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
store.incRef(); store.incRef();
try { try {
exists = true; exists = true;
return new StoreFilesMetaData(true, shardId, store.getMetadataOrEmpty()); return new StoreFilesMetaData(shardId, store.getMetadataOrEmpty());
} finally { } finally {
store.decRef(); store.decRef();
} }
@ -158,14 +136,21 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
// try and see if we can list unallocated // try and see if we can list unallocated
IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndex()); IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndex());
if (metaData == null) { if (metaData == null) {
return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY); // we may send this request while processing the cluster state that recovered the index
// sometimes the request comes in before the local node processed that cluster state
// in such cases we can load it from disk
metaData = IndexMetaData.FORMAT.loadLatestState(logger, nodeEnv.indexPaths(shardId.getIndex()));
}
if (metaData == null) {
logger.trace("{} node doesn't have meta data for the requests index, responding with empty", shardId);
return new StoreFilesMetaData(shardId, Store.MetadataSnapshot.EMPTY);
} }
final IndexSettings indexSettings = indexService != null ? indexService.getIndexSettings() : new IndexSettings(metaData, settings); final IndexSettings indexSettings = indexService != null ? indexService.getIndexSettings() : new IndexSettings(metaData, settings);
final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings); final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings);
if (shardPath == null) { if (shardPath == null) {
return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY); return new StoreFilesMetaData(shardId, Store.MetadataSnapshot.EMPTY);
} }
return new StoreFilesMetaData(false, shardId, Store.readMetadataSnapshot(shardPath.resolveIndex(), shardId, logger)); return new StoreFilesMetaData(shardId, Store.readMetadataSnapshot(shardPath.resolveIndex(), shardId, logger));
} finally { } finally {
TimeValue took = new TimeValue(System.nanoTime() - startTimeNS, TimeUnit.NANOSECONDS); TimeValue took = new TimeValue(System.nanoTime() - startTimeNS, TimeUnit.NANOSECONDS);
if (exists) { if (exists) {
@ -182,28 +167,25 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
} }
public static class StoreFilesMetaData implements Iterable<StoreFileMetaData>, Streamable { public static class StoreFilesMetaData implements Iterable<StoreFileMetaData>, Streamable {
// here also transmit sync id, else recovery will not use sync id because of stupid gateway allocator every now and then...
private boolean allocated;
private ShardId shardId; private ShardId shardId;
Store.MetadataSnapshot metadataSnapshot; Store.MetadataSnapshot metadataSnapshot;
StoreFilesMetaData() { StoreFilesMetaData() {
} }
public StoreFilesMetaData(boolean allocated, ShardId shardId, Store.MetadataSnapshot metadataSnapshot) { public StoreFilesMetaData(ShardId shardId, Store.MetadataSnapshot metadataSnapshot) {
this.allocated = allocated;
this.shardId = shardId; this.shardId = shardId;
this.metadataSnapshot = metadataSnapshot; this.metadataSnapshot = metadataSnapshot;
} }
public boolean allocated() {
return allocated;
}
public ShardId shardId() { public ShardId shardId() {
return this.shardId; return this.shardId;
} }
public boolean isEmpty() {
return metadataSnapshot.size() == 0;
}
@Override @Override
public Iterator<StoreFileMetaData> iterator() { public Iterator<StoreFileMetaData> iterator() {
return metadataSnapshot.iterator(); return metadataSnapshot.iterator();
@ -225,14 +207,12 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
allocated = in.readBoolean();
shardId = ShardId.readShardId(in); shardId = ShardId.readShardId(in);
this.metadataSnapshot = new Store.MetadataSnapshot(in); this.metadataSnapshot = new Store.MetadataSnapshot(in);
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(allocated);
shardId.writeTo(out); shardId.writeTo(out);
metadataSnapshot.writeTo(out); metadataSnapshot.writeTo(out);
} }
@ -243,6 +223,14 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
public String syncId() { public String syncId() {
return metadataSnapshot.getSyncId(); return metadataSnapshot.getSyncId();
} }
@Override
public String toString() {
return "StoreFilesMetaData{" +
", shardId=" + shardId +
", metadataSnapshot{size=" + metadataSnapshot.size() + ", syncId=" + metadataSnapshot.getSyncId() + "}" +
'}';
}
} }
@ -250,35 +238,24 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
private ShardId shardId; private ShardId shardId;
private boolean unallocated;
public Request() { public Request() {
} }
public Request(ShardId shardId, boolean unallocated, Set<String> nodesIds) { public Request(ShardId shardId, DiscoveryNode[] nodes) {
super(nodesIds.toArray(new String[nodesIds.size()])); super(nodes);
this.shardId = shardId; this.shardId = shardId;
this.unallocated = unallocated;
}
public Request(ShardId shardId, boolean unallocated, String... nodesIds) {
super(nodesIds);
this.shardId = shardId;
this.unallocated = unallocated;
} }
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
shardId = ShardId.readShardId(in); shardId = ShardId.readShardId(in);
unallocated = in.readBoolean();
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
shardId.writeTo(out); shardId.writeTo(out);
out.writeBoolean(unallocated);
} }
} }
@ -307,29 +284,24 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
private ShardId shardId; private ShardId shardId;
private boolean unallocated;
public NodeRequest() { public NodeRequest() {
} }
NodeRequest(String nodeId, TransportNodesListShardStoreMetaData.Request request) { NodeRequest(String nodeId, TransportNodesListShardStoreMetaData.Request request) {
super(nodeId); super(nodeId);
this.shardId = request.shardId; this.shardId = request.shardId;
this.unallocated = request.unallocated;
} }
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
shardId = ShardId.readShardId(in); shardId = ShardId.readShardId(in);
unallocated = in.readBoolean();
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
shardId.writeTo(out); shardId.writeTo(out);
out.writeBoolean(unallocated);
} }
} }
@ -358,20 +330,18 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
if (in.readBoolean()) { storeFilesMetaData = StoreFilesMetaData.readStoreFilesMetaData(in);
storeFilesMetaData = StoreFilesMetaData.readStoreFilesMetaData(in);
}
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
if (storeFilesMetaData == null) { storeFilesMetaData.writeTo(out);
out.writeBoolean(false); }
} else {
out.writeBoolean(true); @Override
storeFilesMetaData.writeTo(out); public String toString() {
} return "[[" + getNode() + "][" + storeFilesMetaData + "]]";
} }
} }
} }
View File
@ -72,6 +72,7 @@ import org.elasticsearch.gateway.GatewayModule;
import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.http.HttpServer; import org.elasticsearch.http.HttpServer;
import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule;
@ -86,6 +87,7 @@ import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.service.NodeService; import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.ScriptPlugin;
@ -228,6 +230,7 @@ public class Node implements Closeable {
final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool); final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool);
final ScriptModule scriptModule = ScriptModule.create(settings, environment, resourceWatcherService, final ScriptModule scriptModule = ScriptModule.create(settings, environment, resourceWatcherService,
pluginsService.filterPlugins(ScriptPlugin.class)); pluginsService.filterPlugins(ScriptPlugin.class));
AnalysisModule analysisModule = new AnalysisModule(environment, pluginsService.filterPlugins(AnalysisPlugin.class));
additionalSettings.addAll(scriptModule.getSettings()); additionalSettings.addAll(scriptModule.getSettings());
// this is as early as we can validate settings at this point. we already pass them to ScriptModule as well as ThreadPool // this is as early as we can validate settings at this point. we already pass them to ScriptModule as well as ThreadPool
// so we might be late here already // so we might be late here already
@ -261,7 +264,6 @@ public class Node implements Closeable {
modules.add(new ActionModule(DiscoveryNode.isIngestNode(settings), false)); modules.add(new ActionModule(DiscoveryNode.isIngestNode(settings), false));
modules.add(new GatewayModule()); modules.add(new GatewayModule());
modules.add(new RepositoriesModule()); modules.add(new RepositoriesModule());
modules.add(new AnalysisModule(environment));
pluginsService.processModules(modules); pluginsService.processModules(modules);
CircuitBreakerService circuitBreakerService = createCircuitBreakerService(settingsModule.getSettings(), CircuitBreakerService circuitBreakerService = createCircuitBreakerService(settingsModule.getSettings(),
settingsModule.getClusterSettings()); settingsModule.getClusterSettings());
@ -280,6 +282,7 @@ public class Node implements Closeable {
b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService); b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService);
b.bind(BigArrays.class).toInstance(bigArrays); b.bind(BigArrays.class).toInstance(bigArrays);
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService()); b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
b.bind(AnalysisRegistry.class).toInstance(analysisModule.getAnalysisRegistry());
} }
); );
injector = modules.createInjector(); injector = modules.createInjector();
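Since AnalysisModule is now constructed directly rather than installed as a Guice module, it can also be built standalone, which is what makes the bind(...) of the registry above possible. A minimal sketch, assuming only the constructor and accessor shown in this diff and a hypothetical home directory:

    // Hypothetical standalone construction mirroring the Node wiring above.
    Settings settings = Settings.builder().put("path.home", "/tmp/es-home").build(); // assumed home dir
    AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), Collections.emptyList());
    AnalysisRegistry analysisRegistry = analysisModule.getAnalysisRegistry();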
View File
@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharFilter;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import java.util.Map;
import static java.util.Collections.emptyMap;
/**
* An additional extension point for {@link Plugin}s that extends Elasticsearch's analysis functionality. To add an additional
* {@link TokenFilter} just implement the interface and implement the {@link #getTokenFilters()} method:
*
* <pre>{@code
* public class AnalysisPhoneticPlugin extends Plugin implements AnalysisPlugin {
* &#64;Override
* public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
* return singletonMap("phonetic", PhoneticTokenFilterFactory::new);
* }
* }
* }</pre>
*/
public interface AnalysisPlugin {
/**
* Override to add additional {@link CharFilter}s.
*/
default Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
return emptyMap();
}
/**
* Override to add additional {@link TokenFilter}s.
*/
default Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return emptyMap();
}
/**
* Override to add additional {@link Tokenizer}s.
*/
default Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return emptyMap();
}
/**
* Override to add additional {@link Analyzer}s.
*/
default Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
return emptyMap();
}
/**
* Override to add additional hunspell {@link org.apache.lucene.analysis.hunspell.Dictionary}s.
*/
default Map<String, org.apache.lucene.analysis.hunspell.Dictionary> getHunspellDictionaries() {
return emptyMap();
}
}
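Beyond the token-filter example in the class javadoc, a single plugin can combine several of these hooks. A hedged sketch with hypothetical factory and dictionary names:

    public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
        @Override
        public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
            return singletonMap("my_char_filter", MyCharFilterFactory::new);
        }

        @Override
        public Map<String, org.apache.lucene.analysis.hunspell.Dictionary> getHunspellDictionaries() {
            return singletonMap("en_XX", loadDictionarySomehow()); // hypothetical loader
        }
    }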
View File
@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexModule;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ExecutorBuilder;
@ -103,6 +104,14 @@ public abstract class Plugin {
@Deprecated @Deprecated
public final void onModule(ScriptModule module) {} public final void onModule(ScriptModule module) {}
/**
* Old-style analysis extension point.
*
* @deprecated implement {@link AnalysisPlugin} instead
*/
@Deprecated
public final void onModule(AnalysisModule module) {}
/** /**
* Provides the list of this plugin's custom thread pools, empty if * Provides the list of this plugin's custom thread pools, empty if
* none. * none.
View File
@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexModule;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.script.NativeScriptFactory; import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptEngineService;
@ -207,8 +208,8 @@ public class PluginsService extends AbstractComponent {
} }
Class moduleClass = method.getParameterTypes()[0]; Class moduleClass = method.getParameterTypes()[0];
if (!Module.class.isAssignableFrom(moduleClass)) { if (!Module.class.isAssignableFrom(moduleClass)) {
if (moduleClass == ScriptModule.class) { if (method.getDeclaringClass() == Plugin.class) {
// This is still part of the Plugin class to point the user to the new implementation // These are still part of the Plugin class to point the user to the new implementations
continue; continue;
} }
throw new RuntimeException( throw new RuntimeException(
View File
@ -27,8 +27,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
/** /**
* An additional extension point to {@link Plugin}. Plugins extending the scripting functionality must implement this interface * An additional extension point for {@link Plugin}s that extends Elasticsearch's scripting functionality.
* to provide access to script engines or script factories.
*/ */
public interface ScriptPlugin { public interface ScriptPlugin {
View File
@ -78,7 +78,7 @@ public class RestGetSourceAction extends BaseRestHandler {
@Override @Override
public RestResponse buildResponse(GetResponse response) throws Exception { public RestResponse buildResponse(GetResponse response) throws Exception {
XContentBuilder builder = channel.newBuilder(response.getSourceInternal(), false); XContentBuilder builder = channel.newBuilder(response.getSourceInternal(), false);
if (!response.isExists()) { if (response.isSourceEmpty()) { // check if doc source (or doc itself) is missing
return new BytesRestResponse(NOT_FOUND, builder); return new BytesRestResponse(NOT_FOUND, builder);
} else { } else {
builder.rawValue(response.getSourceInternal()); builder.rawValue(response.getSourceInternal());
View File
@ -39,15 +39,47 @@ import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.rest.RestStatus.OK;
/** /**
* * Base class for {@code HEAD} request handlers for a single document.
*/ */
public class RestHeadAction extends BaseRestHandler { public abstract class RestHeadAction extends BaseRestHandler {
@Inject /**
public RestHeadAction(Settings settings, RestController controller, Client client) { * Handler to check for document existence.
*/
public static class Document extends RestHeadAction {
@Inject
public Document(Settings settings, RestController controller, Client client) {
super(settings, client, false);
controller.registerHandler(HEAD, "/{index}/{type}/{id}", this);
}
}
/**
* Handler to check for document source existence (may be disabled in the mapping).
*/
public static class Source extends RestHeadAction {
@Inject
public Source(Settings settings, RestController controller, Client client) {
super(settings, client, true);
controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this);
}
}
private final boolean source;
/**
* All subclasses must be registered in {@link org.elasticsearch.common.network.NetworkModule}.
*
* @param settings injected settings
* @param client injected client
* @param source {@code false} to check for {@link GetResponse#isExists()}.
* {@code true} to also check for {@link GetResponse#isSourceEmpty()}.
*/
public RestHeadAction(Settings settings, Client client, boolean source) {
super(settings, client); super(settings, client);
controller.registerHandler(HEAD, "/{index}/{type}/{id}", this); this.source = source;
controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this);
} }
@Override @Override
@ -68,6 +100,8 @@ public class RestHeadAction extends BaseRestHandler {
public RestResponse buildResponse(GetResponse response) { public RestResponse buildResponse(GetResponse response) {
if (!response.isExists()) { if (!response.isExists()) {
return new BytesRestResponse(NOT_FOUND, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); return new BytesRestResponse(NOT_FOUND, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY);
} else if (source && response.isSourceEmpty()) { // doc exists, but source might not (disabled in the mapping)
return new BytesRestResponse(NOT_FOUND, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY);
} else { } else {
return new BytesRestResponse(OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); return new BytesRestResponse(OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY);
} }
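The combined behavior of the Document and Source handlers reduces to a small decision over the response flags; a hypothetical restatement for clarity, not code from this change:

    static RestStatus headStatus(GetResponse response, boolean checkSource) {
        if (response.isExists() == false) {
            return RestStatus.NOT_FOUND;              // document missing entirely
        }
        if (checkSource && response.isSourceEmpty()) {
            return RestStatus.NOT_FOUND;              // doc exists, but _source is disabled/absent
        }
        return RestStatus.OK;
    }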
View File
@ -24,7 +24,6 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -176,35 +175,27 @@ public class RestSearchAction extends BaseRestHandler {
} }
} }
if (request.param("fields") != null) { String sField = request.param("fields");
throw new IllegalArgumentException("The parameter [" +
SearchSourceBuilder.FIELDS_FIELD + "] is not longer supported, please use [" +
SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " +
"if the field is not stored");
}
String sField = request.param("stored_fields");
if (sField != null) { if (sField != null) {
if (!Strings.hasText(sField)) { if (!Strings.hasText(sField)) {
searchSourceBuilder.noStoredFields(); searchSourceBuilder.noFields();
} else { } else {
String[] sFields = Strings.splitStringByCommaToArray(sField); String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) { if (sFields != null) {
for (String field : sFields) { for (String field : sFields) {
searchSourceBuilder.storedField(field); searchSourceBuilder.field(field);
} }
} }
} }
} }
String sDocValueFields = request.param("docvalue_fields"); String sFieldDataFields = request.param("fielddata_fields");
if (sDocValueFields == null) { if (sFieldDataFields != null) {
sDocValueFields = request.param("fielddata_fields"); if (Strings.hasText(sFieldDataFields)) {
} String[] sFields = Strings.splitStringByCommaToArray(sFieldDataFields);
if (sDocValueFields != null) { if (sFields != null) {
if (Strings.hasText(sDocValueFields)) { for (String field : sFields) {
String[] sFields = Strings.splitStringByCommaToArray(sDocValueFields); searchSourceBuilder.fieldDataField(field);
for (String field : sFields) { }
searchSourceBuilder.docValueField(field);
} }
} }
} }
View File
@ -713,8 +713,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
throw new SearchContextException(context, "failed to create RescoreSearchContext", e); throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
} }
} }
if (source.storedFields() != null) { if (source.fields() != null) {
context.fieldNames().addAll(source.storedFields()); context.fieldNames().addAll(source.fields());
} }
if (source.explain() != null) { if (source.explain() != null) {
context.explain(source.explain()); context.explain(source.explain());
@ -722,9 +722,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
if (source.fetchSource() != null) { if (source.fetchSource() != null) {
context.fetchSourceContext(source.fetchSource()); context.fetchSourceContext(source.fetchSource());
} }
if (source.docValueFields() != null) { if (source.fieldDataFields() != null) {
FieldDataFieldsContext fieldDataFieldsContext = context.getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY); FieldDataFieldsContext fieldDataFieldsContext = context.getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY);
for (String field : source.docValueFields()) { for (String field : source.fieldDataFields()) {
fieldDataFieldsContext.add(new FieldDataField(field)); fieldDataFieldsContext.add(new FieldDataField(field));
} }
fieldDataFieldsContext.setHitExecutionNeeded(true); fieldDataFieldsContext.setHitExecutionNeeded(true);
View File
@ -69,9 +69,7 @@ public abstract class AbstractHistogramAggregatorFactory<AF extends AbstractHist
@Override @Override
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException { throws IOException {
Rounding rounding = createRounding(); return createAggregator(null, parent, pipelineAggregators, metaData);
return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, extendedBounds, null, config.format(),
histogramFactory, context, parent, pipelineAggregators, metaData);
} }
protected Rounding createRounding() { protected Rounding createRounding() {
@ -92,6 +90,11 @@ public abstract class AbstractHistogramAggregatorFactory<AF extends AbstractHist
if (collectsFromSingleBucket == false) { if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, context, parent); return asMultiBucketAggregator(this, context, parent);
} }
return createAggregator(valuesSource, parent, pipelineAggregators, metaData);
}
private Aggregator createAggregator(ValuesSource.Numeric valuesSource, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
Rounding rounding = createRounding(); Rounding rounding = createRounding();
// we need to round the bounds given by the user and we have to do it // we need to round the bounds given by the user and we have to do it
// for every aggregator we create // for every aggregator we create
View File
@ -20,8 +20,8 @@
package org.elasticsearch.search.aggregations.bucket.range; package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamInputReader;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
@ -47,7 +47,7 @@ public abstract class AbstractRangeBuilder<AB extends AbstractRangeBuilder<AB, R
/** /**
* Read from a stream. * Read from a stream.
*/ */
protected AbstractRangeBuilder(StreamInput in, InternalRange.Factory<?, ?> rangeFactory, StreamInputReader<R> rangeReader) protected AbstractRangeBuilder(StreamInput in, InternalRange.Factory<?, ?> rangeFactory, Writeable.Reader<R> rangeReader)
throws IOException { throws IOException {
super(in, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType()); super(in, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
this.rangeFactory = rangeFactory; this.rangeFactory = rangeFactory;
View File
@ -567,9 +567,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} }
if (fieldNames != null) { if (fieldNames != null) {
if (fieldNames.size() == 1) { if (fieldNames.size() == 1) {
builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), fieldNames.get(0)); builder.field(SearchSourceBuilder.FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
} else { } else {
builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName()); builder.startArray(SearchSourceBuilder.FIELDS_FIELD.getPreferredName());
for (String fieldName : fieldNames) { for (String fieldName : fieldNames) {
builder.value(fieldName); builder.value(fieldName);
} }
@ -577,7 +577,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} }
} }
if (fieldDataFields != null) { if (fieldDataFields != null) {
builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName()); builder.startArray(SearchSourceBuilder.FIELDDATA_FIELDS_FIELD.getPreferredName());
for (String fieldDataField : fieldDataFields) { for (String fieldDataField : fieldDataFields) {
builder.value(fieldDataField); builder.value(fieldDataField);
} }
@ -628,7 +628,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
factory.trackScores(parser.booleanValue()); factory.trackScores(parser.booleanValue());
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(context)); factory.fetchSource(FetchSourceContext.parse(context));
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.FIELDS_FIELD)) {
List<String> fieldNames = new ArrayList<>(); List<String> fieldNames = new ArrayList<>();
fieldNames.add(parser.text()); fieldNames.add(parser.text());
factory.fields(fieldNames); factory.fields(fieldNames);
@ -694,7 +694,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) { if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.FIELDS_FIELD)) {
List<String> fieldNames = new ArrayList<>(); List<String> fieldNames = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
@ -705,7 +705,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} }
} }
factory.fields(fieldNames); factory.fields(fieldNames);
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.FIELDDATA_FIELDS_FIELD)) {
List<String> fieldDataFields = new ArrayList<>(); List<String> fieldDataFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
View File
@ -21,7 +21,6 @@ package org.elasticsearch.search.builder;
import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.ObjectFloatHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
@ -42,7 +41,6 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.slice.SliceBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
@ -51,6 +49,7 @@ import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.slice.SliceBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder;
import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortBuilders;
@ -84,8 +83,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField EXPLAIN_FIELD = new ParseField("explain"); public static final ParseField EXPLAIN_FIELD = new ParseField("explain");
public static final ParseField _SOURCE_FIELD = new ParseField("_source"); public static final ParseField _SOURCE_FIELD = new ParseField("_source");
public static final ParseField FIELDS_FIELD = new ParseField("fields"); public static final ParseField FIELDS_FIELD = new ParseField("fields");
public static final ParseField STORED_FIELDS_FIELD = new ParseField("stored_fields"); public static final ParseField FIELDDATA_FIELDS_FIELD = new ParseField("fielddata_fields");
public static final ParseField DOCVALUE_FIELDS_FIELD = new ParseField("docvalue_fields", "fielddata_fields");
public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields"); public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields");
public static final ParseField SCRIPT_FIELD = new ParseField("script"); public static final ParseField SCRIPT_FIELD = new ParseField("script");
public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure"); public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure");
@ -148,8 +146,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
private long timeoutInMillis = -1; private long timeoutInMillis = -1;
private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER; private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;
private List<String> storedFieldNames; private List<String> fieldNames;
private List<String> docValueFields; private List<String> fieldDataFields;
private List<ScriptField> scriptFields; private List<ScriptField> scriptFields;
private FetchSourceContext fetchSourceContext; private FetchSourceContext fetchSourceContext;
@ -183,8 +181,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
explain = in.readOptionalBoolean(); explain = in.readOptionalBoolean();
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
docValueFields = (List<String>) in.readGenericValue(); boolean hasFieldDataFields = in.readBoolean();
storedFieldNames = (List<String>) in.readGenericValue(); if (hasFieldDataFields) {
int size = in.readVInt();
fieldDataFields = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
fieldDataFields.add(in.readString());
}
}
boolean hasFieldNames = in.readBoolean();
if (hasFieldNames) {
int size = in.readVInt();
fieldNames = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
fieldNames.add(in.readString());
}
}
from = in.readVInt(); from = in.readVInt();
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new); highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
boolean hasIndexBoost = in.readBoolean(); boolean hasIndexBoost = in.readBoolean();
@ -243,8 +255,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
out.writeOptionalWriteable(aggregations); out.writeOptionalWriteable(aggregations);
out.writeOptionalBoolean(explain); out.writeOptionalBoolean(explain);
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalStreamable(fetchSourceContext);
out.writeGenericValue(docValueFields); boolean hasFieldDataFields = fieldDataFields != null;
out.writeGenericValue(storedFieldNames); out.writeBoolean(hasFieldDataFields);
if (hasFieldDataFields) {
out.writeVInt(fieldDataFields.size());
for (String field : fieldDataFields) {
out.writeString(field);
}
}
boolean hasFieldNames = fieldNames != null;
out.writeBoolean(hasFieldNames);
if (hasFieldNames) {
out.writeVInt(fieldNames.size());
for (String field : fieldNames) {
out.writeString(field);
}
}
out.writeVInt(from); out.writeVInt(from);
out.writeOptionalWriteable(highlightBuilder); out.writeOptionalWriteable(highlightBuilder);
boolean hasIndexBoost = indexBoost != null; boolean hasIndexBoost = indexBoost != null;
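The explicit presence-flag/size/elements sequence that replaces writeGenericValue above is a recurring wire pattern; factored into helpers it might look like this (hypothetical, not part of this change):

    // Writes a possibly-null List<String> as: boolean present, then vint size, then each string.
    static void writeOptionalStringList(StreamOutput out, List<String> list) throws IOException {
        out.writeBoolean(list != null);
        if (list != null) {
            out.writeVInt(list.size());
            for (String s : list) {
                out.writeString(s);
            }
        }
    }

    static List<String> readOptionalStringList(StreamInput in) throws IOException {
        if (in.readBoolean() == false) {
            return null;
        }
        int size = in.readVInt();
        List<String> list = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            list.add(in.readString());
        }
        return list;
    }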
@ -706,87 +732,60 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
/** /**
* Adds a stored field to load and return as part of the * Adds a field to load and return (note, it must be stored) as part of the
* search request. If none are specified, the source of the document will be * search request. If none are specified, the source of the document will be
* returned. * returned.
*/ */
public SearchSourceBuilder storedField(String name) { public SearchSourceBuilder field(String name) {
if (storedFieldNames == null) { if (fieldNames == null) {
storedFieldNames = new ArrayList<>(); fieldNames = new ArrayList<>();
} }
storedFieldNames.add(name); fieldNames.add(name);
return this; return this;
} }
/** /**
* Sets the stored fields to load and return as part of the search request. If none * Sets the fields to load and return as part of the search request. If none
* are specified, the source of the document will be returned. * are specified, the source of the document will be returned.
*/ */
public SearchSourceBuilder storedFields(List<String> fields) { public SearchSourceBuilder fields(List<String> fields) {
this.storedFieldNames = fields; this.fieldNames = fields;
return this; return this;
} }
/** /**
* Sets no stored fields to be loaded, resulting in only id and type to be returned * Sets no fields to be loaded, resulting in only id and type to be returned
* per hit. * per hit.
*/ */
public SearchSourceBuilder noStoredFields() { public SearchSourceBuilder noFields() {
this.storedFieldNames = Collections.emptyList(); this.fieldNames = Collections.emptyList();
return this; return this;
} }
/** /**
* Gets the stored fields to load and return as part of the search request. * Gets the fields to load and return as part of the search request.
*/ */
public List<String> storedFields() { public List<String> fields() {
return storedFieldNames; return fieldNames;
} }
/** /**
* Adds a field to load from doc values and return as part of the * Adds a field to load from the field data cache and return as part of the
* search request. * search request.
*
* @deprecated Use {@link SearchSourceBuilder#docValueField(String)} instead.
*/ */
@Deprecated
public SearchSourceBuilder fieldDataField(String name) { public SearchSourceBuilder fieldDataField(String name) {
if (docValueFields == null) { if (fieldDataFields == null) {
docValueFields = new ArrayList<>(); fieldDataFields = new ArrayList<>();
} }
docValueFields.add(name); fieldDataFields.add(name);
return this; return this;
} }
/** /**
* Gets the docvalue fields. * Gets the field-data fields.
*
* @deprecated Use {@link SearchSourceBuilder#docValueFields()} instead.
*/ */
@Deprecated
public List<String> fieldDataFields() { public List<String> fieldDataFields() {
return docValueFields; return fieldDataFields;
}
/**
* Gets the docvalue fields.
*/
public List<String> docValueFields() {
return docValueFields;
}
/**
* Adds a field to load from doc values and return as part of the
* search request.
*/
public SearchSourceBuilder docValueField(String name) {
if (docValueFields == null) {
docValueFields = new ArrayList<>();
}
docValueFields.add(name);
return this;
} }
/** /**
@ -911,8 +910,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
rewrittenBuilder.explain = explain; rewrittenBuilder.explain = explain;
rewrittenBuilder.ext = ext; rewrittenBuilder.ext = ext;
rewrittenBuilder.fetchSourceContext = fetchSourceContext; rewrittenBuilder.fetchSourceContext = fetchSourceContext;
rewrittenBuilder.docValueFields = docValueFields; rewrittenBuilder.fieldDataFields = fieldDataFields;
rewrittenBuilder.storedFieldNames = storedFieldNames; rewrittenBuilder.fieldNames = fieldNames;
rewrittenBuilder.from = from; rewrittenBuilder.from = from;
rewrittenBuilder.highlightBuilder = highlightBuilder; rewrittenBuilder.highlightBuilder = highlightBuilder;
rewrittenBuilder.indexBoost = indexBoost; rewrittenBuilder.indexBoost = indexBoost;
@ -959,7 +958,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.getParseFieldMatcher().match(currentFieldName, SIZE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SIZE_FIELD)) {
size = parser.intValue(); size = parser.intValue();
} else if (context.getParseFieldMatcher().match(currentFieldName, TIMEOUT_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, TIMEOUT_FIELD)) {
timeoutInMillis = parser.longValue(); timeoutInMillis = TimeValue.parseTimeValue(parser.text(), null, TIMEOUT_FIELD.getPreferredName()).millis();
} else if (context.getParseFieldMatcher().match(currentFieldName, TERMINATE_AFTER_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, TERMINATE_AFTER_FIELD)) {
terminateAfter = parser.intValue(); terminateAfter = parser.intValue();
} else if (context.getParseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) {
@ -972,16 +971,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
trackScores = parser.booleanValue(); trackScores = parser.booleanValue();
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(context); fetchSourceContext = FetchSourceContext.parse(context);
} else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
storedField(parser.text()); field(parser.text());
} else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
sort(parser.text()); sort(parser.text());
} else if (context.getParseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) {
profile = parser.booleanValue(); profile = parser.booleanValue();
} else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
throw new ParsingException(parser.getTokenLocation(), "Deprecated field [" +
SearchSourceBuilder.FIELDS_FIELD + "] used, expected [" +
SearchSourceBuilder.STORED_FIELDS_FIELD + "] instead");
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation()); parser.getTokenLocation());
@ -1031,21 +1026,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
parser.getTokenLocation()); parser.getTokenLocation());
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) {
storedFieldNames = new ArrayList<>(); if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
fieldNames = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
storedFieldNames.add(parser.text()); fieldNames.add(parser.text());
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in [" throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation()); + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
} }
} }
} else if (context.getParseFieldMatcher().match(currentFieldName, DOCVALUE_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDDATA_FIELDS_FIELD)) {
docValueFields = new ArrayList<>(); fieldDataFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
docValueFields.add(parser.text()); fieldDataFields.add(parser.text());
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in [" throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation()); + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
@ -1072,11 +1068,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
fetchSourceContext = FetchSourceContext.parse(context); fetchSourceContext = FetchSourceContext.parse(context);
} else if (context.getParseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) {
searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher()); searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher());
} else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
throw new ParsingException(parser.getTokenLocation(), "The field [" +
SearchSourceBuilder.FIELDS_FIELD + "] is not longer supported, please use [" +
SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " +
"if the field is not stored");
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation()); parser.getTokenLocation());
@ -1105,7 +1096,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
if (timeoutInMillis != -1) { if (timeoutInMillis != -1) {
builder.field(TIMEOUT_FIELD.getPreferredName(), timeoutInMillis); builder.field(TIMEOUT_FIELD.getPreferredName(), TimeValue.timeValueMillis(timeoutInMillis).toString());
} }
if (terminateAfter != SearchContext.DEFAULT_TERMINATE_AFTER) { if (terminateAfter != SearchContext.DEFAULT_TERMINATE_AFTER) {
@ -1140,21 +1131,21 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext); builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext);
} }
if (storedFieldNames != null) { if (fieldNames != null) {
if (storedFieldNames.size() == 1) { if (fieldNames.size() == 1) {
builder.field(STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0)); builder.field(FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
} else { } else {
builder.startArray(STORED_FIELDS_FIELD.getPreferredName()); builder.startArray(FIELDS_FIELD.getPreferredName());
for (String fieldName : storedFieldNames) { for (String fieldName : fieldNames) {
builder.value(fieldName); builder.value(fieldName);
} }
builder.endArray(); builder.endArray();
} }
} }
if (docValueFields != null) { if (fieldDataFields != null) {
builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName()); builder.startArray(FIELDDATA_FIELDS_FIELD.getPreferredName());
for (String fieldDataField : docValueFields) { for (String fieldDataField : fieldDataFields) {
builder.value(fieldDataField); builder.value(fieldDataField);
} }
builder.endArray(); builder.endArray();
@ -1348,7 +1339,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldNames, from, return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from,
highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile); size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile);
} }
@ -1365,8 +1356,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
return Objects.equals(aggregations, other.aggregations) return Objects.equals(aggregations, other.aggregations)
&& Objects.equals(explain, other.explain) && Objects.equals(explain, other.explain)
&& Objects.equals(fetchSourceContext, other.fetchSourceContext) && Objects.equals(fetchSourceContext, other.fetchSourceContext)
&& Objects.equals(docValueFields, other.docValueFields) && Objects.equals(fieldDataFields, other.fieldDataFields)
&& Objects.equals(storedFieldNames, other.storedFieldNames) && Objects.equals(fieldNames, other.fieldNames)
&& Objects.equals(from, other.from) && Objects.equals(from, other.from)
&& Objects.equals(highlightBuilder, other.highlightBuilder) && Objects.equals(highlightBuilder, other.highlightBuilder)
&& Objects.equals(indexBoost, other.indexBoost) && Objects.equals(indexBoost, other.indexBoost)
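
The timeout handling in this hunk is easier to see in isolation. A minimal round-trip sketch, assuming only the TimeValue helpers visible above; the wrapper class and method name are illustrative, not part of the change:

import org.elasticsearch.common.unit.TimeValue;

class TimeoutRoundTripSketch {
    // The builder now renders the timeout with units ("1s" rather than a bare 1000)
    // and parses it back through TimeValue, as the -/+ lines above show.
    static long roundTripMillis(long timeoutInMillis) {
        String rendered = TimeValue.timeValueMillis(timeoutInMillis).toString(); // e.g. "1s"
        return TimeValue.parseTimeValue(rendered, null, "timeout").millis();     // back to 1000
    }
}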

View File

@@ -39,7 +39,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
 import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
 import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
-import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.RestoreSource;
@@ -436,7 +435,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateListener
                 if (request.includeGlobalState()) {
                     if (metaData.persistentSettings() != null) {
                         Settings settings = metaData.persistentSettings();
-                        clusterSettings.dryRun(settings);
+                        clusterSettings.validateUpdate(settings);
                         mdBuilder.persistentSettings(settings);
                     }
                     if (metaData.templates() != null) {

View File

@@ -38,7 +38,6 @@ import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -261,17 +260,6 @@ public class TestTaskPlugin extends Plugin {
             return new NodesResponse(clusterService.getClusterName(), responses, failures);
         }
 
-        @Override
-        protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
-            List<String> list = new ArrayList<>();
-            for (String node : nodesIds) {
-                if (nodes.nodeExists(node)) {
-                    list.add(node);
-                }
-            }
-            return list.toArray(new String[list.size()]);
-        }
-
         @Override
         protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) {
             return new NodeRequest(request, nodeId, request.getShouldBlock());

View File

@@ -29,12 +29,15 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
+import org.elasticsearch.indices.analysis.AnalysisModule;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
 
 import java.io.IOException;
 import java.util.List;
 
+import static java.util.Collections.emptyList;
+
 public class TransportAnalyzeActionTests extends ESTestCase {
 
     private AnalysisService analysisService;
@@ -56,7 +59,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
                 .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter").build();
         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
         environment = new Environment(settings);
-        registry = new AnalysisRegistry(null, environment);
+        registry = new AnalysisModule(environment, emptyList()).getAnalysisRegistry();
         analysisService = registry.build(idxSettings);
     }
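
The new construction path for the registry can be sketched on its own. A minimal sketch, assuming the pull-based AnalysisModule API used in the test above; the wrapper class is illustrative:

import static java.util.Collections.emptyList;

import java.io.IOException;

import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.indices.analysis.AnalysisModule;

class AnalysisRegistrySketch {
    // Build the registry through AnalysisModule (no plugins), then derive a
    // per-index AnalysisService, mirroring the two lines changed above.
    static AnalysisService build(Environment environment, IndexSettings idxSettings) throws IOException {
        AnalysisRegistry registry = new AnalysisModule(environment, emptyList()).getAnalysisRegistry();
        return registry.build(idxSettings);
    }
}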

View File

@@ -96,7 +96,7 @@ public class TransportNodesActionTests extends ESTestCase {
         TestNodesRequest request = new TestNodesRequest(finalNodesIds);
         action.new AsyncAction(null, request, new PlainActionFuture<>()).start();
         Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
-        assertEquals(clusterService.state().nodes().resolveNodesIds(finalNodesIds).length, capturedRequests.size());
+        assertEquals(clusterService.state().nodes().resolveNodes(finalNodesIds).length, capturedRequests.size());
     }
 
     public void testNewResponseNullArray() {
@@ -129,9 +129,9 @@ public class TransportNodesActionTests extends ESTestCase {
         assertTrue(failures.containsAll(response.failures()));
     }
 
-    public void testFiltering() throws Exception {
-        TransportNodesAction action = getFilteringTestTransportNodesAction(transportService);
-        TestNodesRequest request = new TestNodesRequest();
+    public void testCustomResolving() throws Exception {
+        TransportNodesAction action = getDataNodesOnlyTransportNodesAction(transportService);
+        TestNodesRequest request = new TestNodesRequest(randomBoolean() ? null : generateRandomStringArray(10, 5, false, true));
         PlainActionFuture<TestNodesResponse> listener = new PlainActionFuture<>();
         action.new AsyncAction(null, request, listener).start();
         Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
@@ -221,8 +221,8 @@ public class TransportNodesActionTests extends ESTestCase {
         );
     }
 
-    public FilteringTestTransportNodesAction getFilteringTestTransportNodesAction(TransportService transportService) {
-        return new FilteringTestTransportNodesAction(
+    public DataNodesOnlyTransportNodesAction getDataNodesOnlyTransportNodesAction(TransportService transportService) {
+        return new DataNodesOnlyTransportNodesAction(
                 Settings.EMPTY,
                 THREAD_POOL,
                 clusterService,
@@ -276,18 +276,18 @@ public class TransportNodesActionTests extends ESTestCase {
         }
     }
 
-    private static class FilteringTestTransportNodesAction
+    private static class DataNodesOnlyTransportNodesAction
             extends TestTransportNodesAction {
 
-        FilteringTestTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService
+        DataNodesOnlyTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService
                 transportService, ActionFilters actionFilters, Supplier<TestNodesRequest> request,
                 Supplier<TestNodeRequest> nodeRequest, String nodeExecutor) {
             super(settings, threadPool, clusterService, transportService, actionFilters, request, nodeRequest, nodeExecutor);
         }
 
         @Override
-        protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
-            return nodes.getDataNodes().keys().toArray(String.class);
+        protected void resolveRequest(TestNodesRequest request, ClusterState clusterState) {
+            request.setConcreteNodes(clusterState.nodes().getDataNodes().values().toArray(DiscoveryNode.class));
         }
     }

View File

@@ -91,7 +91,7 @@ public class DiscoveryNodesTests extends ESTestCase {
             expectedNodeIdsSet.add(discoveryNode.getId());
         }
 
-        String[] resolvedNodesIds = discoveryNodes.resolveNodesIds(nodeSelectors.toArray(new String[nodeSelectors.size()]));
+        String[] resolvedNodesIds = discoveryNodes.resolveNodes(nodeSelectors.toArray(new String[nodeSelectors.size()]));
         Arrays.sort(resolvedNodesIds);
         String[] expectedNodesIds = expectedNodeIdsSet.toArray(new String[expectedNodeIdsSet.size()]);
         Arrays.sort(expectedNodesIds);
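
resolveNodes accepts the same selector strings the old resolveNodesIds did. A minimal usage sketch, assuming the node-filter syntax; the "data:true" selector and the helper class are illustrative:

import org.elasticsearch.cluster.node.DiscoveryNodes;

class ResolveNodesSketch {
    // Resolve a selector to concrete node IDs, as the assertion above does with
    // randomly generated selectors.
    static String[] dataNodeIds(DiscoveryNodes nodes) {
        return nodes.resolveNodes("data:true");
    }
}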

View File

@@ -24,9 +24,7 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.ByteArrayInputStream;
-import java.io.FilterInputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -35,6 +33,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import static org.hamcrest.Matchers.equalTo;
+
 public class StreamTests extends ESTestCase {
 
     public void testRandomVLongSerialization() throws IOException {
         for (int i = 0; i < 1024; i++) {
@@ -121,4 +121,62 @@ public class StreamTests extends ESTestCase {
         streamInput.readBytes(new byte[bytesToRead], 0, bytesToRead);
         assertEquals(streamInput.available(), length - bytesToRead);
     }
+
+    public void testWritableArrays() throws IOException {
+        final String[] strings = generateRandomStringArray(10, 10, false, true);
+        WriteableString[] sourceArray = Arrays.stream(strings).<WriteableString>map(WriteableString::new).toArray(WriteableString[]::new);
+        WriteableString[] targetArray;
+
+        BytesStreamOutput out = new BytesStreamOutput();
+        if (randomBoolean()) {
+            if (randomBoolean()) {
+                sourceArray = null;
+            }
+            out.writeOptionalArray(sourceArray);
+            targetArray = out.bytes().streamInput().readOptionalArray(WriteableString::new, WriteableString[]::new);
+        } else {
+            out.writeArray(sourceArray);
+            targetArray = out.bytes().streamInput().readArray(WriteableString::new, WriteableString[]::new);
+        }
+
+        assertThat(targetArray, equalTo(sourceArray));
+    }
+
+    final static class WriteableString implements Writeable {
+        final String string;
+
+        public WriteableString(String string) {
+            this.string = string;
+        }
+
+        public WriteableString(StreamInput in) throws IOException {
+            this(in.readString());
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
+
+            WriteableString that = (WriteableString) o;
+
+            return string.equals(that.string);
+        }
+
+        @Override
+        public int hashCode() {
+            return string.hashCode();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(string);
+        }
+    }
 }
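
The array helpers exercised by testWritableArrays round-trip like this. A minimal sketch reusing the WriteableString class defined in the hunk above; the helper class itself is illustrative:

import java.io.IOException;

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

class ArrayRoundTripSketch {
    // writeOptionalArray writes a presence marker before the array, so a null
    // source survives the round trip; readOptionalArray mirrors it on the way in.
    static WriteableString[] roundTrip(WriteableString[] source) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeOptionalArray(source);
        StreamInput in = out.bytes().streamInput();
        return in.readOptionalArray(WriteableString::new, WriteableString[]::new);
    }
}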

View File

@@ -98,7 +98,7 @@ public class ScopedSettingsTests extends ESTestCase {
         assertEquals(0, aC.get());
         assertEquals(0, bC.get());
         try {
-            service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build());
+            service.validateUpdate(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build());
             fail("invalid value");
         } catch (IllegalArgumentException ex) {
             assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage());
@@ -108,7 +108,7 @@ public class ScopedSettingsTests extends ESTestCase {
         assertEquals(0, consumer2.get());
         assertEquals(0, aC.get());
         assertEquals(0, bC.get());
-        service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build());
+        service.validateUpdate(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build());
         assertEquals(0, consumer.get());
         assertEquals(0, consumer2.get());
         assertEquals(0, aC.get());
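
validateUpdate keeps dryRun's contract: it validates a prospective update against the registered settings without applying it, throwing IllegalArgumentException on a bad value. A minimal sketch, assuming the AbstractScopedSettings base type; the helper class is illustrative:

import org.elasticsearch.common.settings.AbstractScopedSettings;
import org.elasticsearch.common.settings.Settings;

class ValidateUpdateSketch {
    // Probe whether an update would be accepted, without mutating current settings.
    static boolean isValidUpdate(AbstractScopedSettings service, Settings update) {
        try {
            service.validateUpdate(update);
            return true;
        } catch (IllegalArgumentException ex) {
            return false;
        }
    }
}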

View File

@@ -371,6 +371,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
      * This test isolates the master from rest of the cluster, waits for a new master to be elected, restores the partition
      * and verifies that all node agree on the new cluster state
      */
+    @TestLogging("_root:DEBUG,cluster.service:TRACE,gateway:TRACE,indices.store:TRACE")
     public void testIsolateMasterAndVerifyClusterStateConsensus() throws Exception {
         final List<String> nodes = startCluster(3);

View File

@@ -270,8 +270,9 @@ public class AsyncShardFetchTests extends ESTestCase {
         }
 
         @Override
-        protected void asyncFetch(final ShardId shardId, String[] nodesIds) {
-            for (final String nodeId : nodesIds) {
+        protected void asyncFetch(final ShardId shardId, DiscoveryNode[] nodes) {
+            for (final DiscoveryNode node : nodes) {
+                final String nodeId = node.getId();
                 threadPool.generic().execute(new Runnable() {
                     @Override
                     public void run() {

View File

@@ -565,7 +565,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
         TransportNodesListGatewayStartedShards.NodesGatewayStartedShards response;
         response = internalCluster().getInstance(TransportNodesListGatewayStartedShards.class)
-                .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new String[]{node.getId()}))
+                .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new DiscoveryNode[]{node}))
                 .get();
         assertThat(response.getNodes(), hasSize(1));

View File

@@ -36,11 +36,11 @@ import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
 import org.elasticsearch.cluster.routing.allocation.decider.Decision;
+import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.set.Sets;
@@ -118,8 +118,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testSimpleFullMatchAllocation() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders());
         DiscoveryNode nodeToMatch = randomBoolean() ? node2 : node3;
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(nodeToMatch, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(nodeToMatch, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId()));
@@ -131,8 +131,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testSyncIdMatch() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders());
         DiscoveryNode nodeToMatch = randomBoolean() ? node2 : node3;
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(nodeToMatch, false, "MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(nodeToMatch, "MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId()));
@@ -144,8 +144,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testFileChecksumMatch() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders());
         DiscoveryNode nodeToMatch = randomBoolean() ? node2 : node3;
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(nodeToMatch, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(nodeToMatch, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId()));
@@ -159,7 +159,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
      */
     public void testNoPrimaryData() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders());
-        testAllocator.addData(node2, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId));
@@ -171,7 +171,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
      */
     public void testNoDataForReplicaOnAnyNode() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders());
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId));
@@ -183,8 +183,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
      */
     public void testNoMatchingFilesForReplicaOnAnyNode() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders());
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node2, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node2, "NO_MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId));
@@ -196,8 +196,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
      */
     public void testNoOrThrottleDecidersRemainsInUnassigned() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(randomBoolean() ? noAllocationDeciders() : throttleAllocationDeciders());
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1));
         assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));
@@ -209,7 +209,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
      */
     public void testThrottleWhenAllocatingToMatchingNode() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(new AllocationDeciders(Settings.EMPTY,
-                new AllocationDecider[]{new TestAllocateDecision(Decision.YES), new AllocationDecider(Settings.EMPTY) {
+                new AllocationDecider[]{new TestAllocateDecision(Decision.YES), new SameShardAllocationDecider(Settings.EMPTY),
+                        new AllocationDecider(Settings.EMPTY) {
             @Override
             public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
                 if (node.node().equals(node2)) {
@@ -218,8 +219,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
                 return Decision.YES;
             }
         }}));
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         testAllocator.allocateUnassigned(allocation);
         assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1));
         assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));
@@ -228,10 +229,10 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testDelayedAllocation() {
         RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(),
                 Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT);
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         if (randomBoolean()) {
             // we sometime return empty list of files, make sure we test this as well
-            testAllocator.addData(node2, false, null);
+            testAllocator.addData(node2, null);
         }
         boolean changed = testAllocator.allocateUnassigned(allocation);
         assertThat(changed, equalTo(false));
@@ -240,7 +241,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
         allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(),
                 Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT);
-        testAllocator.addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         changed = testAllocator.allocateUnassigned(allocation);
         assertThat(changed, equalTo(true));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
@@ -249,9 +250,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testCancelRecoveryBetterSyncId() {
         RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders());
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node2, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node3, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node2, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node3, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         boolean changed = testAllocator.processExistingRecoveries(allocation);
         assertThat(changed, equalTo(true));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1));
@@ -260,9 +261,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testNotCancellingRecoveryIfSyncedOnExistingRecovery() {
         RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders());
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node3, false, randomBoolean() ? "MATCH" : "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node3, randomBoolean() ? "MATCH" : "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         boolean changed = testAllocator.processExistingRecoveries(allocation);
         assertThat(changed, equalTo(false));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(0));
@@ -270,8 +271,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
     public void testNotCancellingRecovery() {
         RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders());
-        testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
-                .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
+        testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"))
+                .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM"));
         boolean changed = testAllocator.processExistingRecoveries(allocation);
         assertThat(changed, equalTo(false));
         assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(0));
@@ -352,7 +353,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
             return fetchDataCalled.getAndSet(false);
         }
 
-        public TestAllocator addData(DiscoveryNode node, boolean allocated, String syncId, StoreFileMetaData... files) {
+        public TestAllocator addData(DiscoveryNode node, String syncId, StoreFileMetaData... files) {
             if (data == null) {
                 data = new HashMap<>();
             }
@@ -364,7 +365,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
             if (syncId != null) {
                 commitData.put(Engine.SYNC_COMMIT_ID, syncId);
             }
-            data.put(node, new TransportNodesListShardStoreMetaData.StoreFilesMetaData(allocated, shardId,
+            data.put(node, new TransportNodesListShardStoreMetaData.StoreFilesMetaData(shardId,
                     new Store.MetadataSnapshot(unmodifiableMap(filesAsMap), unmodifiableMap(commitData), randomInt())));
             return this;
         }
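
The extra decider threaded through testThrottleWhenAllocatingToMatchingNode can be composed the same way outside the test. A minimal sketch, assuming only the constructors visible in the hunk; the factory class is illustrative:

import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
import org.elasticsearch.common.settings.Settings;

class DecidersSketch {
    // SameShardAllocationDecider keeps a replica off the node that already holds
    // its primary, which is why the test now includes it in the decider chain.
    static AllocationDeciders sameShardAwareDeciders() {
        return new AllocationDeciders(Settings.EMPTY,
                new AllocationDecider[]{new SameShardAllocationDecider(Settings.EMPTY)});
    }
}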

View File

@@ -36,7 +36,6 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
@@ -45,9 +44,9 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.env.ShardLock;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
-import org.elasticsearch.index.cache.query.QueryCache;
-import org.elasticsearch.index.cache.query.IndexQueryCache;
 import org.elasticsearch.index.cache.query.DisabledQueryCache;
+import org.elasticsearch.index.cache.query.IndexQueryCache;
+import org.elasticsearch.index.cache.query.QueryCache;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.EngineException;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@@ -61,9 +60,9 @@ import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.IndexStoreConfig;
 import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.indices.IndicesQueryCache;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.indices.IndicesQueryCache;
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@@ -73,6 +72,7 @@ import org.elasticsearch.script.ScriptEngineService;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptSettings;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.test.TestSearchContext;
@@ -84,11 +84,12 @@ import org.elasticsearch.watcher.ResourceWatcherService;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.Consumer;
 
+import static java.util.Collections.emptyMap;
+
 public class IndexModuleTests extends ESTestCase {
     private Index index;
     private Settings settings;
@@ -147,7 +148,8 @@ public class IndexModuleTests extends ESTestCase {
     }
 
     public void testWrapperIsBound() throws IOException {
-        IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(indexSettings, null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         module.setSearcherWrapper((s) -> new Wrapper());
         module.engineFactory.set(new MockEngineFactory(AssertingDirectoryReader.class));
         IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener));
@@ -165,7 +167,8 @@ public class IndexModuleTests extends ESTestCase {
                 .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store")
                 .build();
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
-        IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(indexSettings, null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         module.addIndexStore("foo_store", FooStore::new);
         try {
             module.addIndexStore("foo_store", FooStore::new);
@@ -188,7 +191,8 @@ public class IndexModuleTests extends ESTestCase {
            }
        };
         IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
-        IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(indexSettings, null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         Consumer<Settings> listener = (s) -> {};
         module.addIndexEventListener(eventListener);
         IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
@@ -204,7 +208,8 @@ public class IndexModuleTests extends ESTestCase {
 
     public void testListener() throws IOException {
         Setting<Boolean> booleanSetting = Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope);
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         Setting<Boolean> booleanSetting2 = Setting.boolSetting("index.foo.bar.baz", false, Property.Dynamic, Property.IndexScope);
         AtomicBoolean atomicBoolean = new AtomicBoolean(false);
         module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set);
@@ -224,7 +229,8 @@ public class IndexModuleTests extends ESTestCase {
     }
 
     public void testAddIndexOperationListener() throws IOException {
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         AtomicBoolean executed = new AtomicBoolean(false);
         IndexingOperationListener listener = new IndexingOperationListener() {
             @Override
@@ -254,7 +260,8 @@ public class IndexModuleTests extends ESTestCase {
     }
 
     public void testAddSearchOperationListener() throws IOException {
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         AtomicBoolean executed = new AtomicBoolean(false);
         SearchOperationListener listener = new SearchOperationListener() {
@@ -289,7 +296,8 @@ public class IndexModuleTests extends ESTestCase {
                 .put("index.similarity.my_similarity.key", "there is a key")
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .build();
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() {
             @Override
             public String name() {
@@ -313,7 +321,8 @@ public class IndexModuleTests extends ESTestCase {
     }
 
     public void testFrozen() {
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         module.freeze();
         String msg = "Can't modify IndexModule once the index service has been created";
         assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSearchOperationListener(null)).getMessage());
@@ -331,7 +340,8 @@ public class IndexModuleTests extends ESTestCase {
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .build();
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         try {
             module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
                     new IndicesFieldDataCache(settings, listener));
@@ -346,7 +356,8 @@ public class IndexModuleTests extends ESTestCase {
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 .build();
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         try {
             module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
                     new IndicesFieldDataCache(settings, listener));
@@ -359,7 +370,8 @@ public class IndexModuleTests extends ESTestCase {
         Settings indexSettings = Settings.builder()
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         module.forceQueryCacheProvider((a, b) -> new CustomQueryCache());
         expectThrows(AlreadySetException.class, () -> module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()));
         IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
@@ -372,7 +384,8 @@ public class IndexModuleTests extends ESTestCase {
         Settings indexSettings = Settings.builder()
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
-        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+        IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+                new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
         IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
                 new IndicesFieldDataCache(settings, listener));
         assertTrue(indexService.cache().query() instanceof IndexQueryCache);
@@ -384,7 +397,8 @@ public class IndexModuleTests extends ESTestCase {
                 .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false)
                 .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()); module.forceQueryCacheProvider((a, b) -> new CustomQueryCache());
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
new IndicesFieldDataCache(settings, listener)); new IndicesFieldDataCache(settings, listener));
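Every hunk in this file makes the same mechanical change: the old two-argument AnalysisRegistry(hunspellService, environment) constructor gives way to one taking the Environment plus four provider maps. A minimal sketch of the new call shape, assuming the maps are the char filter, token filter, tokenizer, and analyzer providers in that order (the parameter names themselves are not visible in this diff):

    import static java.util.Collections.emptyMap;

    import org.elasticsearch.env.Environment;
    import org.elasticsearch.index.analysis.AnalysisRegistry;

    class RegistrySketch {
        // A registry with no plugin-supplied components; each emptyMap() stands
        // in for one category of AnalysisProvider (char filters, token filters,
        // tokenizers, analyzers; order assumed from the hunks above).
        static AnalysisRegistry emptyRegistry(Environment env) {
            return new AnalysisRegistry(env, emptyMap(), emptyMap(), emptyMap(), emptyMap());
        }
    }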
View File
@ -65,6 +65,31 @@ public class IndexSettingsTests extends ESTestCase {
assertEquals(42, integer.get()); assertEquals(42, integer.get());
} }
public void testSettingsUpdateValidator() {
Version version = VersionUtils.getPreviousVersion();
Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
.put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build();
final AtomicInteger integer = new AtomicInteger(0);
Setting<Integer> integerSetting = Setting.intSetting("index.test.setting.int", -1,
Property.Dynamic, Property.IndexScope);
IndexMetaData metaData = newIndexMeta("index", theSettings);
IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting);
settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set,
(i) -> {if (i == 42) throw new AssertionError("boom");});
assertEquals(version, settings.getIndexVersionCreated());
assertEquals("0xdeadbeef", settings.getUUID());
assertFalse(settings.updateIndexMetaData(metaData));
assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap());
assertEquals(0, integer.get());
expectThrows(IllegalArgumentException.class, () -> settings.updateIndexMetaData(newIndexMeta("index",
Settings.builder().put(theSettings).put("index.test.setting.int", 42).build())));
assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 41)
.build())));
assertEquals(41, integer.get());
}
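The new testSettingsUpdateValidator exercises the consumer-plus-validator overload of addSettingsUpdateConsumer: the validator sees each candidate value first, and a value it rejects surfaces as an IllegalArgumentException from updateIndexMetaData before the consumer ever runs. A condensed sketch of the registration (the setting itself must also be registered when the IndexSettings is built, which the test does by passing it to newIndexSettings):

    import java.util.concurrent.atomic.AtomicInteger;

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.index.IndexSettings;

    class ValidatorSketch {
        // Registers a dynamic int setting whose updates are vetted before the
        // consumer (current::set) is applied: 42 is rejected, 41 goes through.
        static AtomicInteger register(IndexSettings settings, Setting<Integer> intSetting) {
            AtomicInteger current = new AtomicInteger(0);
            settings.getScopedSettings().addSettingsUpdateConsumer(intSetting, current::set,
                    i -> { if (i == 42) throw new AssertionError("boom"); });
            return current;
        }
    }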
public void testMergedSettingsArePassed() { public void testMergedSettingsArePassed() {
Version version = VersionUtils.getPreviousVersion(); Version version = VersionUtils.getPreviousVersion();
Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version) Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
View File
@ -677,7 +677,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
client().prepareIndex(IDX, "doc", "4").setSource("foo", "eggplant").get(); client().prepareIndex(IDX, "doc", "4").setSource("foo", "eggplant").get();
flushAndRefresh(IDX); flushAndRefresh(IDX);
SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).addDocValueField("foo").addSort("foo", SortOrder.ASC).get(); SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).addFieldDataField("foo").addSort("foo", SortOrder.ASC).get();
assertHitCount(resp, 4); assertHitCount(resp, 4);
assertOrderedSearchHits(resp, "2", "3", "4", "1"); assertOrderedSearchHits(resp, "2", "3", "4", "1");
SearchHit[] hits = resp.getHits().hits(); SearchHit[] hits = resp.getHits().hits();
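The one-line change above swaps addDocValueField for addFieldDataField (both spellings appear across 5.0 development). Under either name the request asks for the field's per-document values to be returned with each hit rather than parsed out of _source, and the values come back through the hit's field map. A sketch of the read side, reusing only calls that appear elsewhere in this diff:

    import java.util.Map;

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.search.SearchHitField;

    class DocValueReadSketch {
        // After a search built with addFieldDataField("foo"), each hit carries
        // the requested values in its field map.
        static Object firstFooValue(SearchResponse resp) {
            Map<String, SearchHitField> fields = resp.getHits().getAt(0).getFields();
            return fields.get("foo").values().get(0);
        }
    }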
View File
@ -20,6 +20,7 @@
package org.elasticsearch.index.analysis; package org.elasticsearch.index.analysis;
import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer;
@ -29,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
@ -41,12 +43,15 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
public class AnalysisServiceTests extends ESTestCase { public class AnalysisServiceTests extends ESTestCase {
private static AnalyzerProvider analyzerProvider(final String name) { private static AnalyzerProvider<?> analyzerProvider(final String name) {
return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDEX, new EnglishAnalyzer()); return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDEX, new EnglishAnalyzer());
} }
@ -58,7 +63,8 @@ public class AnalysisServiceTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
.build(idxSettings);
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
@ -68,33 +74,28 @@ public class AnalysisServiceTests extends ESTestCase {
Version version = VersionUtils.randomVersion(random()); Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default", analyzerProvider("default")), singletonMap("default", analyzerProvider("default")), emptyMap(), emptyMap(), emptyMap());
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
} }
public void testOverrideDefaultIndexAnalyzer() { public void testOverrideDefaultIndexAnalyzerIsUnsupported() {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
try { AnalyzerProvider<?> defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
Collections.singletonMap("default_index", new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer())), () -> new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); singletonMap("default_index", defaultIndex), emptyMap(), emptyMap(), emptyMap()));
fail("Expected ISE"); assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
} catch (IllegalArgumentException e) {
// expected
assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
}
} }
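The rewrite above folds the try/fail/catch boilerplate into expectThrows, which fails the test unless the lambda throws the given type and returns the caught exception for further assertions. The idiom in isolation (inside any ESTestCase subclass):

    public void testExpectThrowsIdiom() {
        // Fails if nothing is thrown or if the type differs; otherwise the
        // exception is returned so its message can be checked directly.
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
            throw new IllegalArgumentException("boom");
        });
        assertEquals("boom", e.getMessage());
    }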
public void testBackCompatOverrideDefaultIndexAnalyzer() { public void testBackCompatOverrideDefaultIndexAnalyzer() {
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_index", analyzerProvider("default_index")), singletonMap("default_index", analyzerProvider("default_index")), emptyMap(), emptyMap(), emptyMap());
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
@ -104,17 +105,17 @@ public class AnalysisServiceTests extends ESTestCase {
Version version = VersionUtils.randomVersion(random()); Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_search", analyzerProvider("default_search")), singletonMap("default_search", analyzerProvider("default_search")), emptyMap(), emptyMap(), emptyMap());
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
} }
public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() { public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
Map<String, AnalyzerProvider> analyzers = new HashMap<>(); Map<String, AnalyzerProvider<?>> analyzers = new HashMap<>();
analyzers.put("default_index", analyzerProvider("default_index")); analyzers.put("default_index", analyzerProvider("default_index"));
analyzers.put("default_search", analyzerProvider("default_search")); analyzers.put("default_search", analyzerProvider("default_search"));
AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
@ -125,7 +126,6 @@ public class AnalysisServiceTests extends ESTestCase {
} }
public void testConfigureCamelCaseTokenFilter() throws IOException { public void testConfigureCamelCaseTokenFilter() throws IOException {
// tests a filter that
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder() Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@ -137,7 +137,9 @@ public class AnalysisServiceTests extends ESTestCase {
.putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build(); .putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
AnalysisService analysisService = new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry()
.build(idxSettings);
try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) { try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) {
assertNotNull(custom_analyser); assertNotNull(custom_analyser);
TokenStream tokenStream = custom_analyser.tokenStream("foo", "J2SE j2ee"); TokenStream tokenStream = custom_analyser.tokenStream("foo", "J2SE j2ee");
@ -176,8 +178,10 @@ public class AnalysisServiceTests extends ESTestCase {
Settings indexSettings = Settings.builder() Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
AnalysisService otherAnalysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); .build(idxSettings);
AnalysisService otherAnalysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(),
emptyMap()).build(idxSettings);
final int numIters = randomIntBetween(5, 20); final int numIters = randomIntBetween(5, 20);
for (int i = 0; i < numIters; i++) { for (int i = 0; i < numIters; i++) {
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values()); PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
@ -196,7 +200,8 @@ public class AnalysisServiceTests extends ESTestCase {
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new AnalysisRegistry(null, new Environment(settings)).build(idxSettings)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()).build(idxSettings));
assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer"));
} }
} }
View File
@ -23,14 +23,14 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Collections;
import static java.util.Collections.emptyList;
public class AnalysisTestsHelper { public class AnalysisTestsHelper {
@ -49,7 +49,6 @@ public class AnalysisTestsHelper {
settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
} }
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
Environment environment = new Environment(settings); return new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry().build(idxSettings);
return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings);
} }
} }
View File
@ -26,6 +26,8 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import static org.elasticsearch.test.ESTestCase.createAnalysisService;
/** /**
*/ */
public class CharFilterTests extends ESTokenStreamTestCase { public class CharFilterTests extends ESTokenStreamTestCase {
@ -39,7 +41,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"}); assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"});
@ -56,7 +58,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
@ -78,7 +80,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "faBBbBB aBbbbBf"), new String[]{"foo", "oof"}); assertTokenStreamContents(analyzer1.tokenStream("test", "faBBbBB aBbbbBf"), new String[]{"foo", "oof"});
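This file and several below switch from building an AnalysisRegistry inline to a shared ESTestCase.createAnalysisService helper whose body is not part of this diff. A plausible sketch, assuming it simply routes through AnalysisModule with no plugins; the real implementation may differ:

    import static java.util.Collections.emptyList;

    import java.io.IOException;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;
    import org.elasticsearch.index.IndexSettings;
    import org.elasticsearch.index.analysis.AnalysisService;
    import org.elasticsearch.indices.analysis.AnalysisModule;

    // Hypothetical helper body: build a plugin-free registry and bind it to
    // the given index settings.
    public static AnalysisService createAnalysisService(IndexSettings indexSettings, Settings nodeSettings)
            throws IOException {
        return new AnalysisModule(new Environment(nodeSettings), emptyList())
                .getAnalysisRegistry().build(indexSettings);
    }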
View File
@ -31,15 +31,20 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory; import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import org.hamcrest.MatcherAssert; import org.hamcrest.MatcherAssert;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -50,8 +55,13 @@ public class CompoundAnalysisTests extends ESTestCase {
public void testDefaultsCompoundAnalysis() throws Exception { public void testDefaultsCompoundAnalysis() throws Exception {
Settings settings = getJsonSettings(); Settings settings = getJsonSettings();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); @Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
}
}));
AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec"); TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec");
MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class));
@ -62,14 +72,20 @@ public class CompoundAnalysisTests extends ESTestCase {
for (Settings settings : settingsArr) { for (Settings settings : settingsArr) {
List<String> terms = analyze(settings, "decompoundingAnalyzer", "donaudampfschiff spargelcremesuppe"); List<String> terms = analyze(settings, "decompoundingAnalyzer", "donaudampfschiff spargelcremesuppe");
MatcherAssert.assertThat(terms.size(), equalTo(8)); MatcherAssert.assertThat(terms.size(), equalTo(8));
MatcherAssert.assertThat(terms, hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe")); MatcherAssert.assertThat(terms,
hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe"));
} }
} }
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException { private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); @Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
}
}));
AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer(); Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer();
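Both tests above contribute the custom token filter through the new plugin hook rather than handing maps straight to AnalysisRegistry. Written as a named class instead of an anonymous one, the registration reduces to this (the remaining category getters keep their default empty maps, which the anonymous usage above relies on as well):

    import static java.util.Collections.singletonMap;

    import java.util.Map;

    import org.elasticsearch.index.analysis.TokenFilterFactory;
    import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
    import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
    import org.elasticsearch.plugins.AnalysisPlugin;

    // One token filter named "myfilter", nothing else; AnalysisModule merges
    // each category map from every plugin into the registry it builds.
    public class MyFilterPlugin implements AnalysisPlugin {
        @Override
        public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
            return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
        }
    }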
View File
@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import static org.elasticsearch.test.ESTestCase.createAnalysisService;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
@ -39,7 +40,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("single"); NamedAnalyzer analyzer1 = analysisService.analyzer("single");
View File
@ -27,6 +27,8 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import static org.elasticsearch.test.ESTestCase.createAnalysisService;
public class StopAnalyzerTests extends ESTokenStreamTestCase { public class StopAnalyzerTests extends ESTokenStreamTestCase {
public void testDefaultsCompoundAnalysis() throws Exception { public void testDefaultsCompoundAnalysis() throws Exception {
String json = "/org/elasticsearch/index/analysis/stop.json"; String json = "/org/elasticsearch/index/analysis/stop.json";
@ -36,7 +38,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build(); .build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1"); NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1");
View File
@ -26,13 +26,10 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.lucene.all.AllTokenStream; import org.elasticsearch.common.lucene.all.AllTokenStream;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
@ -67,8 +64,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); analysisService = createAnalysisService(idxSettings, settings);
match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!"); match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!");
match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!"); match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!");
View File
@ -36,9 +36,7 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
@ -97,7 +95,7 @@ public class CodecTests extends ESTestCase {
.build(); .build();
IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings); IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap());
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings); AnalysisService analysisService = createAnalysisService(settings, nodeSettings);
MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()); MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap());
MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null); MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null);
return new CodecService(service, ESLoggerFactory.getLogger("test")); return new CodecService(service, ESLoggerFactory.getLogger("test"));
View File
@ -166,9 +166,9 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
private SearchRequestBuilder prepareSearch() { private SearchRequestBuilder prepareSearch() {
SearchRequestBuilder request = client().prepareSearch("test").setTypes("test"); SearchRequestBuilder request = client().prepareSearch("test").setTypes("test");
request.addStoredField("foo.token_count"); request.addField("foo.token_count");
if (loadCountedFields) { if (loadCountedFields) {
request.addStoredField("foo"); request.addField("foo");
} }
return request; return request;
} }
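The prepareSearch changes here (and in the geo point tests below) toggle between the two spellings of the stored-field request, addField and addStoredField; the retrieval path on the response is the same either way. A neutral sketch using the spelling on this side of the merge:

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.search.SearchHitField;

    class StoredFieldSketch {
        // Ask for the stored field "foo" with each hit and read it back; the
        // builder method is addField here, addStoredField under the rename.
        static SearchHitField fetchFoo(SearchRequestBuilder request) {
            SearchResponse resp = request.addField("foo").get();
            return resp.getHits().getAt(0).getFields().get("foo");
        }
    }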
View File
@ -816,7 +816,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get();
// match all search with geohash field // match all search with geohash field
SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields(); Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields();
// ensure single geohash was indexed // ensure single geohash was indexed
@ -841,7 +841,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get();
// match all search with geohash field (includes prefixes) // match all search with geohash field (includes prefixes)
SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields(); Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields();
List<Object> hashes = m.get("location.geohash").values(); List<Object> hashes = m.get("location.geohash").values();
@ -872,11 +872,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
} }
// query by geohash subfield // query by geohash subfield
SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
assertEquals(numDocs, searchResponse.getHits().totalHits()); assertEquals(numDocs, searchResponse.getHits().totalHits());
// query by latlon subfield // query by latlon subfield
searchResponse = client().prepareSearch().addStoredField("location.latlon").setQuery(matchAllQuery()).execute().actionGet(); searchResponse = client().prepareSearch().addField("location.latlon").setQuery(matchAllQuery()).execute().actionGet();
assertEquals(numDocs, searchResponse.getHits().totalHits()); assertEquals(numDocs, searchResponse.getHits().totalHits());
} }
} }
View File
@ -218,8 +218,8 @@ public class InnerHitBuilderTests extends ESTestCase {
innerHits.setExplain(randomBoolean()); innerHits.setExplain(randomBoolean());
innerHits.setVersion(randomBoolean()); innerHits.setVersion(randomBoolean());
innerHits.setTrackScores(randomBoolean()); innerHits.setTrackScores(randomBoolean());
innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); innerHits.setFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)));
innerHits.setDocValueFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); innerHits.setFieldDataFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)));
// Random script fields deduped on their field name. // Random script fields deduped on their field name.
Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>(); Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>();
for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) { for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) {
@ -294,11 +294,11 @@ public class InnerHitBuilderTests extends ESTestCase {
break; break;
case 6: case 6:
if (randomBoolean()) { if (randomBoolean()) {
instance.setDocValueFields(randomValueOtherThan(instance.getDocValueFields(), () -> { instance.setFieldDataFields(randomValueOtherThan(instance.getFieldDataFields(), () -> {
return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16));
})); }));
} else { } else {
instance.addDocValueField(randomAsciiOfLengthBetween(1, 16)); instance.addFieldDataField(randomAsciiOfLengthBetween(1, 16));
} }
break; break;
case 7: case 7:
@ -341,12 +341,12 @@ public class InnerHitBuilderTests extends ESTestCase {
HighlightBuilderTests::randomHighlighterBuilder)); HighlightBuilderTests::randomHighlighterBuilder));
break; break;
case 11: case 11:
if (instance.getStoredFieldNames() == null || randomBoolean()) { if (instance.getFieldNames() == null || randomBoolean()) {
instance.setStoredFieldNames(randomValueOtherThan(instance.getStoredFieldNames(), () -> { instance.setFieldNames(randomValueOtherThan(instance.getFieldNames(), () -> {
return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16));
})); }));
} else { } else {
instance.getStoredFieldNames().add(randomAsciiOfLengthBetween(1, 16)); instance.getFieldNames().add(randomAsciiOfLengthBetween(1, 16));
} }
break; break;
default: default:
View File
@ -113,7 +113,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
} }
refresh(); refresh();
SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get(); SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addField("_id").get();
Set<String> uniqueIds = new HashSet(); Set<String> uniqueIds = new HashSet();
long dupCounter = 0; long dupCounter = 0;
View File
@ -933,7 +933,7 @@ public class StoreTests extends ESTestCase {
public void testStreamStoreFilesMetaData() throws Exception { public void testStreamStoreFilesMetaData() throws Exception {
Store.MetadataSnapshot metadataSnapshot = createMetaDataSnapshot(); Store.MetadataSnapshot metadataSnapshot = createMetaDataSnapshot();
TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", "_na_", 0),metadataSnapshot); TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(new ShardId("test", "_na_", 0),metadataSnapshot);
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
org.elasticsearch.Version targetNodeVersion = randomVersion(random()); org.elasticsearch.Version targetNodeVersion = randomVersion(random());
View File
@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
package org.elasticsearch.index.analysis; package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
@ -36,9 +36,20 @@ import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.Analysis;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.AnalysisTestsHelper;
import org.elasticsearch.index.analysis.CustomAnalyzer;
import org.elasticsearch.index.analysis.MappingCharFilterFactory;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
import org.elasticsearch.index.analysis.StandardTokenizerFactory;
import org.elasticsearch.index.analysis.StopTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory; import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import org.hamcrest.MatcherAssert; import org.hamcrest.MatcherAssert;
@ -49,9 +60,11 @@ import java.io.StringReader;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Collections; import java.util.Map;
import java.util.Set; import java.util.Set;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -72,8 +85,16 @@ public class AnalysisModuleTests extends ModuleTestCase {
} }
public AnalysisRegistry getNewRegistry(Settings settings) { public AnalysisRegistry getNewRegistry(Settings settings) {
return new AnalysisRegistry(null, new Environment(settings), try {
Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.emptyMap(), Collections.emptyMap()); return new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
}
})).getAnalysisRegistry();
} catch (IOException e) {
throw new RuntimeException(e);
}
} }
private Settings loadFromClasspath(String path) throws IOException { private Settings loadFromClasspath(String path) throws IOException {
@ -125,7 +146,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion()); assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion());
} }
private void assertTokenFilter(String name, Class clazz) throws IOException { private void assertTokenFilter(String name, Class<?> clazz) throws IOException {
Settings settings = Settings.builder() Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
@ -148,17 +169,9 @@ public class AnalysisModuleTests extends ModuleTestCase {
StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0]; StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0];
assertThat(stop1.stopWords().size(), equalTo(1)); assertThat(stop1.stopWords().size(), equalTo(1));
//assertThat((Iterable<char[]>) stop1.stopWords(), hasItem("test-stop".toCharArray()));
analyzer = analysisService.analyzer("custom2").analyzer(); analyzer = analysisService.analyzer("custom2").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class)); assertThat(analyzer, instanceOf(CustomAnalyzer.class));
CustomAnalyzer custom2 = (CustomAnalyzer) analyzer;
// HtmlStripCharFilterFactory html = (HtmlStripCharFilterFactory) custom2.charFilters()[0];
// assertThat(html.readAheadLimit(), equalTo(HTMLStripCharFilter.DEFAULT_READ_AHEAD));
//
// html = (HtmlStripCharFilterFactory) custom2.charFilters()[1];
// assertThat(html.readAheadLimit(), equalTo(1024));
// verify position increment gap // verify position increment gap
analyzer = analysisService.analyzer("custom6").analyzer(); analyzer = analysisService.analyzer("custom6").analyzer();
@ -248,7 +261,8 @@ public class AnalysisModuleTests extends ModuleTestCase {
getAnalysisService(settings); getAnalysisService(settings);
fail("This should fail with IllegalArgumentException because the analyzers name starts with _"); fail("This should fail with IllegalArgumentException because the analyzers name starts with _");
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")).or(equalTo("analyzer name must not start with '_'. got \"_invalidName\""))); assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\""))
.or(equalTo("analyzer name must not start with '_'. got \"_invalidName\"")));
} }
} }
@ -289,13 +303,18 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build(); .build();
Environment environment = new Environment(settings); Environment environment = new Environment(settings);
AnalysisModule module = new AnalysisModule(environment);
InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff"); InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic"); InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic");
Dictionary dictionary;
try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) { try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) {
Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic); dictionary = new Dictionary(tmp, "hunspell", aff, dic);
module.registerHunspellDictionary("foo", dictionary);
assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary);
} }
AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() {
@Override
public Map<String, Dictionary> getHunspellDictionaries() {
return singletonMap("foo", dictionary);
}
}));
assertSame(dictionary, module.getHunspellService().getDictionary("foo"));
} }
} }
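The reworked hunspell test loads the Lucene Dictionary up front from its .aff/.dic resources and hands it to AnalysisModule through the new getHunspellDictionaries hook, replacing the old registerHunspellDictionary call and the Guice binding assertion. Condensed into one block, using only the calls visible above:

    try (Directory tmp = new SimpleFSDirectory(environment.tmpFile());
         InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
         InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic")) {
        // Dictionary(Directory, String, InputStream, InputStream) parses the
        // affix and dictionary streams, using tmp for temporary state.
        Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic);
        AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() {
            @Override
            public Map<String, Dictionary> getHunspellDictionaries() {
                return singletonMap("foo", dictionary);
            }
        }));
        // The module-level HunspellService now serves the plugin's dictionary.
        assertSame(dictionary, module.getHunspellService().getDictionary("foo"));
    }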
View File
@ -19,15 +19,38 @@
package org.elasticsearch.indices.analysis; package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
public class DummyAnalysisPlugin extends Plugin { import java.util.Map;
public void onModule(AnalysisModule module) { import static java.util.Collections.singletonMap;
module.registerAnalyzer("dummy", (a, b, c, d) -> new DummyAnalyzerProvider());
module.registerTokenFilter("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory()); public class DummyAnalysisPlugin extends Plugin implements AnalysisPlugin {
module.registerTokenizer("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory()); @Override
module.registerCharFilter("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory()); public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
return singletonMap("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory());
}
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory());
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory());
}
@Override
public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
return singletonMap("dummy", (a, b, c, d) -> new DummyAnalyzerProvider());
} }
} }
View File
@ -19,36 +19,30 @@
 package org.elasticsearch.indices.analyze;
 
 import org.apache.lucene.analysis.hunspell.Dictionary;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.indices.analysis.HunspellService;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
-import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.hamcrest.Matchers;
+import org.elasticsearch.test.ESTestCase;
 
+import static java.util.Collections.emptyMap;
 import static org.elasticsearch.indices.analysis.HunspellService.HUNSPELL_IGNORE_CASE;
 import static org.elasticsearch.indices.analysis.HunspellService.HUNSPELL_LAZY_LOAD;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.hasToString;
 import static org.hamcrest.Matchers.notNullValue;
 
-/**
- *
- */
-@ClusterScope(scope= Scope.TEST, numDataNodes=0)
-public class HunspellServiceIT extends ESIntegTestCase {
+public class HunspellServiceTests extends ESTestCase {
     public void testLocaleDirectoryWithNodeLevelConfig() throws Exception {
         Settings settings = Settings.builder()
             .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir"))
             .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
             .put(HUNSPELL_IGNORE_CASE.getKey(), true)
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .build();
 
-        internalCluster().startNode(settings);
-        Dictionary dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US");
+        Dictionary dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US");
         assertThat(dictionary, notNullValue());
-        assertIgnoreCase(true, dictionary);
+        assertTrue(dictionary.getIgnoreCase());
     }
 
     public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception {
@ -58,58 +52,42 @@ public class HunspellServiceIT extends ESIntegTestCase {
             .put(HUNSPELL_IGNORE_CASE.getKey(), true)
             .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false)
             .put("indices.analysis.hunspell.dictionary.en_US.ignore_case", false)
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .build();
 
-        internalCluster().startNode(settings);
-        Dictionary dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US");
+        Dictionary dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US");
         assertThat(dictionary, notNullValue());
-        assertIgnoreCase(false, dictionary);
+        assertFalse(dictionary.getIgnoreCase());
 
         // testing that dictionary specific settings override node level settings
-        dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US_custom");
+        dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US_custom");
         assertThat(dictionary, notNullValue());
-        assertIgnoreCase(true, dictionary);
+        assertTrue(dictionary.getIgnoreCase());
     }
 
     public void testDicWithNoAff() throws Exception {
         Settings settings = Settings.builder()
             .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/no_aff_conf_dir"))
             .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .build();
 
-        Dictionary dictionary = null;
-        try {
-            internalCluster().startNode(settings);
-            dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US");
-            fail("Missing affix file didn't throw an error");
-        }
-        catch (Throwable t) {
-            assertNull(dictionary);
-            assertThat(ExceptionsHelper.unwrap(t, ElasticsearchException.class).toString(), Matchers.containsString("Missing affix file"));
-        }
+        IllegalStateException e = expectThrows(IllegalStateException.class,
+            () -> new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US"));
+        assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage());
+        assertThat(e.getCause(), hasToString(containsString("Missing affix file")));
     }
 
     public void testDicWithTwoAffs() throws Exception {
         Settings settings = Settings.builder()
             .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/two_aff_conf_dir"))
             .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .build();
 
-        Dictionary dictionary = null;
-        try {
-            internalCluster().startNode(settings);
-            dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US");
-            fail("Multiple affix files didn't throw an error");
-        } catch (Throwable t) {
-            assertNull(dictionary);
-            assertThat(ExceptionsHelper.unwrap(t, ElasticsearchException.class).toString(), Matchers.containsString("Too many affix files"));
-        }
-    }
-
-    // TODO: on next upgrade of lucene, just use new getter
-    private void assertIgnoreCase(boolean expected, Dictionary dictionary) throws Exception {
-//        assertEquals(expected, dictionary.getIgnoreCase());
+        IllegalStateException e = expectThrows(IllegalStateException.class,
+            () -> new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US"));
+        assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage());
+        assertThat(e.getCause(), hasToString(containsString("Too many affix files")));
     }
 }


@ -19,7 +19,6 @@
 package org.elasticsearch.indices.cluster;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
 import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction;
@ -156,7 +155,7 @@ public class ClusterStateChanges {
             metaDataIndexUpgradeService, nodeServicesProvider, indicesService);
         MetaDataDeleteIndexService deleteIndexService = new MetaDataDeleteIndexService(settings, clusterService, allocationService);
         MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(settings, clusterService,
-            allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, new IndexNameExpressionResolver(settings));
+            allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, nodeServicesProvider);
         MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(settings, clusterService, indicesService,
             allocationService, new AliasValidator(settings), Collections.emptySet(), environment,
             nodeServicesProvider, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);


@ -29,7 +29,9 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.MergePolicyConfig;
@ -37,9 +39,13 @@ import org.elasticsearch.index.MergeSchedulerConfig;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.Store;
 import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 
 import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
@ -53,6 +59,42 @@ import static org.hamcrest.Matchers.nullValue;
 public class UpdateSettingsIT extends ESIntegTestCase {
+    public void testInvalidDynamicUpdate() {
+        createIndex("test");
+        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+            client().admin().indices().prepareUpdateSettings("test")
+                .setSettings(Settings.builder()
+                    .put("index.dummy", "boom")
+                )
+                .execute().actionGet());
+        assertEquals(exception.getCause().getMessage(), "this setting goes boom");
+        IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
+        assertNotEquals(indexMetaData.getSettings().get("index.dummy"), "invalid dynamic value");
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return pluginList(DummySettingPlugin.class);
+    }
+
+    public static class DummySettingPlugin extends Plugin {
+        public static final Setting<String> DUMMY_SETTING = Setting.simpleString("index.dummy",
+            Setting.Property.IndexScope, Setting.Property.Dynamic);
+
+        @Override
+        public void onIndexModule(IndexModule indexModule) {
+            indexModule.addSettingsUpdateConsumer(DUMMY_SETTING, (s) -> {}, (s) -> {
+                if (s.equals("boom"))
+                    throw new IllegalArgumentException("this setting goes boom");
+            });
+        }
+
+        @Override
+        public List<Setting<?>> getSettings() {
+            return Collections.singletonList(DUMMY_SETTING);
+        }
+    }
+
     public void testResetDefault() {
         createIndex("test");

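Editor's note: `DummySettingPlugin` above is a compact recipe for a validated dynamic index setting: `getSettings()` registers `index.dummy`, and `addSettingsUpdateConsumer` installs a consumer whose validator runs before the cluster state is updated, which is why the failed update in `testInvalidDynamicUpdate` leaves no trace of the bad value. A sketch of both outcomes, using the same calls as the test (the accepted value "quiet" is arbitrary):

[source,java]
--------------------------------------------------
// Accepted: the validator does not throw for this value.
client().admin().indices().prepareUpdateSettings("test")
        .setSettings(Settings.builder().put("index.dummy", "quiet"))
        .get();

// Rejected: the update consumer throws, so the old value is kept.
expectThrows(IllegalArgumentException.class, () ->
        client().admin().indices().prepareUpdateSettings("test")
                .setSettings(Settings.builder().put("index.dummy", "boom"))
                .get());
--------------------------------------------------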

@ -116,7 +116,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
         ensureGreen();
         SearchResponse searchResponse = client().prepareSearch("test_index")
                 .setQuery(termQuery("field1", "value1"))
-                .addStoredField("field1").addStoredField("field2")
+                .addField("field1").addField("field2")
                 .execute().actionGet();
 
         assertHitCount(searchResponse, 1);
@ -130,7 +130,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
         // now only match on one template (template_1)
         searchResponse = client().prepareSearch("text_index")
                 .setQuery(termQuery("field1", "value1"))
-                .addStoredField("field1").addStoredField("field2")
+                .addField("field1").addField("field2")
                 .execute().actionGet();
         if (searchResponse.getFailedShards() > 0) {
             logger.warn("failed search {}", Arrays.toString(searchResponse.getShardFailures()));


@ -219,7 +219,7 @@ public class RelocationIT extends ESIntegTestCase {
         for (int i = 0; i < 10; i++) {
             try {
                 logger.info("--> START search test round {}", i + 1);
-                SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoStoredFields().execute().actionGet().getHits();
+                SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoFields().execute().actionGet().getHits();
                 ranOnce = true;
                 if (hits.totalHits() != indexer.totalIndexedDocs()) {
                     int[] hitIds = new int[(int) indexer.totalIndexedDocs()];


@ -181,7 +181,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
         // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same value for NUMBER_FIELD_NAME. This will check that after
         // random indexing each document only has 1 value for NUMBER_FIELD_NAME and it is the correct value. Following this initial change its seems that this call was getting
         // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type
-        SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME)
+        SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME)
                 .order(SortOrder.ASC)).setSize(5000).get();
         assertSearchResponse(response);
         long totalHits = response.getHits().totalHits();


@ -19,6 +19,7 @@
 package org.elasticsearch.search.builder;
 
+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -89,7 +90,9 @@ import java.util.concurrent.TimeUnit;
 import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
 import static org.elasticsearch.test.ClusterServiceUtils.setState;
+import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasToString;
 
 public class SearchSourceBuilderTests extends ESTestCase {
     private static Injector injector;
@ -219,12 +222,12 @@ public class SearchSourceBuilderTests extends ESTestCase {
             for (int i = 0; i < fieldsSize; i++) {
                 fields.add(randomAsciiOfLengthBetween(5, 50));
             }
-            builder.storedFields(fields);
+            builder.fields(fields);
         }
         if (randomBoolean()) {
             int fieldDataFieldsSize = randomInt(25);
             for (int i = 0; i < fieldDataFieldsSize; i++) {
-                builder.docValueField(randomAsciiOfLengthBetween(5, 50));
+                builder.fieldDataField(randomAsciiOfLengthBetween(5, 50));
             }
         }
         if (randomBoolean()) {
@ -593,6 +596,27 @@
         }
     }
 
+    public void testTimeoutWithUnits() throws IOException {
+        final String timeout = randomTimeValue();
+        final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
+        try (XContentParser parser = XContentFactory.xContent(query).createParser(query)) {
+            final SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, suggesters);
+            assertThat(builder.timeoutInMillis(), equalTo(TimeValue.parseTimeValue(timeout, null, "timeout").millis()));
+        }
+    }
+
+    public void testTimeoutWithoutUnits() throws IOException {
+        final int timeout = randomIntBetween(1, 1024);
+        final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
+        try (XContentParser parser = XContentFactory.xContent(query).createParser(query)) {
+            final ElasticsearchParseException e =
+                expectThrows(
+                    ElasticsearchParseException.class,
+                    () -> SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, suggesters));
+            assertThat(e, hasToString(containsString("unit is missing or unrecognized")));
+        }
+    }
+
     public void testEmptyPostFilter() throws IOException {
         SearchSourceBuilder builder = new SearchSourceBuilder();
         String query = "{ \"post_filter\": {} }";

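Editor's note: the two new timeout tests pin down the parsing contract: a `timeout` value in the search source must carry an explicit unit. A quick sketch of the distinction, using the same `TimeValue.parseTimeValue` helper the first test relies on:

[source,java]
--------------------------------------------------
// Explicit unit: parses to 5000 ms.
long millis = TimeValue.parseTimeValue("5s", null, "timeout").millis();

// A bare number such as "5" carries no unit; SearchSourceBuilder.fromXContent
// now rejects it with an ElasticsearchParseException whose message contains
// "unit is missing or unrecognized", exactly as asserted above.
--------------------------------------------------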

@ -202,7 +202,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
         refresh();
 
         // TEST FETCHING _parent from child
-        SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).storedFields("_parent").execute()
+        SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).fields("_parent").execute()
                 .actionGet();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
@ -210,7 +210,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1"));
 
         // TEST matching on parent
-        searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).storedFields("_parent").get();
+        searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).fields("_parent").get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
         assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2")));
@ -218,7 +218,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2")));
         assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1"));
 
-        searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).storedFields("_parent").get();
+        searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).fields("_parent").get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
         assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2")));
@ -1394,7 +1394,7 @@
         SearchResponse scrollResponse = client().prepareSearch("test")
                 .setScroll(TimeValue.timeValueSeconds(30))
                 .setSize(1)
-                .addStoredField("_id")
+                .addField("_id")
                 .setQuery(query)
                 .execute()
                 .actionGet();


@ -415,13 +415,13 @@ public class GeoFilterIT extends ESIntegTestCase {
             assertThat(hit.getId(), equalTo(key));
         }
 
-        SearchResponse world = client().prepareSearch().addStoredField("pin").setQuery(
+        SearchResponse world = client().prepareSearch().addField("pin").setQuery(
                 geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999)
         ).execute().actionGet();
 
         assertHitCount(world, 53);
 
-        SearchResponse distance = client().prepareSearch().addStoredField("pin").setQuery(
+        SearchResponse distance = client().prepareSearch().addField("pin").setQuery(
                 geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)
         ).execute().actionGet();


@ -19,6 +19,8 @@
 package org.elasticsearch.search.geo;
 
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.settings.Settings;
 import org.locationtech.spatial4j.shape.Rectangle;
 import com.vividsolutions.jts.geom.Coordinate;
@ -54,6 +56,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.nullValue;
 
 public class GeoShapeQueryTests extends ESSingleNodeTestCase {
@ -197,6 +200,30 @@
         assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
     }
 
+    public void testIndexedShapeReferenceSourceDisabled() throws Exception {
+        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
+            .startObject("properties")
+                .startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "quadtree")
+                .endObject()
+            .endObject()
+            .endObject();
+        client().admin().indices().prepareCreate("test").addMapping("type1", mapping).get();
+        createIndex("shapes", Settings.EMPTY, "shape_type", "_source", "enabled=false");
+        ensureGreen();
+
+        ShapeBuilder shape = ShapeBuilders.newEnvelope(new Coordinate(-45, 45), new Coordinate(45, -45));
+        client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject()
+            .field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get();
+
+        ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch("test").setTypes("type1")
+            .setQuery(geoIntersectionQuery("location", "Big_Rectangle", "shape_type")).get());
+        assertThat(e.getRootCause(), instanceOf(IllegalArgumentException.class));
+        assertThat(e.getRootCause().getMessage(), containsString("source disabled"));
+    }
+
     public void testReusableBuilder() throws IOException {
         ShapeBuilder polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
                 .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close())


@ -156,7 +156,7 @@ public class InnerHitsIT extends ESIntegTestCase {
                 .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
                         new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message"))
                                 .setExplain(true)
-                                .addDocValueField("comments.message")
+                                .addFieldDataField("comments.message")
                                 .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
                                 .setSize(1)
                 )).get();
@ -287,7 +287,7 @@ public class InnerHitsIT extends ESIntegTestCase {
                 .setQuery(
                         hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
                                 new InnerHitBuilder()
-                                        .addDocValueField("message")
+                                        .addFieldDataField("message")
                                         .setHighlightBuilder(new HighlightBuilder().field("message"))
                                         .setExplain(true).setSize(1)
                                         .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE,


@ -1148,6 +1148,7 @@ public class SearchQueryIT extends ESIntegTestCase {
jsonBuilder().startObject().startObject("type").startObject("properties") jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("arr").startObject("properties").startObject("term").field("type", "text") .startObject("arr").startObject("properties").startObject("term").field("type", "text")
.endObject().endObject().endObject().endObject().endObject().endObject())); .endObject().endObject().endObject().endObject().endObject().endObject()));
assertAcked(prepareCreate("lookup3").addMapping("type", "_source", "enabled=false", "terms","type=text"));
assertAcked(prepareCreate("test").addMapping("type", "term", "type=text")); assertAcked(prepareCreate("test").addMapping("type", "term", "type=text"));
indexRandom(true, indexRandom(true,
@ -1172,6 +1173,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.startObject().field("term", "4").endObject() .startObject().field("term", "4").endObject()
.endArray() .endArray()
.endObject()), .endObject()),
client().prepareIndex("lookup3", "type", "1").setSource("terms", new String[]{"1", "3"}),
client().prepareIndex("test", "type", "1").setSource("term", "1"), client().prepareIndex("test", "type", "1").setSource("term", "1"),
client().prepareIndex("test", "type", "2").setSource("term", "2"), client().prepareIndex("test", "type", "2").setSource("term", "2"),
client().prepareIndex("test", "type", "3").setSource("term", "3"), client().prepareIndex("test", "type", "3").setSource("term", "3"),
@ -1227,6 +1229,16 @@ public class SearchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test") searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get();
assertHitCount(searchResponse, 0L); assertHitCount(searchResponse, 0L);
// index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "missing", "terms"))).get();
assertHitCount(searchResponse, 0L);
// index "lookup3" type "type" has the source disabled: ignore the lookup terms
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term" , new TermsLookup("lookup3", "type", "1", "terms"))).get();
assertHitCount(searchResponse, 0L);
} }
public void testBasicQueryById() throws Exception { public void testBasicQueryById() throws Exception {


@ -37,10 +37,10 @@ public class SourceFetchingIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch("test").get(); SearchResponse response = client().prepareSearch("test").get();
assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
response = client().prepareSearch("test").addStoredField("bla").get(); response = client().prepareSearch("test").addField("bla").get();
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
response = client().prepareSearch("test").addStoredField("_source").get(); response = client().prepareSearch("test").addField("_source").get();
assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
} }


@ -87,7 +87,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase {
                 .setQuery(matchAllQuery())
                 .setSize(randomIntBetween(1, numDocs + 5))
                 .addSort("_timestamp", order)
-                .addStoredField("_timestamp")
+                .addField("_timestamp")
                 .execute().actionGet();
         assertNoFailures(searchResponse);
         SearchHit[] hits = searchResponse.getHits().hits();


@ -246,7 +246,7 @@ PUT /test/person/1?refresh=true
 }
 
 GET /test/person/_search
 {
-  "stored_fields": [ "file.content_type" ],
+  "fields": [ "file.content_type" ],
   "query": {
     "match": {
       "file.content_type": "text plain"
@ -367,7 +367,7 @@ PUT /test/person/1?refresh=true
 }
 
 GET /test/person/_search
 {
-  "stored_fields": [],
+  "fields": [],
   "query": {
     "match": {
       "file.content": "king queen"


@ -101,7 +101,7 @@ In the above example, the `init_script` creates an array `transactions` in the `
 map_script:: Executed once per document collected. This is the only required script. If no combine_script is specified, the resulting state
 needs to be stored in an object named `_agg`.
 +
-In the above example, the `map_script` checks the value of the type field. If the value if 'sale' the value of the amount field
+In the above example, the `map_script` checks the value of the type field. If the value is 'sale' the value of the amount field
 is added to the transactions array. If the value of the type field is not 'sale' the negated value of the amount field is added
 to transactions.


@ -22,7 +22,7 @@ The top_hits aggregation returns regular search hits, because of this many per h
 * <<search-request-named-queries-and-filters,Named filters and queries>>
 * <<search-request-source-filtering,Source filtering>>
 * <<search-request-script-fields,Script fields>>
-* <<search-request-docvalue-fields,Doc value fields>>
+* <<search-request-fielddata-fields,Fielddata fields>>
 * <<search-request-version,Include versions>>
 
 ==== Example


@ -146,7 +146,8 @@ You can also use the same source filtering parameters to control which parts of
 curl -XGET 'http://localhost:9200/twitter/tweet/1/_source?_source_include=*.id&_source_exclude=entities'
 --------------------------------------------------
 
-Note, there is also a HEAD variant for the _source endpoint to efficiently test for document existence.
+Note, there is also a HEAD variant for the _source endpoint to efficiently test for document _source existence.
+An existing document will not have a _source if it is disabled in the <<mapping-source-field,mapping>>.
 
 Curl example:
 
 [source,js]


@ -28,7 +28,7 @@ help reduce the cost of each fsync.
 Make sure to watch for `TOO_MANY_REQUESTS (429)` response codes
 (`EsRejectedExecutionException` with the Java client), which is the way that
 elasticsearch tells you that it cannot keep up with the current indexing rate.
-When it happens, you should pause ndexing a bit before trying again, ideally
+When it happens, you should pause indexing a bit before trying again, ideally
 with randomized exponential backoff.
 
 Similarly to sizing bulk requests, only testing can tell what the optimal

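Editor's note: the retry advice above (pause, then retry with randomized exponential backoff) is easy to get subtly wrong, so here is a minimal, self-contained sketch. The bounds (5 attempts, 100 ms base, 10 s cap) and the "full jitter" flavor are illustrative choices of mine, not recommendations from this documentation:

[source,java]
--------------------------------------------------
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

public final class BulkBackoff {
    private static final Random RANDOM = new Random();

    /** Runs {@code attempt} until it reports success or retries are exhausted. */
    public static boolean runWithBackoff(BooleanSupplier attempt) throws InterruptedException {
        long baseMillis = 100;
        for (int retry = 0; retry < 5; retry++) {
            if (attempt.getAsBoolean()) { // true = request accepted (no 429)
                return true;
            }
            // Exponentially growing window, capped at 10s, sleep a random
            // duration inside it ("full jitter").
            long cap = Math.min(TimeUnit.SECONDS.toMillis(10), baseMillis << retry);
            Thread.sleep(1 + RANDOM.nextInt((int) cap));
        }
        return false; // still rejected after all retries
    }
}
--------------------------------------------------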

@ -190,5 +190,5 @@ depending on the file extension using the <<file-system,`index.store.preload`>>
 setting.
 
 WARNING: Loading data into the filesystem cache eagerly on too many indices or
-too many files will make searh _slower_ if the filesystem cache is not large
+too many files will make search _slower_ if the filesystem cache is not large
 enough to hold all the data. Use with caution.


@ -48,7 +48,7 @@ PUT my_index/my_type/1
 GET my_index/_search
 {
-  "stored_fields": [ "title", "date" ] <2>
+  "fields": [ "title", "date" ] <2>
 }
 --------------------------------------------------
 // CONSOLE


@ -122,3 +122,8 @@ been removed.
 Plugins that register custom scripts should implement `ScriptPlugin` and remove
 their `onModule(ScriptModule)` implementation.
 
+==== AnalysisPlugin
+
+Plugins that register custom analysis components should implement
+`AnalysisPlugin` and remove their `onModule(AnalysisModule)` implementation.

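Editor's note: a before-and-after sketch of that migration, using a single token filter as the example. `MyAnalysisPlugin` and `MyTokenFilterFactory` are made-up names, and the "after" shape mirrors `DummyAnalysisPlugin` earlier in this diff:

[source,java]
--------------------------------------------------
// Before (pre-5.0): extension via onModule, now removed.
public class MyAnalysisPlugin extends Plugin {
    public void onModule(AnalysisModule module) {
        module.registerTokenFilter("my_filter", (a, b, c, d) -> new MyTokenFilterFactory());
    }
}

// After: implement the AnalysisPlugin interface instead.
public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
    @Override
    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        return singletonMap("my_filter", (a, b, c, d) -> new MyTokenFilterFactory());
    }
}
--------------------------------------------------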

@ -64,15 +64,11 @@ characteristics as the former `scan` search type.
 ==== `fields` parameter
 
-The `fields` parameter has been replaced by `stored_fields`.
-The `stored_fields` parameter will only return stored fields
+The `fields` parameter used to try to retrieve field values from stored
+fields, and fall back to extracting from the `_source` if a field is not
+marked as stored. Now, the `fields` parameter will only return stored fields
 -- it will no longer extract values from the `_source`.
 
-==== `fielddata_fields` parameter
-
-The `fielddata_fields` has been deprecated, use parameter `docvalue_fields` instead.
-
 ==== search-exists API removed
 
 The search exists api has been removed in favour of using the search api with

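Editor's note: for Java API users, the builder methods track the same renames throughout this diff: `addStoredField`, `addDocValueField`, and `setNoStoredFields` become `addField`, `addFieldDataField`, and `setNoFields` again. A small sketch of the resulting calls; the index and field names are placeholders, and the exact builder surface is an assumption inferred from the test changes above:

[source,java]
--------------------------------------------------
SearchResponse response = client.prepareSearch("my_index")
        .addField("title")           // returned only if "title" is a stored field;
                                     // no more fallback to extracting it from _source
        .addFieldDataField("rating") // `fielddata_fields` counterpart
        .setQuery(QueryBuilders.matchAllQuery())
        .get();
--------------------------------------------------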

@ -250,11 +250,11 @@ Due to the fact that indexed script has been replaced by stored
 scripts the following settings have been replaced to:
 
 * `script.indexed` has been replaced by `script.stored`
-* `script.engine.*.indexed.aggs` has been replaced by `script.engine.*.stored.aggs` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.)
-* `script.engine.*.indexed.mapping` has been replaced by `script.engine.*.stored.mapping` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.)
-* `script.engine.*.indexed.search` has been replaced by `script.engine.*.stored.search` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.)
-* `script.engine.*.indexed.update` has been replaced by `script.engine.*.stored.update` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.)
-* `script.engine.*.indexed.plugin` has been replaced by `script.engine.*.stored.plugin` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.)
+* `script.engine.*.indexed.aggs` has been replaced by `script.engine.*.stored.aggs` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.)
+* `script.engine.*.indexed.mapping` has been replaced by `script.engine.*.stored.mapping` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.)
+* `script.engine.*.indexed.search` has been replaced by `script.engine.*.stored.search` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.)
+* `script.engine.*.indexed.update` has been replaced by `script.engine.*.stored.update` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.)
+* `script.engine.*.indexed.plugin` has been replaced by `script.engine.*.stored.plugin` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.)
 
 ==== Script mode settings


@ -143,7 +143,7 @@ First, let's look at the source data for a player by submitting the following re
 ----------------------------------------------------------------
 GET hockey/_search
 {
-  "stored_fields": [
+  "fields": [
     "_id",
     "_source"
   ],


@ -143,11 +143,11 @@ include::request/sort.asciidoc[]
 include::request/source-filtering.asciidoc[]
 
-include::request/stored-fields.asciidoc[]
+include::request/fields.asciidoc[]
 
 include::request/script-fields.asciidoc[]
 
-include::request/docvalue-fields.asciidoc[]
+include::request/fielddata-fields.asciidoc[]
 
 include::request/post-filter.asciidoc[]


@ -1,23 +0,0 @@
-[[search-request-docvalue-fields]]
-=== Doc value Fields
-
-Allows to return the <<doc-values,doc value>> representation of a field for each hit, for
-example:
-
-[source,js]
---------------------------------------------------
-GET /_search
-{
-    "query" : {
-        "match_all": {}
-    },
-    "docvalue_fields" : ["test1", "test2"]
-}
---------------------------------------------------
-// CONSOLE
-
-Doc value fields can work on fields that are not stored.
-
-Note that if the fields parameter specifies fields without docvalues it will try to load the value from the fielddata cache
-causing the terms for that field to be loaded to memory (cached), which will result in more memory consumption.

Some files were not shown because too many files have changed in this diff.