diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy index 450d3645182..6ed6ecf8619 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy @@ -28,12 +28,6 @@ import org.gradle.api.logging.LogLevel import org.gradle.api.logging.Logger import org.junit.runner.Description -import javax.sound.sampled.AudioSystem -import javax.sound.sampled.Clip -import javax.sound.sampled.Line -import javax.sound.sampled.LineEvent -import javax.sound.sampled.LineListener -import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDescription @@ -123,36 +117,9 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv formatTime(e.getCurrentTime()) + ", stalled for " + formatDurationInSeconds(e.getNoEventDuration()) + " at: " + (e.getDescription() == null ? "" : formatDescription(e.getDescription()))) - try { - playBeat(); - } catch (Exception nosound) { /* handling exceptions with style */ } slowTestsFound = true } - void playBeat() throws Exception { - Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class)); - final AtomicBoolean stop = new AtomicBoolean(); - clip.addLineListener(new LineListener() { - @Override - public void update(LineEvent event) { - if (event.getType() == LineEvent.Type.STOP) { - stop.set(true); - } - } - }); - InputStream stream = getClass().getResourceAsStream("/beat.wav"); - try { - clip.open(AudioSystem.getAudioInputStream(stream)); - clip.start(); - while (!stop.get()) { - Thread.sleep(20); - } - clip.close(); - } finally { - stream.close(); - } - } - @Subscribe void onQuit(AggregatedQuitEvent e) throws IOException { if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) { diff --git a/buildSrc/src/main/resources/beat.wav b/buildSrc/src/main/resources/beat.wav deleted file mode 100644 index 4083a4ce618..00000000000 Binary files a/buildSrc/src/main/resources/beat.wav and /dev/null differ diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 02aec49b1c7..745c8374063 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -477,8 +477,6 @@ - - @@ -879,9 +877,6 @@ - - - @@ -1186,8 +1181,6 @@ - - diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java index 652401194bb..575fbcd3b98 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -77,7 +77,7 @@ final class SettingsUpdater { Settings settings = build.metaData().settings(); // now we try to apply things and if they are invalid we fail // this dryRun will validate & parse settings but won't actually apply them. 
- clusterSettings.dryRun(settings); + clusterSettings.validateUpdate(settings); return build; } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 5c08acb99ea..2297c98b636 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -252,8 +252,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder> extends ActionRequest { - public static String[] ALL_NODES = Strings.EMPTY_ARRAY; - + /** + * The list of nodesIds that will be used to resolve this request; {@link #concreteNodes} + * will be populated from it. Note that if {@link #concreteNodes} is not null, it will be used and nodesIds + * will be ignored. + * + * See {@link DiscoveryNodes#resolveNodes} for a full description of the options. + * + * TODO: once we stop using the transport client as a gateway to the cluster, we can get rid of this and resolve it to concrete nodes + * in the rest layer + */ private String[] nodesIds; + /** + * Once {@link #nodesIds} is resolved, this will contain the concrete nodes that are part of this request. If set, {@link #nodesIds} + * will be ignored and this will be used. + */ + private DiscoveryNode[] concreteNodes; + private TimeValue timeout; protected BaseNodesRequest() { @@ -47,6 +62,11 @@ public abstract class BaseNodesRequest this.nodesIds = nodesIds; } + protected BaseNodesRequest(DiscoveryNode... concreteNodes) { + this.nodesIds = null; + this.concreteNodes = concreteNodes; + } + public final String[] nodesIds() { return nodesIds; } @@ -72,6 +92,13 @@ public abstract class BaseNodesRequest this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); return (Request) this; } + public DiscoveryNode[] concreteNodes() { + return concreteNodes; + } + + public void setConcreteNodes(DiscoveryNode[] concreteNodes) { + this.concreteNodes = concreteNodes; + } @Override public ActionRequestValidationException validate() { @@ -82,6 +109,7 @@ public abstract class BaseNodesRequest public void readFrom(StreamInput in) throws IOException { super.readFrom(in); nodesIds = in.readStringArray(); + concreteNodes = in.readOptionalArray(DiscoveryNode::new, DiscoveryNode[]::new); timeout = in.readOptionalWriteable(TimeValue::new); } @@ -89,6 +117,7 @@ public abstract class BaseNodesRequest public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArrayNullable(nodesIds); + out.writeOptionalArray(concreteNodes); out.writeOptionalWriteable(timeout); } } diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 5d39d378b21..1609a08bb14 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; @@ -42,6
+41,7 @@ import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; @@ -148,20 +148,19 @@ public abstract class TransportNodesAction listener; private final AtomicReferenceArray responses; private final AtomicInteger counter = new AtomicInteger(); @@ -171,24 +170,18 @@ public abstract class TransportNodesAction(this.nodesIds.length); + this.responses = new AtomicReferenceArray<>(request.concreteNodes().length); } void start() { - if (nodesIds.length == 0) { + final DiscoveryNode[] nodes = request.concreteNodes(); + if (nodes.length == 0) { // nothing to notify - threadPool.generic().execute(new Runnable() { - @Override - public void run() { - listener.onResponse(newResponse(request, responses)); - } - }); + threadPool.generic().execute(() -> listener.onResponse(newResponse(request, responses))); return; } TransportRequestOptions.Builder builder = TransportRequestOptions.builder(); @@ -196,10 +189,10 @@ public abstract class TransportNodesAction implements * @throws IllegalArgumentException if more than one node matches the request or no nodes have been resolved */ public DiscoveryNode resolveNode(String node) { - String[] resolvedNodeIds = resolveNodesIds(node); + String[] resolvedNodeIds = resolveNodes(node); if (resolvedNodeIds.length > 1) { throw new IllegalArgumentException("resolved [" + node + "] into [" + resolvedNodeIds.length + "] nodes, where expected to be resolved to a single node"); } @@ -255,17 +255,25 @@ public class DiscoveryNodes extends AbstractDiffable implements return nodes.get(resolvedNodeIds[0]); } - public String[] resolveNodesIds(String... nodesIds) { - if (isAllNodes(nodesIds)) { + /** + * Resolves a set of node "descriptions" to concrete and existing node ids. "Descriptions" can be (resolved in this order): + * - "_local" or "_master" for the relevant nodes + * - a node id + * - a wildcard pattern that will be matched against node names + * - an "attr:value" pattern, where attr can be a node role (master, data, ingest etc.), in which case the value can be true or false, + * or a generic node attribute name, in which case the value will be treated as a wildcard and matched against the node attribute values. + */ + public String[] resolveNodes(String...
nodes) { + if (isAllNodes(nodes)) { int index = 0; - nodesIds = new String[nodes.size()]; + nodes = new String[this.nodes.size()]; for (DiscoveryNode node : this) { - nodesIds[index++] = node.getId(); + nodes[index++] = node.getId(); } - return nodesIds; + return nodes; } else { - ObjectHashSet resolvedNodesIds = new ObjectHashSet<>(nodesIds.length); - for (String nodeId : nodesIds) { + ObjectHashSet resolvedNodesIds = new ObjectHashSet<>(nodes.length); + for (String nodeId : nodes) { if (nodeId.equals("_local")) { String localNodeId = getLocalNodeId(); if (localNodeId != null) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 91b1df9b453..be09f17f866 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Randomness; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -357,7 +356,7 @@ public class IndexShardRoutingTable implements Iterable { */ public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String[] nodeAttributes, DiscoveryNodes discoveryNodes) { ArrayList ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size()); - Set selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodesIds(nodeAttributes)); + Set selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodes(nodeAttributes)); int seed = shuffler.nextSeed(); for (ShardRouting shardRouting : shuffler.shuffle(activeShards, seed)) { if (selectedNodes.contains(shardRouting.currentNodeId())) { diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index ceb909fb49a..ce0bf59e9e4 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -57,6 +57,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.IntFunction; import java.util.function.Supplier; import static org.elasticsearch.ElasticsearchException.readException; @@ -608,6 +609,19 @@ public abstract class StreamInput extends InputStream { return bytes; } + public T[] readArray(Writeable.Reader reader, IntFunction arraySupplier) throws IOException { + int length = readVInt(); + T[] values = arraySupplier.apply(length); + for (int i = 0; i < length; i++) { + values[i] = reader.read(this); + } + return values; + } + + public T[] readOptionalArray(Writeable.Reader reader, IntFunction arraySupplier) throws IOException { + return readBoolean() ? readArray(reader, arraySupplier) : null; + } + /** * Serializes a potential null value. 
*/ @@ -782,7 +796,7 @@ public abstract class StreamInput extends InputStream { /** * Reads a list of objects */ - public List readList(StreamInputReader reader) throws IOException { + public List readList(Writeable.Reader reader) throws IOException { int count = readVInt(); List builder = new ArrayList<>(count); for (int i=0; i { - T read(StreamInput t) throws IOException; -} diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 7d17029cd61..bb9af656ea8 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -594,6 +594,22 @@ public abstract class StreamOutput extends OutputStream { } } + public void writeArray(T[] array) throws IOException { + writeVInt(array.length); + for (T value: array) { + value.writeTo(this); + } + } + + public void writeOptionalArray(@Nullable T[] array) throws IOException { + if (array == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeArray(array); + } + } + /** * Serializes a potential null value. */ diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 13f92ba698d..b0287ad153a 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -238,7 +238,8 @@ public class NetworkModule extends AbstractModule { RestIndexAction.class, RestGetAction.class, RestGetSourceAction.class, - RestHeadAction.class, + RestHeadAction.Document.class, + RestHeadAction.Source.class, RestMultiGetAction.class, RestDeleteAction.class, org.elasticsearch.rest.action.count.RestCountAction.class, diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 817e109bf4d..efa4c1316ac 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -115,18 +115,18 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } /** - * Applies the given settings to all listeners and rolls back the result after application. This + * Validates the given settings by running it through all update listeners without applying it. This * method will not change any settings but will fail if any of the settings can't be applied. 
*/ - public synchronized Settings dryRun(Settings settings) { + public synchronized Settings validateUpdate(Settings settings) { final Settings current = Settings.builder().put(this.settings).put(settings).build(); final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); List exceptions = new ArrayList<>(); for (SettingUpdater settingUpdater : settingUpdaters) { try { - if (settingUpdater.hasChanged(current, previous)) { - settingUpdater.getValue(current, previous); - } + // ensure this runs through the updater / dynamic validator + // don't check whether the value has changed; we want to validate it anyway + settingUpdater.getValue(current, previous); } catch (RuntimeException ex) { exceptions.add(ex); logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater); } diff --git a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index b74507a4acc..737ed7d591c 100644 --- a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -60,7 +60,7 @@ public abstract class AsyncShardFetch implements Rel * An action that lists the relevant shard data that needs to be fetched. */ public interface Lister, NodeResponse extends BaseNodeResponse> { - void list(ShardId shardId, String[] nodesIds, ActionListener listener); + void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener listener); } protected final ESLogger logger; @@ -116,12 +116,9 @@ public abstract class AsyncShardFetch implements Rel for (NodeEntry nodeEntry : nodesToFetch) { nodeEntry.markAsFetching(); } - String[] nodesIds = new String[nodesToFetch.size()]; - int index = 0; - for (NodeEntry nodeEntry : nodesToFetch) { - nodesIds[index++] = nodeEntry.getNodeId(); - } - asyncFetch(shardId, nodesIds); + DiscoveryNode[] discoNodesToFetch = nodesToFetch.stream().map(NodeEntry::getNodeId).map(nodes::get) .toArray(DiscoveryNode[]::new); + asyncFetch(shardId, discoNodesToFetch); } // if we are still fetching, return null to indicate it @@ -187,7 +184,7 @@ public abstract class AsyncShardFetch implements Rel if (nodeEntry.isFailed()) { logger.trace("{} node {} has failed for [{}] (failure [{}])", shardId, nodeEntry.getNodeId(), type, nodeEntry.getFailure()); } else { - logger.trace("{} marking {} as done for [{}]", shardId, nodeEntry.getNodeId(), type); + logger.trace("{} marking {} as done for [{}], result is [{}]", shardId, nodeEntry.getNodeId(), type, response); nodeEntry.doneFetching(response); } } @@ -268,9 +265,9 @@ public abstract class AsyncShardFetch implements Rel * Async fetches data for the provided shard with the set of nodes that need to be fetched from.
*/ // visible for testing - void asyncFetch(final ShardId shardId, final String[] nodesIds) { - logger.trace("{} fetching [{}] from {}", shardId, type, nodesIds); - action.list(shardId, nodesIds, new ActionListener>() { + void asyncFetch(final ShardId shardId, final DiscoveryNode[] nodes) { + logger.trace("{} fetching [{}] from {}", shardId, type, nodes); + action.list(shardId, nodes, new ActionListener>() { @Override public void onResponse(BaseNodesResponse response) { processAsyncFetch(shardId, response.getNodes(), response.failures()); @@ -278,9 +275,9 @@ public abstract class AsyncShardFetch implements Rel @Override public void onFailure(Throwable e) { - List failures = new ArrayList<>(nodesIds.length); - for (String nodeId : nodesIds) { - failures.add(new FailedNodeException(nodeId, "total failure in fetching", e)); + List failures = new ArrayList<>(nodes.length); + for (final DiscoveryNode node: nodes) { + failures.add(new FailedNodeException(node.getId(), "total failure in fetching", e)); } processAsyncFetch(shardId, null, failures); } diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index 602a05c4321..e5f37c360c3 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -38,13 +38,13 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; /** */ @@ -91,7 +91,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { ShardRouting primaryShard = allocation.routingNodes().activePrimary(shard.shardId()); assert primaryShard != null : "the replica shard can be allocated on at least one node, so there must be an active primary"; TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryStore = findStore(primaryShard, allocation, shardStores); - if (primaryStore == null || primaryStore.allocated() == false) { + if (primaryStore == null) { // if we can't find the primary data, it is probably because the primary shard is corrupted (and listing failed) // just let the recovery find it out, no need to do anything about it for the initializing shard logger.trace("{}: no primary shard store found or allocated, letting actual allocation figure it out", shard); @@ -102,8 +102,15 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { if (matchingNodes.getNodeWithHighestMatch() != null) { DiscoveryNode currentNode = allocation.nodes().get(shard.currentNodeId()); DiscoveryNode nodeWithHighestMatch = matchingNodes.getNodeWithHighestMatch(); + // current node will not be in matchingNodes as it is filtered away by SameShardAllocationDecider + final String currentSyncId; + if (shardStores.getData().containsKey(currentNode)) { + currentSyncId = shardStores.getData().get(currentNode).storeFilesMetaData().syncId(); + } else { + currentSyncId = null; + } if (currentNode.equals(nodeWithHighestMatch) == false - && matchingNodes.isNodeMatchBySyncID(currentNode) == false + && Objects.equals(currentSyncId, 
primaryStore.syncId()) == false && matchingNodes.isNodeMatchBySyncID(nodeWithHighestMatch) == true) { // we found a better match that has a full sync id match, the existing allocation is not fully synced // so we found a better one, cancel this one @@ -160,7 +167,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { ShardRouting primaryShard = routingNodes.activePrimary(shard.shardId()); assert primaryShard != null : "the replica shard can be allocated on at least one node, so there must be an active primary"; TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryStore = findStore(primaryShard, allocation, shardStores); - if (primaryStore == null || primaryStore.allocated() == false) { + if (primaryStore == null) { // if we can't find the primary data, it is probably because the primary shard is corrupted (and listing failed) // we want to let the replica be allocated in order to expose the actual problem with the primary that the replica // will try and recover from @@ -257,8 +264,8 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { for (Map.Entry nodeStoreEntry : data.getData().entrySet()) { DiscoveryNode discoNode = nodeStoreEntry.getKey(); TransportNodesListShardStoreMetaData.StoreFilesMetaData storeFilesMetaData = nodeStoreEntry.getValue().storeFilesMetaData(); - if (storeFilesMetaData == null) { - // already allocated on that node... + // we don't have any files at all, it is an empty index + if (storeFilesMetaData.isEmpty()) { continue; } @@ -275,16 +282,6 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { continue; } - // if it is already allocated, we can't assign to it... (and it might be primary as well) - if (storeFilesMetaData.allocated()) { - continue; - } - - // we don't have any files at all, it is an empty index - if (storeFilesMetaData.iterator().hasNext() == false) { - continue; - } - String primarySyncId = primaryStore.syncId(); String replicaSyncId = storeFilesMetaData.syncId(); // see if we have a sync id we can make use of diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 5ae75a209eb..42fc8163706 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -81,16 +80,9 @@ public class TransportNodesListGatewayStartedShards extends } @Override - public void list(ShardId shardId, String[] nodesIds, + public void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener listener) { - execute(new Request(shardId, nodesIds), listener); - } - - @Override - protected String[] resolveNodes(Request request, ClusterState clusterState) { - // default implementation may filter out non existent nodes. 
it's important to keep exactly the ids - we were given for accounting on the caller - return request.nodesIds(); + execute(new Request(shardId, nodes), listener); } @Override @@ -177,8 +169,8 @@ public class TransportNodesListGatewayStartedShards extends public Request() { } - public Request(ShardId shardId, String[] nodesIds) { - super(nodesIds); + public Request(ShardId shardId, DiscoveryNode[] nodes) { + super(nodes); this.shardId = shardId; } diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index d233faf4f19..f6227ca3276 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -126,6 +126,17 @@ public final class IndexModule { indexSettings.getScopedSettings().addSettingsUpdateConsumer(setting, consumer); } + /** + * Adds a Setting, its consumer and validator for this index. + */ + public void addSettingsUpdateConsumer(Setting setting, Consumer consumer, Consumer validator) { + ensureNotFrozen(); + if (setting == null) { + throw new IllegalArgumentException("setting must not be null"); + } + indexSettings.getScopedSettings().addSettingsUpdateConsumer(setting, consumer, validator); + } + /** * Returns the index {@link Settings} for this index */ diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 2c20697d757..df348a5d6a1 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -275,6 +275,7 @@ public final class IndexSettings { scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval); scopedSettings.addSettingsUpdateConsumer(MAX_REFRESH_LISTENERS_PER_SHARD, this::setMaxRefreshListeners); scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_SCROLL, this::setMaxSlicesPerScroll); + } private void setTranslogFlushThresholdSize(ByteSizeValue byteSizeValue) { @@ -545,5 +546,5 @@ public final class IndexSettings { this.maxSlicesPerScroll = value; } - IndexScopedSettings getScopedSettings() { return scopedSettings;} + public IndexScopedSettings getScopedSettings() { return scopedSettings;} } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 2d73df76f07..548bc91b0a5 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -23,14 +23,11 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; -import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.indices.analysis.HunspellService; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import
org.elasticsearch.indices.analysis.PreBuiltCharFilters; import org.elasticsearch.indices.analysis.PreBuiltTokenFilters; @@ -45,48 +42,32 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; +import static java.util.Collections.unmodifiableMap; + /** * An internal registry for tokenizer, token filter, char filter and analyzer. * This class exists per node and allows to create per-index {@link AnalysisService} via {@link #build(IndexSettings)} */ public final class AnalysisRegistry implements Closeable { - private final Map> charFilters; - private final Map> tokenFilters; - private final Map> tokenizers; - private final Map> analyzers; + private final PrebuiltAnalysis prebuiltAnalysis = new PrebuiltAnalysis(); private final Map cachedAnalyzer = new ConcurrentHashMap<>(); - private final PrebuiltAnalysis prebuiltAnalysis; - private final HunspellService hunspellService; + private final Environment environment; + private final Map> charFilters; + private final Map> tokenFilters; + private final Map> tokenizers; + private final Map>> analyzers; - public AnalysisRegistry(HunspellService hunspellService, Environment environment) { - this(hunspellService, environment, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); - } - - public AnalysisRegistry(HunspellService hunspellService, Environment environment, - Map> charFilters, - Map> tokenFilters, - Map> tokenizers, - Map> analyzers) { - prebuiltAnalysis = new PrebuiltAnalysis(); - this.hunspellService = hunspellService; + public AnalysisRegistry(Environment environment, + Map> charFilters, + Map> tokenFilters, + Map> tokenizers, + Map>> analyzers) { this.environment = environment; - final Map> charFilterBuilder = new HashMap<>(charFilters); - final Map> tokenFilterBuilder = new HashMap<>(tokenFilters); - final Map> tokenizerBuilder = new HashMap<>(tokenizers); - final Map> analyzerBuilder= new HashMap<>(analyzers); - registerBuiltInAnalyzer(analyzerBuilder); - registerBuiltInCharFilter(charFilterBuilder); - registerBuiltInTokenizer(tokenizerBuilder); - registerBuiltInTokenFilters(tokenFilterBuilder); - this.tokenFilters = Collections.unmodifiableMap(tokenFilterBuilder); - this.tokenizers = Collections.unmodifiableMap(tokenizerBuilder); - this.charFilters = Collections.unmodifiableMap(charFilterBuilder); - this.analyzers = Collections.unmodifiableMap(analyzerBuilder); - } - - public HunspellService getHunspellService() { - return hunspellService; + this.charFilters = unmodifiableMap(charFilters); + this.tokenFilters = unmodifiableMap(tokenFilters); + this.tokenizers = unmodifiableMap(tokenizers); + this.analyzers = unmodifiableMap(analyzers); } /** @@ -114,9 +95,9 @@ public final class AnalysisRegistry implements Closeable { * Returns a registered {@link Analyzer} provider by name or null if the analyzer was not registered */ public Analyzer getAnalyzer(String analyzer) throws IOException { - AnalysisModule.AnalysisProvider analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer); + AnalysisModule.AnalysisProvider> analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer); if (analyzerProvider == null) { - AnalysisModule.AnalysisProvider provider = analyzers.get(analyzer); + AnalysisModule.AnalysisProvider> provider = analyzers.get(analyzer); return provider == null ? 
null : cachedAnalyzer.computeIfAbsent(analyzer, (key) -> { try { return provider.get(environment, key).get(); @@ -157,7 +138,8 @@ public final class AnalysisRegistry implements Closeable { */ tokenFilters.put("synonym", requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, tokenizerFactories, name, settings))); final Map tokenFilterFactories = buildMapping(false, "tokenfilter", indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories); - final Map analyzierFactories = buildMapping(true, "analyzer", indexSettings, analyzersSettings, analyzers, prebuiltAnalysis.analyzerProviderFactories); + final Map> analyzierFactories = buildMapping(true, "analyzer", indexSettings, analyzersSettings, + analyzers, prebuiltAnalysis.analyzerProviderFactories); return new AnalysisService(indexSettings, analyzierFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories); } @@ -175,140 +157,9 @@ public final class AnalysisRegistry implements Closeable { }; } - private void registerBuiltInCharFilter(Map> charFilters) { - charFilters.put("html_strip", HtmlStripCharFilterFactory::new); - charFilters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new)); - charFilters.put("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new)); - } - - private void registerBuiltInTokenizer(Map> tokenizers) { - tokenizers.put("standard", StandardTokenizerFactory::new); - tokenizers.put("uax_url_email", UAX29URLEmailTokenizerFactory::new); - tokenizers.put("path_hierarchy", PathHierarchyTokenizerFactory::new); - tokenizers.put("PathHierarchy", PathHierarchyTokenizerFactory::new); - tokenizers.put("keyword", KeywordTokenizerFactory::new); - tokenizers.put("letter", LetterTokenizerFactory::new); - tokenizers.put("lowercase", LowerCaseTokenizerFactory::new); - tokenizers.put("whitespace", WhitespaceTokenizerFactory::new); - tokenizers.put("nGram", NGramTokenizerFactory::new); - tokenizers.put("ngram", NGramTokenizerFactory::new); - tokenizers.put("edgeNGram", EdgeNGramTokenizerFactory::new); - tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new); - tokenizers.put("pattern", PatternTokenizerFactory::new); - tokenizers.put("classic", ClassicTokenizerFactory::new); - tokenizers.put("thai", ThaiTokenizerFactory::new); - } - - private void registerBuiltInTokenFilters(Map> tokenFilters) { - tokenFilters.put("stop", StopTokenFilterFactory::new); - tokenFilters.put("reverse", ReverseTokenFilterFactory::new); - tokenFilters.put("asciifolding", ASCIIFoldingTokenFilterFactory::new); - tokenFilters.put("length", LengthTokenFilterFactory::new); - tokenFilters.put("lowercase", LowerCaseTokenFilterFactory::new); - tokenFilters.put("uppercase", UpperCaseTokenFilterFactory::new); - tokenFilters.put("porter_stem", PorterStemTokenFilterFactory::new); - tokenFilters.put("kstem", KStemTokenFilterFactory::new); - tokenFilters.put("standard", StandardTokenFilterFactory::new); - tokenFilters.put("nGram", NGramTokenFilterFactory::new); - tokenFilters.put("ngram", NGramTokenFilterFactory::new); - tokenFilters.put("edgeNGram", EdgeNGramTokenFilterFactory::new); - tokenFilters.put("edge_ngram", EdgeNGramTokenFilterFactory::new); - tokenFilters.put("shingle", ShingleTokenFilterFactory::new); - tokenFilters.put("unique", UniqueTokenFilterFactory::new); - tokenFilters.put("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new)); - tokenFilters.put("trim", TrimTokenFilterFactory::new); - 
tokenFilters.put("limit", LimitTokenCountFilterFactory::new); - tokenFilters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new)); - tokenFilters.put("snowball", SnowballTokenFilterFactory::new); - tokenFilters.put("stemmer", StemmerTokenFilterFactory::new); - tokenFilters.put("word_delimiter", WordDelimiterTokenFilterFactory::new); - tokenFilters.put("delimited_payload_filter", DelimitedPayloadTokenFilterFactory::new); - tokenFilters.put("elision", ElisionTokenFilterFactory::new); - tokenFilters.put("keep", requriesAnalysisSettings(KeepWordFilterFactory::new)); - tokenFilters.put("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new)); - tokenFilters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); - tokenFilters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new)); - tokenFilters.put("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new)); - tokenFilters.put("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new)); - tokenFilters.put("arabic_stem", ArabicStemTokenFilterFactory::new); - tokenFilters.put("brazilian_stem", BrazilianStemTokenFilterFactory::new); - tokenFilters.put("czech_stem", CzechStemTokenFilterFactory::new); - tokenFilters.put("dutch_stem", DutchStemTokenFilterFactory::new); - tokenFilters.put("french_stem", FrenchStemTokenFilterFactory::new); - tokenFilters.put("german_stem", GermanStemTokenFilterFactory::new); - tokenFilters.put("russian_stem", RussianStemTokenFilterFactory::new); - tokenFilters.put("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new)); - tokenFilters.put("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new)); - tokenFilters.put("arabic_normalization", ArabicNormalizationFilterFactory::new); - tokenFilters.put("german_normalization", GermanNormalizationFilterFactory::new); - tokenFilters.put("hindi_normalization", HindiNormalizationFilterFactory::new); - tokenFilters.put("indic_normalization", IndicNormalizationFilterFactory::new); - tokenFilters.put("sorani_normalization", SoraniNormalizationFilterFactory::new); - tokenFilters.put("persian_normalization", PersianNormalizationFilterFactory::new); - tokenFilters.put("scandinavian_normalization", ScandinavianNormalizationFilterFactory::new); - tokenFilters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new); - tokenFilters.put("serbian_normalization", SerbianNormalizationFilterFactory::new); - - if (hunspellService != null) { - tokenFilters.put("hunspell", requriesAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService))); - } - tokenFilters.put("cjk_bigram", CJKBigramFilterFactory::new); - tokenFilters.put("cjk_width", CJKWidthFilterFactory::new); - - tokenFilters.put("apostrophe", ApostropheFilterFactory::new); - tokenFilters.put("classic", ClassicFilterFactory::new); - tokenFilters.put("decimal_digit", DecimalDigitFilterFactory::new); - tokenFilters.put("fingerprint", FingerprintTokenFilterFactory::new); - } - - private void registerBuiltInAnalyzer(Map> analyzers) { - analyzers.put("default", StandardAnalyzerProvider::new); - analyzers.put("standard", StandardAnalyzerProvider::new); - analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); - analyzers.put("simple", SimpleAnalyzerProvider::new); - analyzers.put("stop", 
StopAnalyzerProvider::new); - analyzers.put("whitespace", WhitespaceAnalyzerProvider::new); - analyzers.put("keyword", KeywordAnalyzerProvider::new); - analyzers.put("pattern", PatternAnalyzerProvider::new); - analyzers.put("snowball", SnowballAnalyzerProvider::new); - analyzers.put("arabic", ArabicAnalyzerProvider::new); - analyzers.put("armenian", ArmenianAnalyzerProvider::new); - analyzers.put("basque", BasqueAnalyzerProvider::new); - analyzers.put("brazilian", BrazilianAnalyzerProvider::new); - analyzers.put("bulgarian", BulgarianAnalyzerProvider::new); - analyzers.put("catalan", CatalanAnalyzerProvider::new); - analyzers.put("chinese", ChineseAnalyzerProvider::new); - analyzers.put("cjk", CjkAnalyzerProvider::new); - analyzers.put("czech", CzechAnalyzerProvider::new); - analyzers.put("danish", DanishAnalyzerProvider::new); - analyzers.put("dutch", DutchAnalyzerProvider::new); - analyzers.put("english", EnglishAnalyzerProvider::new); - analyzers.put("finnish", FinnishAnalyzerProvider::new); - analyzers.put("french", FrenchAnalyzerProvider::new); - analyzers.put("galician", GalicianAnalyzerProvider::new); - analyzers.put("german", GermanAnalyzerProvider::new); - analyzers.put("greek", GreekAnalyzerProvider::new); - analyzers.put("hindi", HindiAnalyzerProvider::new); - analyzers.put("hungarian", HungarianAnalyzerProvider::new); - analyzers.put("indonesian", IndonesianAnalyzerProvider::new); - analyzers.put("irish", IrishAnalyzerProvider::new); - analyzers.put("italian", ItalianAnalyzerProvider::new); - analyzers.put("latvian", LatvianAnalyzerProvider::new); - analyzers.put("lithuanian", LithuanianAnalyzerProvider::new); - analyzers.put("norwegian", NorwegianAnalyzerProvider::new); - analyzers.put("persian", PersianAnalyzerProvider::new); - analyzers.put("portuguese", PortugueseAnalyzerProvider::new); - analyzers.put("romanian", RomanianAnalyzerProvider::new); - analyzers.put("russian", RussianAnalyzerProvider::new); - analyzers.put("sorani", SoraniAnalyzerProvider::new); - analyzers.put("spanish", SpanishAnalyzerProvider::new); - analyzers.put("swedish", SwedishAnalyzerProvider::new); - analyzers.put("turkish", TurkishAnalyzerProvider::new); - analyzers.put("thai", ThaiAnalyzerProvider::new); - analyzers.put("fingerprint", FingerprintAnalyzerProvider::new); - } - - private Map buildMapping(boolean analyzer, String toBuild, IndexSettings settings, Map settingsMap, Map> providerMap, Map> defaultInstance) throws IOException { + private Map buildMapping(boolean analyzer, String toBuild, IndexSettings settings, Map settingsMap, + Map> providerMap, Map> defaultInstance) + throws IOException { Settings defaultSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, settings.getIndexVersionCreated()).build(); Map factories = new HashMap<>(); for (Map.Entry entry : settingsMap.entrySet()) { @@ -383,7 +234,7 @@ public final class AnalysisRegistry implements Closeable { private static class PrebuiltAnalysis implements Closeable { - final Map> analyzerProviderFactories; + final Map>> analyzerProviderFactories; final Map> tokenizerFactories; final Map> tokenFilterFactories; final Map> charFilterFactories; @@ -446,7 +297,7 @@ public final class AnalysisRegistry implements Closeable { return tokenizerFactories.get(name); } - public AnalysisModule.AnalysisProvider getAnalyzerProvider(String name) { + public AnalysisModule.AnalysisProvider> getAnalyzerProvider(String name) { return analyzerProviderFactories.get(name); } diff --git 
a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index b9146df8c96..0009fc95409 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -48,7 +48,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable private final NamedAnalyzer defaultSearchQuoteAnalyzer; public AnalysisService(IndexSettings indexSettings, - Map analyzerProviders, + Map> analyzerProviders, Map tokenizerFactoryFactories, Map charFilterFactoryFactories, Map tokenFilterFactoryFactories) { @@ -69,8 +69,8 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable } Map analyzers = new HashMap<>(); - for (Map.Entry entry : analyzerProviders.entrySet()) { - AnalyzerProvider analyzerFactory = entry.getValue(); + for (Map.Entry> entry : analyzerProviders.entrySet()) { + AnalyzerProvider analyzerFactory = entry.getValue(); String name = entry.getKey(); /* * Lucene defaults positionIncrementGap to 0 in all analyzers but diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactory.java index 00724dccfa1..786e24a0844 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerProviderFactory.java @@ -32,7 +32,7 @@ import java.io.IOException; /** * */ -public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider { +public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisProvider> { private final PreBuiltAnalyzerProvider analyzerProvider; @@ -40,7 +40,7 @@ public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisP analyzerProvider = new PreBuiltAnalyzerProvider(name, scope, analyzer); } - public AnalyzerProvider create(String name, Settings settings) { + public AnalyzerProvider create(String name, Settings settings) { Version indexVersion = Version.indexCreated(settings); if (!Version.CURRENT.equals(indexVersion)) { PreBuiltAnalyzers preBuiltAnalyzers = PreBuiltAnalyzers.getOrDefault(name, null); @@ -54,7 +54,8 @@ public class PreBuiltAnalyzerProviderFactory implements AnalysisModule.AnalysisP } @Override - public AnalyzerProvider get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { + public AnalyzerProvider get(IndexSettings indexSettings, Environment environment, String name, Settings settings) + throws IOException { return create(name, settings); } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 0b04629e2cc..63247019501 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -378,6 +378,10 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder { - throw new ParsingException(p.getTokenLocation(), "The field [" + - SearchSourceBuilder.FIELDS_FIELD + "] is not longer supported, please use [" + - SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " + - "if the field is not stored"); - }, SearchSourceBuilder.FIELDS_FIELD, 
ObjectParser.ValueType.STRING_ARRAY); - PARSER.declareStringArray(InnerHitBuilder::setDocValueFields, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD); + PARSER.declareStringArray(InnerHitBuilder::setFieldNames, SearchSourceBuilder.FIELDS_FIELD); + PARSER.declareStringArray(InnerHitBuilder::setFieldDataFields, SearchSourceBuilder.FIELDDATA_FIELDS_FIELD); PARSER.declareField((p, i, c) -> { try { Set scriptFields = new HashSet<>(); @@ -137,10 +131,10 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl private boolean version; private boolean trackScores; - private List storedFieldNames; + private List fieldNames; private QueryBuilder query = DEFAULT_INNER_HIT_QUERY; private List> sorts; - private List docValueFields; + private List fieldDataFields; private Set scriptFields; private HighlightBuilder highlightBuilder; private FetchSourceContext fetchSourceContext; @@ -149,6 +143,46 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl public InnerHitBuilder() { } + /** + * Read from a stream. + */ + public InnerHitBuilder(StreamInput in) throws IOException { + name = in.readOptionalString(); + nestedPath = in.readOptionalString(); + parentChildType = in.readOptionalString(); + from = in.readVInt(); + size = in.readVInt(); + explain = in.readBoolean(); + version = in.readBoolean(); + trackScores = in.readBoolean(); + fieldNames = (List) in.readGenericValue(); + fieldDataFields = (List) in.readGenericValue(); + if (in.readBoolean()) { + int size = in.readVInt(); + scriptFields = new HashSet<>(size); + for (int i = 0; i < size; i++) { + scriptFields.add(new ScriptField(in)); + } + } + fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); + if (in.readBoolean()) { + int size = in.readVInt(); + sorts = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + sorts.add(in.readNamedWriteable(SortBuilder.class)); + } + } + highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new); + query = in.readNamedWriteable(QueryBuilder.class); + if (in.readBoolean()) { + int size = in.readVInt(); + childInnerHits = new HashMap<>(size); + for (int i = 0; i < size; i++) { + childInnerHits.put(in.readString(), new InnerHitBuilder(in)); + } + } + } + private InnerHitBuilder(InnerHitBuilder other) { name = other.name; from = other.from; @@ -156,11 +190,11 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl explain = other.explain; version = other.version; trackScores = other.trackScores; - if (other.storedFieldNames != null) { - storedFieldNames = new ArrayList<>(other.storedFieldNames); + if (other.fieldNames != null) { + fieldNames = new ArrayList<>(other.fieldNames); } - if (other.docValueFields != null) { - docValueFields = new ArrayList<>(other.docValueFields); + if (other.fieldDataFields != null) { + fieldDataFields = new ArrayList<>(other.fieldDataFields); } if (other.scriptFields != null) { scriptFields = new HashSet<>(other.scriptFields); @@ -198,46 +232,6 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl } } - /** - * Read from a stream. 
- */ - public InnerHitBuilder(StreamInput in) throws IOException { - name = in.readOptionalString(); - nestedPath = in.readOptionalString(); - parentChildType = in.readOptionalString(); - from = in.readVInt(); - size = in.readVInt(); - explain = in.readBoolean(); - version = in.readBoolean(); - trackScores = in.readBoolean(); - storedFieldNames = (List) in.readGenericValue(); - docValueFields = (List) in.readGenericValue(); - if (in.readBoolean()) { - int size = in.readVInt(); - scriptFields = new HashSet<>(size); - for (int i = 0; i < size; i++) { - scriptFields.add(new ScriptField(in)); - } - } - fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); - if (in.readBoolean()) { - int size = in.readVInt(); - sorts = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - sorts.add(in.readNamedWriteable(SortBuilder.class)); - } - } - highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new); - query = in.readNamedWriteable(QueryBuilder.class); - if (in.readBoolean()) { - int size = in.readVInt(); - childInnerHits = new HashMap<>(size); - for (int i = 0; i < size; i++) { - childInnerHits.put(in.readString(), new InnerHitBuilder(in)); - } - } - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(name); @@ -248,8 +242,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl out.writeBoolean(explain); out.writeBoolean(version); out.writeBoolean(trackScores); - out.writeGenericValue(storedFieldNames); - out.writeGenericValue(docValueFields); + out.writeGenericValue(fieldNames); + out.writeGenericValue(fieldDataFields); boolean hasScriptFields = scriptFields != null; out.writeBoolean(hasScriptFields); if (hasScriptFields) { @@ -340,103 +334,29 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl return this; } - /** - * Gets the stored fields to load and return. - * - * @deprecated Use {@link InnerHitBuilder#getStoredFieldNames()} instead. - */ - @Deprecated public List getFieldNames() { - return storedFieldNames; + return fieldNames; } - /** - * Sets the stored fields to load and return. If none - * are specified, the source of the document will be returned. - * - * @deprecated Use {@link InnerHitBuilder#setStoredFieldNames(List)} instead. - */ - @Deprecated public InnerHitBuilder setFieldNames(List fieldNames) { - this.storedFieldNames = fieldNames; + this.fieldNames = fieldNames; return this; } - - /** - * Gets the stored fields to load and return. - */ - public List getStoredFieldNames() { - return storedFieldNames; - } - - /** - * Sets the stored fields to load and return. If none - * are specified, the source of the document will be returned. - */ - public InnerHitBuilder setStoredFieldNames(List fieldNames) { - this.storedFieldNames = fieldNames; - return this; - } - - /** - * Gets the docvalue fields. - * - * @deprecated Use {@link InnerHitBuilder#getDocValueFields()} instead. - */ - @Deprecated public List getFieldDataFields() { - return docValueFields; + return fieldDataFields; } - /** - * Sets the stored fields to load from the docvalue and return. - * - * @deprecated Use {@link InnerHitBuilder#setDocValueFields(List)} instead. - */ - @Deprecated public InnerHitBuilder setFieldDataFields(List fieldDataFields) { - this.docValueFields = fieldDataFields; + this.fieldDataFields = fieldDataFields; return this; } - /** - * Adds a field to load from the docvalue and return. - * - * @deprecated Use {@link InnerHitBuilder#addDocValueField(String)} instead. 
- */ - @Deprecated public InnerHitBuilder addFieldDataField(String field) { - if (docValueFields == null) { - docValueFields = new ArrayList<>(); + if (fieldDataFields == null) { + fieldDataFields = new ArrayList<>(); } - docValueFields.add(field); - return this; - } - - /** - * Gets the docvalue fields. - */ - public List getDocValueFields() { - return docValueFields; - } - - /** - * Sets the stored fields to load from the docvalue and return. - */ - public InnerHitBuilder setDocValueFields(List docValueFields) { - this.docValueFields = docValueFields; - return this; - } - - /** - * Adds a field to load from the docvalue and return. - */ - public InnerHitBuilder addDocValueField(String field) { - if (docValueFields == null) { - docValueFields = new ArrayList<>(); - } - docValueFields.add(field); + fieldDataFields.add(field); return this; } @@ -564,19 +484,19 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl innerHitsContext.explain(explain); innerHitsContext.version(version); innerHitsContext.trackScores(trackScores); - if (storedFieldNames != null) { - if (storedFieldNames.isEmpty()) { + if (fieldNames != null) { + if (fieldNames.isEmpty()) { innerHitsContext.emptyFieldNames(); } else { - for (String fieldName : storedFieldNames) { + for (String fieldName : fieldNames) { innerHitsContext.fieldNames().add(fieldName); } } } - if (docValueFields != null) { + if (fieldDataFields != null) { FieldDataFieldsContext fieldDataFieldsContext = innerHitsContext .getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY); - for (String field : docValueFields) { + for (String field : fieldDataFields) { fieldDataFieldsContext.add(new FieldDataFieldsContext.FieldDataField(field)); } fieldDataFieldsContext.setHitExecutionNeeded(true); @@ -633,20 +553,20 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl if (fetchSourceContext != null) { builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext, params); } - if (storedFieldNames != null) { - if (storedFieldNames.size() == 1) { - builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0)); + if (fieldNames != null) { + if (fieldNames.size() == 1) { + builder.field(SearchSourceBuilder.FIELDS_FIELD.getPreferredName(), fieldNames.get(0)); } else { - builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName()); - for (String fieldName : storedFieldNames) { + builder.startArray(SearchSourceBuilder.FIELDS_FIELD.getPreferredName()); + for (String fieldName : fieldNames) { builder.value(fieldName); } builder.endArray(); } } - if (docValueFields != null) { - builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName()); - for (String fieldDataField : docValueFields) { + if (fieldDataFields != null) { + builder.startArray(SearchSourceBuilder.FIELDDATA_FIELDS_FIELD.getPreferredName()); + for (String fieldDataField : fieldDataFields) { builder.value(fieldDataField); } builder.endArray(); @@ -693,8 +613,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl Objects.equals(explain, that.explain) && Objects.equals(version, that.version) && Objects.equals(trackScores, that.trackScores) && - Objects.equals(storedFieldNames, that.storedFieldNames) && - Objects.equals(docValueFields, that.docValueFields) && + Objects.equals(fieldNames, that.fieldNames) && + Objects.equals(fieldDataFields, that.fieldDataFields) && Objects.equals(scriptFields, that.scriptFields) && 
Objects.equals(fetchSourceContext, that.fetchSourceContext) && Objects.equals(sorts, that.sorts) && @@ -705,8 +625,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl @Override public int hashCode() { - return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, storedFieldNames, - docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits); + return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, fieldNames, + fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits); } public static InnerHitBuilder fromXContent(QueryParseContext context) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index 7d27a911f88..c0abe37e412 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -328,7 +328,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id()) .preference("_local").routing(termsLookup.routing()); final GetResponse getResponse = client.get(getRequest).actionGet(); - if (getResponse.isExists()) { + if (getResponse.isSourceEmpty() == false) { // extract terms only if the doc source exists List extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap()); terms.addAll(extractedValues); } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index edd5e5d8e4b..89cfad98c4f 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -425,12 +425,13 @@ public class IndicesService extends AbstractLifecycleComponent // this will also fail if some plugin fails etc. 
which is nice since we can verify that early final IndexService service = createIndexService("metadata verification", nodeServicesProvider, metaData, indicesQueryCache, indicesFieldDataCache, Collections.emptyList()); + closeables.add(() -> service.close("metadata verification", false)); for (ObjectCursor typeMapping : metaData.getMappings().values()) { // don't apply the default mapping, it has been applied when the mapping was created service.mapperService().merge(typeMapping.value.type(), typeMapping.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, true); } - closeables.add(() -> service.close("metadata verification", false)); + service.getIndexSettings().getScopedSettings().validateUpdate(metaData.getSettings()); } finally { IOUtils.close(closeables); } diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index b5b480fc909..12d0b8bbb6e 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -19,49 +19,141 @@ package org.elasticsearch.indices.analysis; -import org.apache.lucene.analysis.hunspell.Dictionary; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.ASCIIFoldingTokenFilterFactory; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.ApostropheFilterFactory; +import org.elasticsearch.index.analysis.ArabicAnalyzerProvider; +import org.elasticsearch.index.analysis.ArabicNormalizationFilterFactory; +import org.elasticsearch.index.analysis.ArabicStemTokenFilterFactory; +import org.elasticsearch.index.analysis.ArmenianAnalyzerProvider; +import org.elasticsearch.index.analysis.BasqueAnalyzerProvider; +import org.elasticsearch.index.analysis.BrazilianAnalyzerProvider; +import org.elasticsearch.index.analysis.BrazilianStemTokenFilterFactory; +import org.elasticsearch.index.analysis.BulgarianAnalyzerProvider; +import org.elasticsearch.index.analysis.CJKBigramFilterFactory; +import org.elasticsearch.index.analysis.CJKWidthFilterFactory; +import org.elasticsearch.index.analysis.CatalanAnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.ChineseAnalyzerProvider; +import org.elasticsearch.index.analysis.CjkAnalyzerProvider; +import org.elasticsearch.index.analysis.ClassicFilterFactory; +import org.elasticsearch.index.analysis.ClassicTokenizerFactory; +import org.elasticsearch.index.analysis.CommonGramsTokenFilterFactory; +import org.elasticsearch.index.analysis.CzechAnalyzerProvider; +import org.elasticsearch.index.analysis.CzechStemTokenFilterFactory; +import org.elasticsearch.index.analysis.DanishAnalyzerProvider; +import org.elasticsearch.index.analysis.DecimalDigitFilterFactory; +import org.elasticsearch.index.analysis.DelimitedPayloadTokenFilterFactory; +import org.elasticsearch.index.analysis.DutchAnalyzerProvider; +import org.elasticsearch.index.analysis.DutchStemTokenFilterFactory; +import org.elasticsearch.index.analysis.EdgeNGramTokenFilterFactory; +import 
org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory; +import org.elasticsearch.index.analysis.ElisionTokenFilterFactory; +import org.elasticsearch.index.analysis.EnglishAnalyzerProvider; +import org.elasticsearch.index.analysis.FingerprintAnalyzerProvider; +import org.elasticsearch.index.analysis.FingerprintTokenFilterFactory; +import org.elasticsearch.index.analysis.FinnishAnalyzerProvider; +import org.elasticsearch.index.analysis.FrenchAnalyzerProvider; +import org.elasticsearch.index.analysis.FrenchStemTokenFilterFactory; +import org.elasticsearch.index.analysis.GalicianAnalyzerProvider; +import org.elasticsearch.index.analysis.GermanAnalyzerProvider; +import org.elasticsearch.index.analysis.GermanNormalizationFilterFactory; +import org.elasticsearch.index.analysis.GermanStemTokenFilterFactory; +import org.elasticsearch.index.analysis.GreekAnalyzerProvider; +import org.elasticsearch.index.analysis.HindiAnalyzerProvider; +import org.elasticsearch.index.analysis.HindiNormalizationFilterFactory; +import org.elasticsearch.index.analysis.HtmlStripCharFilterFactory; +import org.elasticsearch.index.analysis.HungarianAnalyzerProvider; +import org.elasticsearch.index.analysis.HunspellTokenFilterFactory; +import org.elasticsearch.index.analysis.IndicNormalizationFilterFactory; +import org.elasticsearch.index.analysis.IndonesianAnalyzerProvider; +import org.elasticsearch.index.analysis.IrishAnalyzerProvider; +import org.elasticsearch.index.analysis.ItalianAnalyzerProvider; +import org.elasticsearch.index.analysis.KStemTokenFilterFactory; +import org.elasticsearch.index.analysis.KeepTypesFilterFactory; +import org.elasticsearch.index.analysis.KeepWordFilterFactory; +import org.elasticsearch.index.analysis.KeywordAnalyzerProvider; +import org.elasticsearch.index.analysis.KeywordMarkerTokenFilterFactory; +import org.elasticsearch.index.analysis.KeywordTokenizerFactory; +import org.elasticsearch.index.analysis.LatvianAnalyzerProvider; +import org.elasticsearch.index.analysis.LengthTokenFilterFactory; +import org.elasticsearch.index.analysis.LetterTokenizerFactory; +import org.elasticsearch.index.analysis.LimitTokenCountFilterFactory; +import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider; +import org.elasticsearch.index.analysis.LowerCaseTokenFilterFactory; +import org.elasticsearch.index.analysis.LowerCaseTokenizerFactory; +import org.elasticsearch.index.analysis.MappingCharFilterFactory; +import org.elasticsearch.index.analysis.NGramTokenFilterFactory; +import org.elasticsearch.index.analysis.NGramTokenizerFactory; +import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider; +import org.elasticsearch.index.analysis.PathHierarchyTokenizerFactory; +import org.elasticsearch.index.analysis.PatternAnalyzerProvider; +import org.elasticsearch.index.analysis.PatternCaptureGroupTokenFilterFactory; +import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory; +import org.elasticsearch.index.analysis.PatternReplaceTokenFilterFactory; +import org.elasticsearch.index.analysis.PatternTokenizerFactory; +import org.elasticsearch.index.analysis.PersianAnalyzerProvider; +import org.elasticsearch.index.analysis.PersianNormalizationFilterFactory; +import org.elasticsearch.index.analysis.PorterStemTokenFilterFactory; +import org.elasticsearch.index.analysis.PortugueseAnalyzerProvider; +import org.elasticsearch.index.analysis.ReverseTokenFilterFactory; +import org.elasticsearch.index.analysis.RomanianAnalyzerProvider; +import org.elasticsearch.index.analysis.RussianAnalyzerProvider; 
+import org.elasticsearch.index.analysis.RussianStemTokenFilterFactory; +import org.elasticsearch.index.analysis.ScandinavianFoldingFilterFactory; +import org.elasticsearch.index.analysis.ScandinavianNormalizationFilterFactory; +import org.elasticsearch.index.analysis.SerbianNormalizationFilterFactory; +import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; +import org.elasticsearch.index.analysis.SimpleAnalyzerProvider; +import org.elasticsearch.index.analysis.SnowballAnalyzerProvider; +import org.elasticsearch.index.analysis.SnowballTokenFilterFactory; +import org.elasticsearch.index.analysis.SoraniAnalyzerProvider; +import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory; +import org.elasticsearch.index.analysis.SpanishAnalyzerProvider; +import org.elasticsearch.index.analysis.StandardAnalyzerProvider; +import org.elasticsearch.index.analysis.StandardHtmlStripAnalyzerProvider; +import org.elasticsearch.index.analysis.StandardTokenFilterFactory; +import org.elasticsearch.index.analysis.StandardTokenizerFactory; +import org.elasticsearch.index.analysis.StemmerOverrideTokenFilterFactory; +import org.elasticsearch.index.analysis.StemmerTokenFilterFactory; +import org.elasticsearch.index.analysis.StopAnalyzerProvider; +import org.elasticsearch.index.analysis.StopTokenFilterFactory; +import org.elasticsearch.index.analysis.SwedishAnalyzerProvider; +import org.elasticsearch.index.analysis.ThaiAnalyzerProvider; +import org.elasticsearch.index.analysis.ThaiTokenizerFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.index.analysis.TrimTokenFilterFactory; +import org.elasticsearch.index.analysis.TruncateTokenFilterFactory; +import org.elasticsearch.index.analysis.TurkishAnalyzerProvider; +import org.elasticsearch.index.analysis.UAX29URLEmailTokenizerFactory; +import org.elasticsearch.index.analysis.UniqueTokenFilterFactory; +import org.elasticsearch.index.analysis.UpperCaseTokenFilterFactory; +import org.elasticsearch.index.analysis.WhitespaceAnalyzerProvider; +import org.elasticsearch.index.analysis.WhitespaceTokenizerFactory; +import org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactory; +import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; +import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory; +import org.elasticsearch.plugins.AnalysisPlugin; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; /** - * The AnalysisModule is the main extension point for node and index level analysis components. The lucene classes - * {@link org.apache.lucene.analysis.Analyzer}, {@link org.apache.lucene.analysis.TokenFilter}, {@link org.apache.lucene.analysis.Tokenizer} - * and {@link org.apache.lucene.analysis.CharFilter} can be extended in plugins and registered on node startup when the analysis module - * gets loaded. Since elasticsearch needs to create multiple instances for different configurations dedicated factories need to be provided for - * each of the components: - *
- * <ul>
- * <li>{@link org.apache.lucene.analysis.Analyzer} can be exposed via {@link AnalyzerProvider} and registered on {@link #registerAnalyzer(String, AnalysisProvider)}</li>
- * <li>{@link org.apache.lucene.analysis.TokenFilter} can be exposed via {@link TokenFilterFactory} and registered on {@link #registerTokenFilter(String, AnalysisProvider)}</li>
- * <li>{@link org.apache.lucene.analysis.Tokenizer} can be exposed via {@link TokenizerFactory} and registered on {@link #registerTokenizer(String, AnalysisProvider)}</li>
- * <li>{@link org.apache.lucene.analysis.CharFilter} can be exposed via {@link CharFilterFactory} and registered on {@link #registerCharFilter(String, AnalysisProvider)}</li>
- * </ul>
- *
- * The {@link org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider} is only a functional interface that allows to register factory constructors directly like the plugin example below:
- * <pre>
- *     public class MyAnalysisPlugin extends Plugin {
- *       public void onModule(AnalysisModule module) {
- *         module.registerAnalyzer("my-analyzer-name", MyAnalyzer::new);
- *       }
- *     }
- * </pre>
+ * Sets up {@link AnalysisRegistry}. */ -public final class AnalysisModule extends AbstractModule { - +public final class AnalysisModule { static { Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) @@ -71,108 +163,195 @@ public final class AnalysisModule extends AbstractModule { NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY); } private static final IndexSettings NA_INDEX_SETTINGS; - private final Environment environment; - private final Map> charFilters = new HashMap<>(); - private final Map> tokenFilters = new HashMap<>(); - private final Map> tokenizers = new HashMap<>(); - private final Map> analyzers = new HashMap<>(); - private final Map knownDictionaries = new HashMap<>(); - /** - * Creates a new AnalysisModule - */ - public AnalysisModule(Environment environment) { - this.environment = environment; + private final HunspellService hunspellService; + private final AnalysisRegistry analysisRegistry; + + public AnalysisModule(Environment environment, List plugins) throws IOException { + NamedRegistry> charFilters = setupCharFilters(plugins); + NamedRegistry hunspellDictionaries = setupHunspellDictionaries(plugins); + hunspellService = new HunspellService(environment.settings(), environment, hunspellDictionaries.registry); + NamedRegistry> tokenFilters = setupTokenFilters(plugins, hunspellService); + NamedRegistry> tokenizers = setupTokenizers(plugins); + NamedRegistry>> analyzers = setupAnalyzers(plugins); + analysisRegistry = new AnalysisRegistry(environment, charFilters.registry, tokenFilters.registry, + tokenizers.registry, analyzers.registry); + } + + HunspellService getHunspellService() { + return hunspellService; + } + + public AnalysisRegistry getAnalysisRegistry() { + return analysisRegistry; + } + + private NamedRegistry> setupCharFilters(List plugins) { + NamedRegistry> charFilters = new NamedRegistry<>("char_filter"); + charFilters.register("html_strip", HtmlStripCharFilterFactory::new); + charFilters.register("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new)); + charFilters.register("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new)); + charFilters.registerPlugins(plugins, AnalysisPlugin::getCharFilters); + return charFilters; + } + + public NamedRegistry setupHunspellDictionaries(List plugins) { + NamedRegistry hunspellDictionaries = new NamedRegistry<>("dictionary"); + hunspellDictionaries.registerPlugins(plugins, AnalysisPlugin::getHunspellDictionaries); + return hunspellDictionaries; + } + + private NamedRegistry> setupTokenFilters(List plugins, + HunspellService hunspellService) { + NamedRegistry> tokenFilters = new NamedRegistry<>("token_filter"); + tokenFilters.register("stop", StopTokenFilterFactory::new); + tokenFilters.register("reverse", ReverseTokenFilterFactory::new); + tokenFilters.register("asciifolding", ASCIIFoldingTokenFilterFactory::new); + tokenFilters.register("length", LengthTokenFilterFactory::new); + tokenFilters.register("lowercase", LowerCaseTokenFilterFactory::new); + tokenFilters.register("uppercase", UpperCaseTokenFilterFactory::new); + tokenFilters.register("porter_stem", PorterStemTokenFilterFactory::new); + tokenFilters.register("kstem", KStemTokenFilterFactory::new); + tokenFilters.register("standard", StandardTokenFilterFactory::new); + tokenFilters.register("nGram", NGramTokenFilterFactory::new); + tokenFilters.register("ngram", NGramTokenFilterFactory::new); + 
tokenFilters.register("edgeNGram", EdgeNGramTokenFilterFactory::new); + tokenFilters.register("edge_ngram", EdgeNGramTokenFilterFactory::new); + tokenFilters.register("shingle", ShingleTokenFilterFactory::new); + tokenFilters.register("unique", UniqueTokenFilterFactory::new); + tokenFilters.register("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new)); + tokenFilters.register("trim", TrimTokenFilterFactory::new); + tokenFilters.register("limit", LimitTokenCountFilterFactory::new); + tokenFilters.register("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new)); + tokenFilters.register("snowball", SnowballTokenFilterFactory::new); + tokenFilters.register("stemmer", StemmerTokenFilterFactory::new); + tokenFilters.register("word_delimiter", WordDelimiterTokenFilterFactory::new); + tokenFilters.register("delimited_payload_filter", DelimitedPayloadTokenFilterFactory::new); + tokenFilters.register("elision", ElisionTokenFilterFactory::new); + tokenFilters.register("keep", requriesAnalysisSettings(KeepWordFilterFactory::new)); + tokenFilters.register("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new)); + tokenFilters.register("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); + tokenFilters.register("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new)); + tokenFilters.register("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new)); + tokenFilters.register("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new)); + tokenFilters.register("arabic_stem", ArabicStemTokenFilterFactory::new); + tokenFilters.register("brazilian_stem", BrazilianStemTokenFilterFactory::new); + tokenFilters.register("czech_stem", CzechStemTokenFilterFactory::new); + tokenFilters.register("dutch_stem", DutchStemTokenFilterFactory::new); + tokenFilters.register("french_stem", FrenchStemTokenFilterFactory::new); + tokenFilters.register("german_stem", GermanStemTokenFilterFactory::new); + tokenFilters.register("russian_stem", RussianStemTokenFilterFactory::new); + tokenFilters.register("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new)); + tokenFilters.register("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new)); + tokenFilters.register("arabic_normalization", ArabicNormalizationFilterFactory::new); + tokenFilters.register("german_normalization", GermanNormalizationFilterFactory::new); + tokenFilters.register("hindi_normalization", HindiNormalizationFilterFactory::new); + tokenFilters.register("indic_normalization", IndicNormalizationFilterFactory::new); + tokenFilters.register("sorani_normalization", SoraniNormalizationFilterFactory::new); + tokenFilters.register("persian_normalization", PersianNormalizationFilterFactory::new); + tokenFilters.register("scandinavian_normalization", ScandinavianNormalizationFilterFactory::new); + tokenFilters.register("scandinavian_folding", ScandinavianFoldingFilterFactory::new); + tokenFilters.register("serbian_normalization", SerbianNormalizationFilterFactory::new); + + tokenFilters.register("hunspell", requriesAnalysisSettings( + (indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService))); + tokenFilters.register("cjk_bigram", CJKBigramFilterFactory::new); + tokenFilters.register("cjk_width", CJKWidthFilterFactory::new); + + 
tokenFilters.register("apostrophe", ApostropheFilterFactory::new); + tokenFilters.register("classic", ClassicFilterFactory::new); + tokenFilters.register("decimal_digit", DecimalDigitFilterFactory::new); + tokenFilters.register("fingerprint", FingerprintTokenFilterFactory::new); + tokenFilters.registerPlugins(plugins, AnalysisPlugin::getTokenFilters); + return tokenFilters; + } + + private NamedRegistry> setupTokenizers(List plugins) { + NamedRegistry> tokenizers = new NamedRegistry<>("tokenizer"); + tokenizers.register("standard", StandardTokenizerFactory::new); + tokenizers.register("uax_url_email", UAX29URLEmailTokenizerFactory::new); + tokenizers.register("path_hierarchy", PathHierarchyTokenizerFactory::new); + tokenizers.register("PathHierarchy", PathHierarchyTokenizerFactory::new); + tokenizers.register("keyword", KeywordTokenizerFactory::new); + tokenizers.register("letter", LetterTokenizerFactory::new); + tokenizers.register("lowercase", LowerCaseTokenizerFactory::new); + tokenizers.register("whitespace", WhitespaceTokenizerFactory::new); + tokenizers.register("nGram", NGramTokenizerFactory::new); + tokenizers.register("ngram", NGramTokenizerFactory::new); + tokenizers.register("edgeNGram", EdgeNGramTokenizerFactory::new); + tokenizers.register("edge_ngram", EdgeNGramTokenizerFactory::new); + tokenizers.register("pattern", PatternTokenizerFactory::new); + tokenizers.register("classic", ClassicTokenizerFactory::new); + tokenizers.register("thai", ThaiTokenizerFactory::new); + tokenizers.registerPlugins(plugins, AnalysisPlugin::getTokenizers); + return tokenizers; + } + + private NamedRegistry>> setupAnalyzers(List plugins) { + NamedRegistry>> analyzers = new NamedRegistry<>("analyzer"); + analyzers.register("default", StandardAnalyzerProvider::new); + analyzers.register("standard", StandardAnalyzerProvider::new); + analyzers.register("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); + analyzers.register("simple", SimpleAnalyzerProvider::new); + analyzers.register("stop", StopAnalyzerProvider::new); + analyzers.register("whitespace", WhitespaceAnalyzerProvider::new); + analyzers.register("keyword", KeywordAnalyzerProvider::new); + analyzers.register("pattern", PatternAnalyzerProvider::new); + analyzers.register("snowball", SnowballAnalyzerProvider::new); + analyzers.register("arabic", ArabicAnalyzerProvider::new); + analyzers.register("armenian", ArmenianAnalyzerProvider::new); + analyzers.register("basque", BasqueAnalyzerProvider::new); + analyzers.register("brazilian", BrazilianAnalyzerProvider::new); + analyzers.register("bulgarian", BulgarianAnalyzerProvider::new); + analyzers.register("catalan", CatalanAnalyzerProvider::new); + analyzers.register("chinese", ChineseAnalyzerProvider::new); + analyzers.register("cjk", CjkAnalyzerProvider::new); + analyzers.register("czech", CzechAnalyzerProvider::new); + analyzers.register("danish", DanishAnalyzerProvider::new); + analyzers.register("dutch", DutchAnalyzerProvider::new); + analyzers.register("english", EnglishAnalyzerProvider::new); + analyzers.register("finnish", FinnishAnalyzerProvider::new); + analyzers.register("french", FrenchAnalyzerProvider::new); + analyzers.register("galician", GalicianAnalyzerProvider::new); + analyzers.register("german", GermanAnalyzerProvider::new); + analyzers.register("greek", GreekAnalyzerProvider::new); + analyzers.register("hindi", HindiAnalyzerProvider::new); + analyzers.register("hungarian", HungarianAnalyzerProvider::new); + analyzers.register("indonesian", 
IndonesianAnalyzerProvider::new); + analyzers.register("irish", IrishAnalyzerProvider::new); + analyzers.register("italian", ItalianAnalyzerProvider::new); + analyzers.register("latvian", LatvianAnalyzerProvider::new); + analyzers.register("lithuanian", LithuanianAnalyzerProvider::new); + analyzers.register("norwegian", NorwegianAnalyzerProvider::new); + analyzers.register("persian", PersianAnalyzerProvider::new); + analyzers.register("portuguese", PortugueseAnalyzerProvider::new); + analyzers.register("romanian", RomanianAnalyzerProvider::new); + analyzers.register("russian", RussianAnalyzerProvider::new); + analyzers.register("sorani", SoraniAnalyzerProvider::new); + analyzers.register("spanish", SpanishAnalyzerProvider::new); + analyzers.register("swedish", SwedishAnalyzerProvider::new); + analyzers.register("turkish", TurkishAnalyzerProvider::new); + analyzers.register("thai", ThaiAnalyzerProvider::new); + analyzers.register("fingerprint", FingerprintAnalyzerProvider::new); + analyzers.registerPlugins(plugins, AnalysisPlugin::getAnalyzers); + return analyzers; + } + + private static AnalysisModule.AnalysisProvider requriesAnalysisSettings(AnalysisModule.AnalysisProvider provider) { + return new AnalysisModule.AnalysisProvider() { + @Override + public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { + return provider.get(indexSettings, environment, name, settings); + } + @Override + public boolean requiresAnalysisSettings() { + return true; + } + }; } /** - * Registers a new {@link AnalysisProvider} to create - * {@link CharFilterFactory} instance per node as well as per index. - */ - public void registerCharFilter(String name, AnalysisProvider charFilter) { - if (charFilter == null) { - throw new IllegalArgumentException("char_filter provider must not be null"); - } - if (charFilters.putIfAbsent(name, charFilter) != null) { - throw new IllegalArgumentException("char_filter provider for name " + name + " already registered"); - } - } - - /** - * Registers a new {@link AnalysisProvider} to create - * {@link TokenFilterFactory} instance per node as well as per index. - */ - public void registerTokenFilter(String name, AnalysisProvider tokenFilter) { - if (tokenFilter == null) { - throw new IllegalArgumentException("token_filter provider must not be null"); - } - if (tokenFilters.putIfAbsent(name, tokenFilter) != null) { - throw new IllegalArgumentException("token_filter provider for name " + name + " already registered"); - } - } - - /** - * Registers a new {@link AnalysisProvider} to create - * {@link TokenizerFactory} instance per node as well as per index. - */ - public void registerTokenizer(String name, AnalysisProvider tokenizer) { - if (tokenizer == null) { - throw new IllegalArgumentException("tokenizer provider must not be null"); - } - if (tokenizers.putIfAbsent(name, tokenizer) != null) { - throw new IllegalArgumentException("tokenizer provider for name " + name + " already registered"); - } - } - - /** - * Registers a new {@link AnalysisProvider} to create - * {@link AnalyzerProvider} instance per node as well as per index. 
- */ - public void registerAnalyzer(String name, AnalysisProvider analyzer) { - if (analyzer == null) { - throw new IllegalArgumentException("analyzer provider must not be null"); - } - if (analyzers.putIfAbsent(name, analyzer) != null) { - throw new IllegalArgumentException("analyzer provider for name " + name + " already registered"); - } - } - - /** - * Registers a new hunspell {@link Dictionary} that can be referenced by the given name in - * hunspell analysis configuration. - */ - public void registerHunspellDictionary(String name, Dictionary dictionary) { - if (knownDictionaries.putIfAbsent(name, dictionary) != null) { - throw new IllegalArgumentException("dictionary for [" + name + "] is already registered"); - } - } - - @Override - protected void configure() { - try { - AnalysisRegistry registry = buildRegistry(); - bind(HunspellService.class).toInstance(registry.getHunspellService()); - bind(AnalysisRegistry.class).toInstance(registry); - } catch (IOException e) { - throw new ElasticsearchException("failed to load hunspell service", e); - } - } - - /** - * Builds an {@link AnalysisRegistry} from the current configuration. - */ - public AnalysisRegistry buildRegistry() throws IOException { - return new AnalysisRegistry(new HunspellService(environment.settings(), environment, knownDictionaries), environment, charFilters, tokenFilters, tokenizers, analyzers); - } - - /** - * AnalysisProvider is the basic factory interface for registering analysis components like: - *
- * <ul>
- * <li>{@link TokenizerFactory} - see {@link AnalysisModule#registerTokenizer(String, AnalysisProvider)}</li>
- * <li>{@link CharFilterFactory} - see {@link AnalysisModule#registerCharFilter(String, AnalysisProvider)}</li>
- * <li>{@link AnalyzerProvider} - see {@link AnalysisModule#registerAnalyzer(String, AnalysisProvider)}</li>
- * <li>{@link TokenFilterFactory} - see {@link AnalysisModule#registerTokenFilter(String, AnalysisProvider)}</li>
- * </ul>
+ * The basic factory interface for analysis components. */ public interface AnalysisProvider { @@ -195,7 +374,8 @@ public final class AnalysisModule extends AbstractModule { * @param name the name of the analysis component * @return a new provider instance * @throws IOException if an {@link IOException} occurs - * @throws IllegalArgumentException if the provider requires analysis settings ie. if {@link #requiresAnalysisSettings()} returns true + * @throws IllegalArgumentException if the provider requires analysis settings ie. if {@link #requiresAnalysisSettings()} returns + * true */ default T get(Environment environment, String name) throws IOException { if (requiresAnalysisSettings()) { @@ -212,4 +392,29 @@ public final class AnalysisModule extends AbstractModule { return false; } } + + private static class NamedRegistry { + private final Map registry = new HashMap<>(); + private final String targetName; + + public NamedRegistry(String targetName) { + this.targetName = targetName; + } + + private void register(String name, T t) { + requireNonNull(name, "name is required"); + requireNonNull(t, targetName + " is required"); + if (registry.putIfAbsent(name, t) != null) { + throw new IllegalArgumentException(targetName + " for name " + name + " already registered"); + } + } + + private

<P> void registerPlugins(List<P> plugins, Function<P, Map<String, T>> lookup) { + for (P plugin : plugins) { + for (Map.Entry<String, T> entry : lookup.apply(plugin).entrySet()) { + register(entry.getKey(), entry.getValue()); + } + } + } + } } diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 27b7ab23c9f..97667fcdac6 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -52,7 +52,8 @@ import java.util.function.Function; * The following settings can be set for each dictionary: *

* <ul>
* <li>{@code ignore_case} - If true, dictionary matching will be case insensitive (defaults to {@code false})</li>
- * <li>{@code strict_affix_parsing} - Determines whether errors while reading a affix rules file will cause exception or simple be ignored (defaults to {@code true})</li>
+ * <li>{@code strict_affix_parsing} - Determines whether errors while reading an affix rules file will cause an exception or simply be ignored
+ * (defaults to {@code true})</li>
* </ul>
*
* These settings can either be configured as node level configuration, such as: @@ -86,7 +87,8 @@ public class HunspellService extends AbstractComponent { private final Path hunspellDir; private final Function loadingFunction; - public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) throws IOException { + public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) + throws IOException { super(settings); this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); this.hunspellDir = resolveHunspellDirectory(env); @@ -166,7 +168,7 @@ public class HunspellService extends AbstractComponent { // merging node settings with hunspell dictionary specific settings Settings dictSettings = HUNSPELL_DICTIONARY_OPTIONS.get(nodeSettings); - nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale)); + nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale + ".")); boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase); diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index da5f9d16f51..a4517baf45b 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -56,18 +55,17 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Iterator; import java.util.List; -import java.util.Set; import java.util.concurrent.TimeUnit; /** * */ public class TransportNodesListShardStoreMetaData extends TransportNodesAction + TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData, + TransportNodesListShardStoreMetaData.NodeRequest, + TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> implements AsyncShardFetch.Lister { + TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> { public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store"; @@ -81,21 +79,14 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction listener) { - execute(new Request(shardId, false, nodesIds), listener); - } - - @Override - protected String[] resolveNodes(Request request, ClusterState clusterState) { - // default implementation may filter out non existent nodes. it's important to keep exactly the ids - // we were given for accounting on the caller - return request.nodesIds(); + public void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener listener) { + execute(new Request(shardId, nodes), listener); } @Override @@ -116,19 +107,6 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction, Streamable { - // here also trasmit sync id, else recovery will not use sync id because of stupid gateway allocator every now and then... 
- private boolean allocated; private ShardId shardId; Store.MetadataSnapshot metadataSnapshot; StoreFilesMetaData() { } - public StoreFilesMetaData(boolean allocated, ShardId shardId, Store.MetadataSnapshot metadataSnapshot) { - this.allocated = allocated; + public StoreFilesMetaData(ShardId shardId, Store.MetadataSnapshot metadataSnapshot) { this.shardId = shardId; this.metadataSnapshot = metadataSnapshot; } - public boolean allocated() { - return allocated; - } - public ShardId shardId() { return this.shardId; } + public boolean isEmpty() { + return metadataSnapshot.size() == 0; + } + @Override public Iterator iterator() { return metadataSnapshot.iterator(); @@ -225,14 +207,12 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction nodesIds) { - super(nodesIds.toArray(new String[nodesIds.size()])); + public Request(ShardId shardId, DiscoveryNode[] nodes) { + super(nodes); this.shardId = shardId; - this.unallocated = unallocated; - } - - public Request(ShardId shardId, boolean unallocated, String... nodesIds) { - super(nodesIds); - this.shardId = shardId; - this.unallocated = unallocated; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); shardId = ShardId.readShardId(in); - unallocated = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); shardId.writeTo(out); - out.writeBoolean(unallocated); } } @@ -307,29 +284,24 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction{@code + * public class AnalysisPhoneticPlugin extends Plugin implements AnalysisPlugin { + * @Override + * public Map> getTokenFilters() { + * return singletonMap("phonetic", PhoneticTokenFilterFactory::new); + * } + * } + * } + */ +public interface AnalysisPlugin { + /** + * Override to add additional {@link CharFilter}s. + */ + default Map> getCharFilters() { + return emptyMap(); + } + + /** + * Override to add additional {@link TokenFilter}s. + */ + default Map> getTokenFilters() { + return emptyMap(); + } + + /** + * Override to add additional {@link Tokenizer}s. + */ + default Map> getTokenizers() { + return emptyMap(); + } + + /** + * Override to add additional {@link Analyzer}s. + */ + default Map>> getAnalyzers() { + return emptyMap(); + } + + /** + * Override to add additional hunspell {@link org.apache.lucene.analysis.hunspell.Dictionary}s. + */ + default Map getHunspellDictionaries() { + return emptyMap(); + } +} diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 0df9eaf467f..08a8ce7124c 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.threadpool.ExecutorBuilder; @@ -103,6 +104,14 @@ public abstract class Plugin { @Deprecated public final void onModule(ScriptModule module) {} + /** + * Old-style analysis extension point. + * + * @deprecated implement {@link AnalysisPlugin} instead + */ + @Deprecated + public final void onModule(AnalysisModule module) {} + /** * Provides the list of this plugin's custom thread pools, empty if * none. 
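The Plugin.java hunk above completes the migration pattern for analysis extensions: instead of pushing registrations into AnalysisModule through the now-deprecated onModule(AnalysisModule), a plugin implements AnalysisPlugin and the module pulls its named factories via registerPlugins(plugins, AnalysisPlugin::getTokenFilters) and the sibling lookups. A minimal sketch of the new style follows, modeled on the AnalysisPhoneticPlugin example in the AnalysisPlugin javadoc; MyAnalysisPlugin, the "my_filter" name, and the pass-through MyTokenFilterFactory are illustrative stand-ins, not part of this change:

import java.util.Map;

import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;

import static java.util.Collections.singletonMap;

public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
    @Override
    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        // The key is the name later referenced from index analysis settings; the
        // constructor reference matches AnalysisProvider#get(IndexSettings, Environment, String, Settings).
        return singletonMap("my_filter", MyTokenFilterFactory::new);
    }

    // Hypothetical factory: a do-nothing filter, only here to make the sketch self-contained.
    public static class MyTokenFilterFactory extends AbstractTokenFilterFactory {
        public MyTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
            super(indexSettings, name, settings);
        }

        @Override
        public TokenStream create(TokenStream tokenStream) {
            return tokenStream; // pass-through for illustration
        }
    }
}

Because plugin entries flow through the same NamedRegistry.register as the built-in factories, a plugin that reuses a built-in name fails at node startup with the "already registered" error rather than silently shadowing the built-in.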
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 71081740d33..4769b5954ba 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.script.NativeScriptFactory; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngineService; @@ -207,8 +208,8 @@ public class PluginsService extends AbstractComponent { } Class moduleClass = method.getParameterTypes()[0]; if (!Module.class.isAssignableFrom(moduleClass)) { - if (moduleClass == ScriptModule.class) { - // This is still part of the Plugin class to point the user to the new implementation + if (method.getDeclaringClass() == Plugin.class) { + // These are still part of the Plugin class to point the user to the new implementations continue; } throw new RuntimeException( diff --git a/core/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java b/core/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java index 7f85b8182a5..c1e2a43c953 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java @@ -27,8 +27,7 @@ import java.util.Collections; import java.util.List; /** - * An additional extension point to {@link Plugin}. Plugins extending the scripting functionality must implement this inteface - * to provide access to script engines or script factories. + * An additional extension point for {@link Plugin}s that extends Elasticsearch's scripting functionality. */ public interface ScriptPlugin { diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index af2faba01c9..7d8a39b02b9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -78,7 +78,7 @@ public class RestGetSourceAction extends BaseRestHandler { @Override public RestResponse buildResponse(GetResponse response) throws Exception { XContentBuilder builder = channel.newBuilder(response.getSourceInternal(), false); - if (!response.isExists()) { + if (response.isSourceEmpty()) { // check if doc source (or doc itself) is missing return new BytesRestResponse(NOT_FOUND, builder); } else { builder.rawValue(response.getSourceInternal()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java index 747e62ea381..01f3be435e7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java @@ -39,15 +39,47 @@ import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; /** - * + * Base class for {@code HEAD} request handlers for a single document. 
*/ -public class RestHeadAction extends BaseRestHandler { +public abstract class RestHeadAction extends BaseRestHandler { - @Inject - public RestHeadAction(Settings settings, RestController controller, Client client) { + /** + * Handler to check for document existence. + */ + public static class Document extends RestHeadAction { + + @Inject + public Document(Settings settings, RestController controller, Client client) { + super(settings, client, false); + controller.registerHandler(HEAD, "/{index}/{type}/{id}", this); + } + } + + /** + * Handler to check for document source existence (may be disabled in the mapping). + */ + public static class Source extends RestHeadAction { + + @Inject + public Source(Settings settings, RestController controller, Client client) { + super(settings, client, true); + controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this); + } + } + + private final boolean source; + + /** + * All subclasses must be registered in {@link org.elasticsearch.common.network.NetworkModule}. + * + * @param settings injected settings + * @param client injected client + * @param source {@code false} to check for {@link GetResponse#isExists()}. + * {@code true} to also check for {@link GetResponse#isSourceEmpty()}. + */ + public RestHeadAction(Settings settings, Client client, boolean source) { super(settings, client); - controller.registerHandler(HEAD, "/{index}/{type}/{id}", this); - controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this); + this.source = source; } @Override @@ -68,6 +100,8 @@ public class RestHeadAction extends BaseRestHandler { public RestResponse buildResponse(GetResponse response) { if (!response.isExists()) { return new BytesRestResponse(NOT_FOUND, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); + } else if (source && response.isSourceEmpty()) { // doc exists, but source might not (disabled in the mapping) + return new BytesRestResponse(NOT_FOUND, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); } else { return new BytesRestResponse(OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index f46c41dcd17..5f5fe84d573 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; @@ -176,35 +175,27 @@ public class RestSearchAction extends BaseRestHandler { } } - if (request.param("fields") != null) { - throw new IllegalArgumentException("The parameter [" + - SearchSourceBuilder.FIELDS_FIELD + "] is not longer supported, please use [" + - SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " + - "if the field is not stored"); - } - - String sField = request.param("stored_fields"); + String sField = request.param("fields"); if (sField != null) { if (!Strings.hasText(sField)) { - searchSourceBuilder.noStoredFields(); + searchSourceBuilder.noFields(); } else { String[] sFields = 
Strings.splitStringByCommaToArray(sField); if (sFields != null) { for (String field : sFields) { - searchSourceBuilder.storedField(field); + searchSourceBuilder.field(field); } } } } - String sDocValueFields = request.param("docvalue_fields"); - if (sDocValueFields == null) { - sDocValueFields = request.param("fielddata_fields"); - } - if (sDocValueFields != null) { - if (Strings.hasText(sDocValueFields)) { - String[] sFields = Strings.splitStringByCommaToArray(sDocValueFields); - for (String field : sFields) { - searchSourceBuilder.docValueField(field); + String sFieldDataFields = request.param("fielddata_fields"); + if (sFieldDataFields != null) { + if (Strings.hasText(sFieldDataFields)) { + String[] sFields = Strings.splitStringByCommaToArray(sFieldDataFields); + if (sFields != null) { + for (String field : sFields) { + searchSourceBuilder.fieldDataField(field); + } } } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 6ceb021d9f6..50b59631e95 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -713,8 +713,8 @@ public class SearchService extends AbstractLifecycleComponent imp throw new SearchContextException(context, "failed to create RescoreSearchContext", e); } } - if (source.storedFields() != null) { - context.fieldNames().addAll(source.storedFields()); + if (source.fields() != null) { + context.fieldNames().addAll(source.fields()); } if (source.explain() != null) { context.explain(source.explain()); @@ -722,9 +722,9 @@ public class SearchService extends AbstractLifecycleComponent imp if (source.fetchSource() != null) { context.fetchSourceContext(source.fetchSource()); } - if (source.docValueFields() != null) { + if (source.fieldDataFields() != null) { FieldDataFieldsContext fieldDataFieldsContext = context.getFetchSubPhaseContext(FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY); - for (String field : source.docValueFields()) { + for (String field : source.fieldDataFields()) { fieldDataFieldsContext.add(new FieldDataField(field)); } fieldDataFieldsContext.setHitExecutionNeeded(true); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregatorFactory.java index a9c3d24a429..d44f3133789 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregatorFactory.java @@ -69,9 +69,7 @@ public abstract class AbstractHistogramAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - Rounding rounding = createRounding(); - return new HistogramAggregator(name, factories, rounding, order, keyed, minDocCount, extendedBounds, null, config.format(), - histogramFactory, context, parent, pipelineAggregators, metaData); + return createAggregator(null, parent, pipelineAggregators, metaData); } protected Rounding createRounding() { @@ -92,6 +90,11 @@ public abstract class AbstractHistogramAggregatorFactory pipelineAggregators, + Map metaData) throws IOException { Rounding rounding = createRounding(); // we need to round the bounds given by the user and we have to do it // for every aggregator we create diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java index 0121da2fa90..13d10bd0a0c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations.bucket.range; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamInputReader; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -47,7 +47,7 @@ public abstract class AbstractRangeBuilder rangeFactory, StreamInputReader rangeReader) + protected AbstractRangeBuilder(StreamInput in, InternalRange.Factory rangeFactory, Writeable.Reader rangeReader) throws IOException { super(in, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType()); this.rangeFactory = rangeFactory; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java index c9ed41c6ddc..00d783c12e3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java @@ -567,9 +567,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder fieldNames = new ArrayList<>(); fieldNames.add(parser.text()); factory.fields(fieldNames); @@ -694,7 +694,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder fieldNames = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { @@ -705,7 +705,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder fieldDataFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index e4b72ef3b28..24278bdf127 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.builder; import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -42,7 +41,6 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import 
org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; @@ -51,6 +49,7 @@ import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; @@ -84,8 +83,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public static final ParseField EXPLAIN_FIELD = new ParseField("explain"); public static final ParseField _SOURCE_FIELD = new ParseField("_source"); public static final ParseField FIELDS_FIELD = new ParseField("fields"); - public static final ParseField STORED_FIELDS_FIELD = new ParseField("stored_fields"); - public static final ParseField DOCVALUE_FIELDS_FIELD = new ParseField("docvalue_fields", "fielddata_fields"); + public static final ParseField FIELDDATA_FIELDS_FIELD = new ParseField("fielddata_fields"); public static final ParseField SCRIPT_FIELDS_FIELD = new ParseField("script_fields"); public static final ParseField SCRIPT_FIELD = new ParseField("script"); public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure"); @@ -148,8 +146,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private long timeoutInMillis = -1; private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER; - private List storedFieldNames; - private List docValueFields; + private List fieldNames; + private List fieldDataFields; private List scriptFields; private FetchSourceContext fetchSourceContext; @@ -183,8 +181,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); explain = in.readOptionalBoolean(); fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); - docValueFields = (List) in.readGenericValue(); - storedFieldNames = (List) in.readGenericValue(); + boolean hasFieldDataFields = in.readBoolean(); + if (hasFieldDataFields) { + int size = in.readVInt(); + fieldDataFields = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + fieldDataFields.add(in.readString()); + } + } + boolean hasFieldNames = in.readBoolean(); + if (hasFieldNames) { + int size = in.readVInt(); + fieldNames = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + fieldNames.add(in.readString()); + } + } from = in.readVInt(); highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new); boolean hasIndexBoost = in.readBoolean(); @@ -243,8 +255,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ out.writeOptionalWriteable(aggregations); out.writeOptionalBoolean(explain); out.writeOptionalStreamable(fetchSourceContext); - out.writeGenericValue(docValueFields); - out.writeGenericValue(storedFieldNames); + boolean hasFieldDataFields = fieldDataFields != null; + out.writeBoolean(hasFieldDataFields); + if (hasFieldDataFields) { + out.writeVInt(fieldDataFields.size()); + for (String field : fieldDataFields) { + out.writeString(field); + } + } + boolean hasFieldNames = fieldNames != null; + out.writeBoolean(hasFieldNames); + if (hasFieldNames) { + out.writeVInt(fieldNames.size()); + for (String field : fieldNames) { + 
out.writeString(field); + } + } out.writeVInt(from); out.writeOptionalWriteable(highlightBuilder); boolean hasIndexBoost = indexBoost != null; @@ -706,87 +732,60 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } /** - * Adds a stored field to load and return as part of the + * Adds a field to load and return (note, it must be stored) as part of the * search request. If none are specified, the source of the document will be * return. */ - public SearchSourceBuilder storedField(String name) { - if (storedFieldNames == null) { - storedFieldNames = new ArrayList<>(); + public SearchSourceBuilder field(String name) { + if (fieldNames == null) { + fieldNames = new ArrayList<>(); } - storedFieldNames.add(name); + fieldNames.add(name); return this; } /** - * Sets the stored fields to load and return as part of the search request. If none + * Sets the fields to load and return as part of the search request. If none * are specified, the source of the document will be returned. */ - public SearchSourceBuilder storedFields(List fields) { - this.storedFieldNames = fields; + public SearchSourceBuilder fields(List fields) { + this.fieldNames = fields; return this; } /** - * Sets no stored fields to be loaded, resulting in only id and type to be returned + * Sets no fields to be loaded, resulting in only id and type to be returned * per field. */ - public SearchSourceBuilder noStoredFields() { - this.storedFieldNames = Collections.emptyList(); + public SearchSourceBuilder noFields() { + this.fieldNames = Collections.emptyList(); return this; } /** - * Gets the stored fields to load and return as part of the search request. + * Gets the fields to load and return as part of the search request. */ - public List storedFields() { - return storedFieldNames; + public List fields() { + return fieldNames; } - /** - * Adds a field to load from the docvalue and return as part of the + * Adds a field to load from the field data cache and return as part of the * search request. - * - * @deprecated Use {@link SearchSourceBuilder#docValueField(String)} instead. */ - @Deprecated public SearchSourceBuilder fieldDataField(String name) { - if (docValueFields == null) { - docValueFields = new ArrayList<>(); + if (fieldDataFields == null) { + fieldDataFields = new ArrayList<>(); } - docValueFields.add(name); + fieldDataFields.add(name); return this; } /** - * Gets the docvalue fields. - * - * @deprecated Use {@link SearchSourceBuilder#docValueFields()} instead. + * Gets the field-data fields. */ - @Deprecated public List fieldDataFields() { - return docValueFields; - } - - - /** - * Gets the docvalue fields. - */ - public List docValueFields() { - return docValueFields; - } - - /** - * Adds a field to load from the docvalue and return as part of the - * search request. 
- */ - public SearchSourceBuilder docValueField(String name) { - if (docValueFields == null) { - docValueFields = new ArrayList<>(); - } - docValueFields.add(name); - return this; + return fieldDataFields; } /** @@ -911,8 +910,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ rewrittenBuilder.explain = explain; rewrittenBuilder.ext = ext; rewrittenBuilder.fetchSourceContext = fetchSourceContext; - rewrittenBuilder.docValueFields = docValueFields; - rewrittenBuilder.storedFieldNames = storedFieldNames; + rewrittenBuilder.fieldDataFields = fieldDataFields; + rewrittenBuilder.fieldNames = fieldNames; rewrittenBuilder.from = from; rewrittenBuilder.highlightBuilder = highlightBuilder; rewrittenBuilder.indexBoost = indexBoost; @@ -959,7 +958,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } else if (context.getParseFieldMatcher().match(currentFieldName, SIZE_FIELD)) { size = parser.intValue(); } else if (context.getParseFieldMatcher().match(currentFieldName, TIMEOUT_FIELD)) { - timeoutInMillis = parser.longValue(); + timeoutInMillis = TimeValue.parseTimeValue(parser.text(), null, TIMEOUT_FIELD.getPreferredName()).millis(); } else if (context.getParseFieldMatcher().match(currentFieldName, TERMINATE_AFTER_FIELD)) { terminateAfter = parser.intValue(); } else if (context.getParseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) { @@ -972,16 +971,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ trackScores = parser.booleanValue(); } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { fetchSourceContext = FetchSourceContext.parse(context); - } else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) { - storedField(parser.text()); + } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { + field(parser.text()); } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) { sort(parser.text()); } else if (context.getParseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) { profile = parser.booleanValue(); - } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { - throw new ParsingException(parser.getTokenLocation(), "Deprecated field [" + - SearchSourceBuilder.FIELDS_FIELD + "] used, expected [" + - SearchSourceBuilder.STORED_FIELDS_FIELD + "] instead"); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); @@ -1031,21 +1026,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ parser.getTokenLocation()); } } else if (token == XContentParser.Token.START_ARRAY) { - if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) { - storedFieldNames = new ArrayList<>(); + + if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { + fieldNames = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { - storedFieldNames.add(parser.text()); + fieldNames.add(parser.text()); } else { throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation()); } } - } else if (context.getParseFieldMatcher().match(currentFieldName, DOCVALUE_FIELDS_FIELD)) { - docValueFields = new 
ArrayList<>(); + } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDDATA_FIELDS_FIELD)) { + fieldDataFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_STRING) { - docValueFields.add(parser.text()); + fieldDataFields.add(parser.text()); } else { throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation()); @@ -1072,11 +1068,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ fetchSourceContext = FetchSourceContext.parse(context); } else if (context.getParseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) { searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher()); - } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { - throw new ParsingException(parser.getTokenLocation(), "The field [" + - SearchSourceBuilder.FIELDS_FIELD + "] is not longer supported, please use [" + - SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " + - "if the field is not stored"); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); @@ -1105,7 +1096,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } if (timeoutInMillis != -1) { - builder.field(TIMEOUT_FIELD.getPreferredName(), timeoutInMillis); + builder.field(TIMEOUT_FIELD.getPreferredName(), TimeValue.timeValueMillis(timeoutInMillis).toString()); } if (terminateAfter != SearchContext.DEFAULT_TERMINATE_AFTER) { @@ -1140,21 +1131,21 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext); } - if (storedFieldNames != null) { - if (storedFieldNames.size() == 1) { - builder.field(STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0)); + if (fieldNames != null) { + if (fieldNames.size() == 1) { + builder.field(FIELDS_FIELD.getPreferredName(), fieldNames.get(0)); } else { - builder.startArray(STORED_FIELDS_FIELD.getPreferredName()); - for (String fieldName : storedFieldNames) { + builder.startArray(FIELDS_FIELD.getPreferredName()); + for (String fieldName : fieldNames) { builder.value(fieldName); } builder.endArray(); } } - if (docValueFields != null) { - builder.startArray(DOCVALUE_FIELDS_FIELD.getPreferredName()); - for (String fieldDataField : docValueFields) { + if (fieldDataFields != null) { + builder.startArray(FIELDDATA_FIELDS_FIELD.getPreferredName()); + for (String fieldDataField : fieldDataFields) { builder.value(fieldDataField); } builder.endArray(); @@ -1348,7 +1339,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ @Override public int hashCode() { - return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldNames, from, + return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile); } @@ -1365,8 +1356,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return 
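The timeout hunks above change the representation from a bare millisecond count to a parsed time value, so strings like "30s" round-trip through parsing and rendering. An illustrative round-trip (values are made up):

    // Parse a human-readable duration into millis, then render it back.
    long millis = TimeValue.parseTimeValue("30s", null, "timeout").millis();  // 30000
    String rendered = TimeValue.timeValueMillis(millis).toString();           // "30s"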
Objects.equals(aggregations, other.aggregations) && Objects.equals(explain, other.explain) && Objects.equals(fetchSourceContext, other.fetchSourceContext) - && Objects.equals(docValueFields, other.docValueFields) - && Objects.equals(storedFieldNames, other.storedFieldNames) + && Objects.equals(fieldDataFields, other.fieldDataFields) + && Objects.equals(fieldNames, other.fieldNames) && Objects.equals(from, other.from) && Objects.equals(highlightBuilder, other.highlightBuilder) && Objects.equals(indexBoost, other.indexBoost) diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 44ca6fb972e..74434413ae9 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -39,7 +39,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RestoreSource; @@ -436,7 +435,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis if (request.includeGlobalState()) { if (metaData.persistentSettings() != null) { Settings settings = metaData.persistentSettings(); - clusterSettings.dryRun(settings); + clusterSettings.validateUpdate(settings); mdBuilder.persistentSettings(settings); } if (metaData.templates() != null) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 8f8c10c8dda..812691f064b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -38,7 +38,6 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; @@ -261,17 +260,6 @@ public class TestTaskPlugin extends Plugin { return new NodesResponse(clusterService.getClusterName(), responses, failures); } - @Override - protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) { - List list = new ArrayList<>(); - for (String node : nodesIds) { - if (nodes.nodeExists(node)) { - list.add(node); - } - } - return list.toArray(new String[list.size()]); - } - @Override protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) { return new NodeRequest(request, nodeId, request.getShouldBlock()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index ff7faab1f6a..5e2c503eba1 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -29,12 +29,15 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.util.List; +import static java.util.Collections.emptyList; + public class TransportAnalyzeActionTests extends ESTestCase { private AnalysisService analysisService; @@ -56,7 +59,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter").build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); environment = new Environment(settings); - registry = new AnalysisRegistry(null, environment); + registry = new AnalysisModule(environment, emptyList()).getAnalysisRegistry(); analysisService = registry.build(idxSettings); } diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 626850fd119..a15f89bced4 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -96,7 +96,7 @@ public class TransportNodesActionTests extends ESTestCase { TestNodesRequest request = new TestNodesRequest(finalNodesIds); action.new AsyncAction(null, request, new PlainActionFuture<>()).start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); - assertEquals(clusterService.state().nodes().resolveNodesIds(finalNodesIds).length, capturedRequests.size()); + assertEquals(clusterService.state().nodes().resolveNodes(finalNodesIds).length, capturedRequests.size()); } public void testNewResponseNullArray() { @@ -129,9 +129,9 @@ public class TransportNodesActionTests extends ESTestCase { assertTrue(failures.containsAll(response.failures())); } - public void testFiltering() throws Exception { - TransportNodesAction action = getFilteringTestTransportNodesAction(transportService); - TestNodesRequest request = new TestNodesRequest(); + public void testCustomResolving() throws Exception { + TransportNodesAction action = getDataNodesOnlyTransportNodesAction(transportService); + TestNodesRequest request = new TestNodesRequest(randomBoolean() ? 
null : generateRandomStringArray(10, 5, false, true)); PlainActionFuture listener = new PlainActionFuture<>(); action.new AsyncAction(null, request, listener).start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); @@ -221,8 +221,8 @@ public class TransportNodesActionTests extends ESTestCase { ); } - public FilteringTestTransportNodesAction getFilteringTestTransportNodesAction(TransportService transportService) { - return new FilteringTestTransportNodesAction( + public DataNodesOnlyTransportNodesAction getDataNodesOnlyTransportNodesAction(TransportService transportService) { + return new DataNodesOnlyTransportNodesAction( Settings.EMPTY, THREAD_POOL, clusterService, @@ -276,18 +276,18 @@ public class TransportNodesActionTests extends ESTestCase { } } - private static class FilteringTestTransportNodesAction + private static class DataNodesOnlyTransportNodesAction extends TestTransportNodesAction { - FilteringTestTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService + DataNodesOnlyTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor) { super(settings, threadPool, clusterService, transportService, actionFilters, request, nodeRequest, nodeExecutor); } @Override - protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) { - return nodes.getDataNodes().keys().toArray(String.class); + protected void resolveRequest(TestNodesRequest request, ClusterState clusterState) { + request.setConcreteNodes(clusterState.nodes().getDataNodes().values().toArray(DiscoveryNode.class)); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index b0942ab401c..984cd31b7a0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -91,7 +91,7 @@ public class DiscoveryNodesTests extends ESTestCase { expectedNodeIdsSet.add(discoveryNode.getId()); } - String[] resolvedNodesIds = discoveryNodes.resolveNodesIds(nodeSelectors.toArray(new String[nodeSelectors.size()])); + String[] resolvedNodesIds = discoveryNodes.resolveNodes(nodeSelectors.toArray(new String[nodeSelectors.size()])); Arrays.sort(resolvedNodesIds); String[] expectedNodesIds = expectedNodeIdsSet.toArray(new String[expectedNodeIdsSet.size()]); Arrays.sort(expectedNodesIds); diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java index 72f933462e0..aa6016774b0 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java @@ -24,9 +24,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; -import java.io.FilterInputStream; import java.io.IOException; -import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -35,6 +33,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class StreamTests extends ESTestCase { public void testRandomVLongSerialization() throws IOException 
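The resolveNodesIds-to-resolveNodes rename is exercised above; as a hypothetical illustration of what the method accepts (clusterState is assumed to be in scope):

    // Selectors may be "_all", "_local", "_master", explicit node IDs or names,
    // or attribute filters such as "data:true"; the result is concrete node IDs.
    String[] nodeIds = clusterState.nodes().resolveNodes("data:true", "master:false");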
{ for (int i = 0; i < 1024; i++) { @@ -121,4 +121,62 @@ public class StreamTests extends ESTestCase { streamInput.readBytes(new byte[bytesToRead], 0, bytesToRead); assertEquals(streamInput.available(), length - bytesToRead); } + + public void testWritableArrays() throws IOException { + + final String[] strings = generateRandomStringArray(10, 10, false, true); + WriteableString[] sourceArray = Arrays.stream(strings).map(WriteableString::new).toArray(WriteableString[]::new); + WriteableString[] targetArray; + BytesStreamOutput out = new BytesStreamOutput(); + + if (randomBoolean()) { + if (randomBoolean()) { + sourceArray = null; + } + out.writeOptionalArray(sourceArray); + targetArray = out.bytes().streamInput().readOptionalArray(WriteableString::new, WriteableString[]::new); + } else { + out.writeArray(sourceArray); + targetArray = out.bytes().streamInput().readArray(WriteableString::new, WriteableString[]::new); + } + + assertThat(targetArray, equalTo(sourceArray)); + } + + final static class WriteableString implements Writeable { + final String string; + + public WriteableString(String string) { + this.string = string; + } + + public WriteableString(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + WriteableString that = (WriteableString) o; + + return string.equals(that.string); + + } + + @Override + public int hashCode() { + return string.hashCode(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(string); + } + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 664f8cb96ab..dee20d6b32e 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -98,7 +98,7 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(0, aC.get()); assertEquals(0, bC.get()); try { - service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); + service.validateUpdate(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); fail("invalid value"); } catch (IllegalArgumentException ex) { assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage()); @@ -108,7 +108,7 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(0, consumer2.get()); assertEquals(0, aC.get()); assertEquals(0, bC.get()); - service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + service.validateUpdate(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); assertEquals(0, consumer.get()); assertEquals(0, consumer2.get()); assertEquals(0, aC.get()); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 109c59d845a..0187bb28f36 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -371,6 +371,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * This test isolates the master from rest of the cluster, waits for a new master to be elected, 
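validateUpdate (the renamed dryRun) underpins the ScopedSettingsTests assertions above: it runs the registered updaters against the proposed values and throws without applying anything. A minimal sketch with illustrative setting names and bounds:

    // A dynamic int setting with a lower bound of 0; the update consumer is a no-op.
    Setting<Integer> fooBar = Setting.intSetting("foo.bar", 1, 0, Property.Dynamic, Property.NodeScope);
    ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, Collections.<Setting<?>>singleton(fooBar));
    clusterSettings.addSettingsUpdateConsumer(fooBar, value -> {});

    // Throws IllegalArgumentException (-15 violates the bound); no state is changed.
    clusterSettings.validateUpdate(Settings.builder().put("foo.bar", -15).build());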
restores the partition * and verifies that all nodes agree on the new cluster state */ + @TestLogging("_root:DEBUG,cluster.service:TRACE,gateway:TRACE,indices.store:TRACE") public void testIsolateMasterAndVerifyClusterStateConsensus() throws Exception { final List nodes = startCluster(3); diff --git a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index 9ce2aa44ab6..948f4820439 100644 --- a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -270,8 +270,9 @@ public class AsyncShardFetchTests extends ESTestCase { } @Override - protected void asyncFetch(final ShardId shardId, String[] nodesIds) { - for (final String nodeId : nodesIds) { + protected void asyncFetch(final ShardId shardId, DiscoveryNode[] nodes) { + for (final DiscoveryNode node : nodes) { + final String nodeId = node.getId(); threadPool.generic().execute(new Runnable() { @Override public void run() { diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index aaa29ad1970..59f01f56ce1 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -565,7 +565,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { TransportNodesListGatewayStartedShards.NodesGatewayStartedShards response; response = internalCluster().getInstance(TransportNodesListGatewayStartedShards.class) - .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new String[]{node.getId()})) + .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new DiscoveryNode[]{node})) .get(); assertThat(response.getNodes(), hasSize(1)); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 99d6c56c88d..b417553a609 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -36,11 +36,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; @@ -118,8 +118,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testSimpleFullMatchAllocation() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); DiscoveryNode nodeToMatch = randomBoolean() ? 
node2 : node3; - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(nodeToMatch, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(nodeToMatch, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId())); @@ -131,8 +131,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testSyncIdMatch() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); DiscoveryNode nodeToMatch = randomBoolean() ? node2 : node3; - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(nodeToMatch, false, "MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(nodeToMatch, "MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId())); @@ -144,8 +144,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testFileChecksumMatch() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); DiscoveryNode nodeToMatch = randomBoolean() ? 
node2 : node3; - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(nodeToMatch, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(nodeToMatch, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId())); @@ -159,7 +159,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoPrimaryData() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); - testAllocator.addData(node2, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId)); @@ -171,7 +171,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoDataForReplicaOnAnyNode() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId)); @@ -183,8 +183,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoMatchingFilesForReplicaOnAnyNode() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "NO_MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId)); @@ -196,8 +196,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoOrThrottleDecidersRemainsInUnassigned() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(randomBoolean() ? 
noAllocationDeciders() : throttleAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); @@ -209,7 +209,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testThrottleWhenAllocatingToMatchingNode() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(new AllocationDeciders(Settings.EMPTY, - new AllocationDecider[]{new TestAllocateDecision(Decision.YES), new AllocationDecider(Settings.EMPTY) { + new AllocationDecider[]{new TestAllocateDecision(Decision.YES), new SameShardAllocationDecider(Settings.EMPTY), + new AllocationDecider(Settings.EMPTY) { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (node.node().equals(node2)) { @@ -218,8 +219,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { return Decision.YES; } }})); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); @@ -228,10 +229,10 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testDelayedAllocation() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); if (randomBoolean()) { // we sometime return empty list of files, make sure we test this as well - testAllocator.addData(node2, false, null); + testAllocator.addData(node2, null); } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -240,7 +241,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); - testAllocator.addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); 
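For reference, the delayed-allocation knob that testDelayedAllocation drives here is an ordinary index-scoped setting; an illustrative snippet, using the same key and value the test does:

    // Keep replicas unassigned for up to an hour after their node leaves,
    // rather than re-allocating them elsewhere immediately.
    Settings indexSettings = Settings.builder()
        .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))
        .build();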
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); @@ -249,9 +250,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testCancelRecoveryBetterSyncId() { RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node3, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node3, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); boolean changed = testAllocator.processExistingRecoveries(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); @@ -260,9 +261,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testNotCancellingRecoveryIfSyncedOnExistingRecovery() { RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node3, false, randomBoolean() ? "MATCH" : "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node3, randomBoolean() ? "MATCH" : "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); boolean changed = testAllocator.processExistingRecoveries(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(0)); @@ -270,8 +271,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testNotCancellingRecovery() { RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); boolean changed = testAllocator.processExistingRecoveries(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(0)); @@ -352,7 +353,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { return fetchDataCalled.getAndSet(false); } - public TestAllocator addData(DiscoveryNode node, boolean allocated, String syncId, StoreFileMetaData... files) { + public TestAllocator addData(DiscoveryNode node, String syncId, StoreFileMetaData... 
files) { if (data == null) { data = new HashMap<>(); } @@ -364,7 +365,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { if (syncId != null) { commitData.put(Engine.SYNC_COMMIT_ID, syncId); } - data.put(node, new TransportNodesListShardStoreMetaData.StoreFilesMetaData(allocated, shardId, + data.put(node, new TransportNodesListShardStoreMetaData.StoreFilesMetaData(shardId, new Store.MetadataSnapshot(unmodifiableMap(filesAsMap), unmodifiableMap(commitData), randomInt()))); return this; } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index f5ccd350348..2769534aee0 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -45,9 +44,9 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.cache.query.QueryCache; -import org.elasticsearch.index.cache.query.IndexQueryCache; import org.elasticsearch.index.cache.query.DisabledQueryCache; +import org.elasticsearch.index.cache.query.IndexQueryCache; +import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -61,9 +60,9 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -73,6 +72,7 @@ import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.TestSearchContext; @@ -84,11 +84,12 @@ import org.elasticsearch.watcher.ResourceWatcherService; import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import static java.util.Collections.emptyMap; + public class IndexModuleTests extends ESTestCase { private Index index; private Settings settings; @@ -147,7 +148,8 @@ public class IndexModuleTests extends ESTestCase 
{ } public void testWrapperIsBound() throws IOException { - IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(indexSettings, null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); module.setSearcherWrapper((s) -> new Wrapper()); module.engineFactory.set(new MockEngineFactory(AssertingDirectoryReader.class)); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener)); @@ -165,7 +167,8 @@ public class IndexModuleTests extends ESTestCase { .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store") .build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(indexSettings, null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); module.addIndexStore("foo_store", FooStore::new); try { module.addIndexStore("foo_store", FooStore::new); @@ -188,7 +191,8 @@ public class IndexModuleTests extends ESTestCase { } }; IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); - IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(indexSettings, null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); Consumer listener = (s) -> {}; module.addIndexEventListener(eventListener); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, @@ -204,7 +208,8 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { Setting booleanSetting = Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); Setting booleanSetting2 = Setting.boolSetting("index.foo.bar.baz", false, Property.Dynamic, Property.IndexScope); AtomicBoolean atomicBoolean = new AtomicBoolean(false); module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); @@ -224,7 +229,8 @@ public class IndexModuleTests extends ESTestCase { } public void testAddIndexOperationListener() throws IOException { - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); AtomicBoolean executed = new AtomicBoolean(false); IndexingOperationListener listener = new IndexingOperationListener() { @Override @@ -254,7 +260,8 @@ public class IndexModuleTests extends ESTestCase { } public void testAddSearchOperationListener() throws IOException { - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment)); + IndexModule module = 
new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); AtomicBoolean executed = new AtomicBoolean(false); SearchOperationListener listener = new SearchOperationListener() { @@ -289,7 +296,8 @@ public class IndexModuleTests extends ESTestCase { .put("index.similarity.my_similarity.key", "there is a key") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() { @Override public String name() { @@ -313,7 +321,8 @@ public class IndexModuleTests extends ESTestCase { } public void testFrozen() { - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); module.freeze(); String msg = "Can't modify IndexModule once the index service has been created"; assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSearchOperationListener(null)).getMessage()); @@ -331,7 +340,8 @@ public class IndexModuleTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); try { module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener)); @@ -346,7 +356,8 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); try { module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener)); @@ -359,7 +370,8 @@ public class IndexModuleTests extends ESTestCase { Settings indexSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, + new 
AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()); expectThrows(AlreadySetException.class, () -> module.forceQueryCacheProvider((a, b) -> new CustomQueryCache())); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, @@ -372,7 +384,8 @@ public class IndexModuleTests extends ESTestCase { Settings indexSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener)); assertTrue(indexService.cache().query() instanceof IndexQueryCache); @@ -384,7 +397,8 @@ public class IndexModuleTests extends ESTestCase { .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, listener)); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 9c5040589ae..3909354c989 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -65,6 +65,31 @@ public class IndexSettingsTests extends ESTestCase { assertEquals(42, integer.get()); } + public void testSettingsUpdateValidator() { + Version version = VersionUtils.getPreviousVersion(); + Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version) + .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); + final AtomicInteger integer = new AtomicInteger(0); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, + Property.Dynamic, Property.IndexScope); + IndexMetaData metaData = newIndexMeta("index", theSettings); + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set, + (i) -> {if (i == 42) throw new AssertionError("boom");}); + + assertEquals(version, settings.getIndexVersionCreated()); + assertEquals("0xdeadbeef", settings.getUUID()); + + assertFalse(settings.updateIndexMetaData(metaData)); + assertEquals(metaData.getSettings().getAsMap(), 
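Condensed from testSettingsUpdateValidator above: a dynamic index setting can register an update consumer together with a separate validator, and a rejected update leaves the previous value in place. The rejection value and message below are illustrative, and indexSettings is assumed to be an IndexSettings instance.

    Setting<Integer> intSetting = Setting.intSetting("index.test.setting.int", -1, Property.Dynamic, Property.IndexScope);
    AtomicInteger current = new AtomicInteger();
    // The validator runs first; only values it accepts reach the consumer.
    indexSettings.getScopedSettings().addSettingsUpdateConsumer(intSetting, current::set,
        value -> { if (value == 42) throw new IllegalArgumentException("42 is not allowed"); });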
settings.getSettings().getAsMap()); + assertEquals(0, integer.get()); + expectThrows(IllegalArgumentException.class, () -> settings.updateIndexMetaData(newIndexMeta("index", + Settings.builder().put(theSettings).put("index.test.setting.int", 42).build()))); + assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 41) + .build()))); + assertEquals(41, integer.get()); + } + public void testMergedSettingsArePassed() { Version version = VersionUtils.getPreviousVersion(); Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version) diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 596714b1c5b..f0e12abeac8 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -677,7 +677,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { client().prepareIndex(IDX, "doc", "4").setSource("foo", "eggplant").get(); flushAndRefresh(IDX); - SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).addDocValueField("foo").addSort("foo", SortOrder.ASC).get(); + SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).addFieldDataField("foo").addSort("foo", SortOrder.ASC).get(); assertHitCount(resp, 4); assertOrderedSearchHits(resp, "2", "3", "4", "1"); SearchHit[] hits = resp.getHits().hits(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index b72996bd1a1..52fcdd4bb2e 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; @@ -29,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -41,12 +43,15 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class AnalysisServiceTests extends ESTestCase { - private static AnalyzerProvider analyzerProvider(final String name) { + private static AnalyzerProvider analyzerProvider(final String name) { return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDEX, new EnglishAnalyzer()); } @@ -58,7 +63,8 @@ public class AnalysisServiceTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - AnalysisService 
analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()) + .build(idxSettings); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ -68,33 +74,28 @@ public class AnalysisServiceTests extends ESTestCase { Version version = VersionUtils.randomVersion(random()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default", analyzerProvider("default")), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + singletonMap("default", analyzerProvider("default")), emptyMap(), emptyMap(), emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); } - public void testOverrideDefaultIndexAnalyzer() { + public void testOverrideDefaultIndexAnalyzerIsUnsupported() { Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - try { - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default_index", new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer())), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); - fail("Expected ISE"); - } catch (IllegalArgumentException e) { - // expected - assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported")); - } + AnalyzerProvider defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), + singletonMap("default_index", defaultIndex), emptyMap(), emptyMap(), emptyMap())); + assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported")); } public void testBackCompatOverrideDefaultIndexAnalyzer() { - Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), + VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default_index", analyzerProvider("default_index")), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + singletonMap("default_index", analyzerProvider("default_index")), emptyMap(), 
emptyMap(), emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ -104,17 +105,17 @@ public class AnalysisServiceTests extends ESTestCase { Version version = VersionUtils.randomVersion(random()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default_search", analyzerProvider("default_search")), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + singletonMap("default_search", analyzerProvider("default_search")), emptyMap(), emptyMap(), emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); } public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() { - Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), + VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - Map<String, AnalyzerProvider> analyzers = new HashMap<>(); + Map<String, AnalyzerProvider<?>> analyzers = new HashMap<>(); analyzers.put("default_index", analyzerProvider("default_index")); analyzers.put("default_search", analyzerProvider("default_search")); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), @@ -125,7 +126,6 @@ public class AnalysisServiceTests extends ESTestCase { } public void testConfigureCamelCaseTokenFilter() throws IOException { - // tests a filter that Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -137,7 +137,9 @@ public class AnalysisServiceTests extends ESTestCase { .putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + + AnalysisService analysisService = new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry() + .build(idxSettings); try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) { assertNotNull(custom_analyser); TokenStream tokenStream = custom_analyser.tokenStream("foo", "J2SE j2ee"); @@ -176,8 +178,10 @@ public class AnalysisServiceTests extends ESTestCase { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); - AnalysisService
otherAnalysisSergice = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()) + .build(idxSettings); + AnalysisService otherAnalysisSergice = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), + emptyMap()).build(idxSettings); final int numIters = randomIntBetween(5, 20); for (int i = 0; i < numIters; i++) { PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values()); @@ -196,7 +200,8 @@ public class AnalysisServiceTests extends ESTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new AnalysisRegistry(null, new Environment(settings)).build(idxSettings)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()).build(idxSettings)); assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java index 3e33123f932..40ec2b412ff 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java @@ -23,14 +23,14 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.indices.analysis.HunspellService; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.nio.file.Path; -import java.util.Collections; + +import static java.util.Collections.emptyList; public class AnalysisTestsHelper { @@ -49,7 +49,6 @@ public class AnalysisTestsHelper { settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); } IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - Environment environment = new Environment(settings); - return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings); + return new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry().build(idxSettings); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java index 2b2c9288f17..3f2b1461ef3 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; +import static org.elasticsearch.test.ESTestCase.createAnalysisService; + /** */ public class CharFilterTests extends ESTokenStreamTestCase { @@ -39,7 +41,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { 
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = createAnalysisService(idxSettings, settings); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"}); @@ -56,7 +58,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = createAnalysisService(idxSettings, settings); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); @@ -78,7 +80,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = createAnalysisService(idxSettings, settings); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); assertTokenStreamContents(analyzer1.tokenStream("test", "faBBbBB aBbbbBf"), new String[]{"foo", "oof"}); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index 0e3af58dc90..0c9010b2c9b 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -31,15 +31,20 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory; import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.hamcrest.MatcherAssert; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; +import java.util.Map; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.instanceOf; @@ -50,8 +55,13 @@ public class CompoundAnalysisTests extends ESTestCase { public void testDefaultsCompoundAnalysis() throws Exception { Settings settings = getJsonSettings(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), - Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); + 
AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + return singletonMap("myfilter", MyFilterTokenFilterFactory::new); + } + })); + AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings); TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec"); MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); @@ -62,14 +72,20 @@ public class CompoundAnalysisTests extends ESTestCase { for (Settings settings : settingsArr) { List<String> terms = analyze(settings, "decompoundingAnalyzer", "donaudampfschiff spargelcremesuppe"); MatcherAssert.assertThat(terms.size(), equalTo(8)); - MatcherAssert.assertThat(terms, hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe")); + MatcherAssert.assertThat(terms, + hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe")); } } private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), - Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); + AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + return singletonMap("myfilter", MyFilterTokenFilterFactory::new); + } + })); + AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings); Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java index 3f5ad6fed4f..caefb1039c2 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; +import static org.elasticsearch.test.ESTestCase.createAnalysisService; import static org.hamcrest.Matchers.containsString; public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { @@ -39,7 +40,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = createAnalysisService(idxSettings, settings); NamedAnalyzer analyzer1 = analysisService.analyzer("single"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java index 39641c281aa..88c5fe692d6 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java @@ -27,6 +27,8 @@ import
org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; +import static org.elasticsearch.test.ESTestCase.createAnalysisService; + public class StopAnalyzerTests extends ESTokenStreamTestCase { public void testDefaultsCompoundAnalysis() throws Exception { String json = "/org/elasticsearch/index/analysis/stop.json"; @@ -36,7 +38,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = createAnalysisService(idxSettings, settings); NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java index 104bd17fb33..9e4d5b27ad7 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java @@ -26,13 +26,10 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.lucene.all.AllEntries; import org.elasticsearch.common.lucene.all.AllTokenStream; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -67,8 +64,7 @@ public class SynonymsAnalysisTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); - + analysisService = createAnalysisService(idxSettings, settings); match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!"); match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!"); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index fdf1411fdf8..80f1cbe46d0 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -36,9 +36,7 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; @@ -97,7 +95,7 @@ public class CodecTests extends ESTestCase { .build(); 
IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings); SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings); + AnalysisService analysisService = createAnalysisService(settings, nodeSettings); MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()); MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null); return new CodecService(service, ESLoggerFactory.getLogger("test")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java index da0c3d081af..31e4e2d0923 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java @@ -166,9 +166,9 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { private SearchRequestBuilder prepareSearch() { SearchRequestBuilder request = client().prepareSearch("test").setTypes("test"); - request.addStoredField("foo.token_count"); + request.addField("foo.token_count"); if (loadCountedFields) { - request.addStoredField("foo"); + request.addField("foo"); } return request; } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 14dd370fbfd..202afd7a4b1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -816,7 +816,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); // match all search with geohash field - SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields(); // ensure single geohash was indexed @@ -841,7 +841,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); // match all search with geohash field (includes prefixes) - SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields(); List<Object> hashes = m.get("location.geohash").values(); @@ -872,11 +872,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } // query by geohash subfield - SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); assertEquals(numDocs,
searchResponse.getHits().totalHits()); // query by latlon subfield - searchResponse = client().prepareSearch().addStoredField("location.latlon").setQuery(matchAllQuery()).execute().actionGet(); + searchResponse = client().prepareSearch().addField("location.latlon").setQuery(matchAllQuery()).execute().actionGet(); assertEquals(numDocs, searchResponse.getHits().totalHits()); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 841f35bd975..d4ba6ca9062 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -218,8 +218,8 @@ public class InnerHitBuilderTests extends ESTestCase { innerHits.setExplain(randomBoolean()); innerHits.setVersion(randomBoolean()); innerHits.setTrackScores(randomBoolean()); - innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); - innerHits.setDocValueFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); + innerHits.setFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); + innerHits.setFieldDataFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); // Random script fields deduped on their field name. Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>(); for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) { @@ -294,11 +294,11 @@ public class InnerHitBuilderTests extends ESTestCase { break; case 6: if (randomBoolean()) { - instance.setDocValueFields(randomValueOtherThan(instance.getDocValueFields(), () -> { + instance.setFieldDataFields(randomValueOtherThan(instance.getFieldDataFields(), () -> { return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); })); } else { - instance.addDocValueField(randomAsciiOfLengthBetween(1, 16)); + instance.addFieldDataField(randomAsciiOfLengthBetween(1, 16)); } break; case 7: @@ -341,12 +341,12 @@ HighlightBuilderTests::randomHighlighterBuilder)); break; case 11: - if (instance.getStoredFieldNames() == null || randomBoolean()) { - instance.setStoredFieldNames(randomValueOtherThan(instance.getStoredFieldNames(), () -> { + if (instance.getFieldNames() == null || randomBoolean()) { + instance.setFieldNames(randomValueOtherThan(instance.getFieldNames(), () -> { return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); })); } else { - instance.getStoredFieldNames().add(randomAsciiOfLengthBetween(1, 16)); + instance.getFieldNames().add(randomAsciiOfLengthBetween(1, 16)); } break; default: diff --git a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 590c5c624f1..163d72f4553 100644 --- a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -113,7 +113,7 @@ public class ExceptionRetryIT extends ESIntegTestCase { } refresh(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get(); + SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addField("_id").get(); Set<String> uniqueIds = new HashSet<>(); long dupCounter = 0; diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java 
b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 4f3e68a9115..e005fa400ef 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -933,7 +933,7 @@ public class StoreTests extends ESTestCase { public void testStreamStoreFilesMetaData() throws Exception { Store.MetadataSnapshot metadataSnapshot = createMetaDataSnapshot(); - TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", "_na_", 0),metadataSnapshot); + TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(new ShardId("test", "_na_", 0),metadataSnapshot); ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); org.elasticsearch.Version targetNodeVersion = randomVersion(random()); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java similarity index 87% rename from core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java rename to core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index e20e7d1a7c9..869ac622b39 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; @@ -36,9 +36,20 @@ import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.Analysis; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.analysis.AnalysisTestsHelper; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.MappingCharFilterFactory; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory; +import org.elasticsearch.index.analysis.StandardTokenizerFactory; +import org.elasticsearch.index.analysis.StopTokenFilterFactory; +import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.indices.analysis.HunspellService; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.test.IndexSettingsModule; import org.hamcrest.MatcherAssert; @@ -49,9 +60,11 @@ import java.io.StringReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; +import java.util.Map; import java.util.Set; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; 
import static org.hamcrest.Matchers.instanceOf; @@ -72,8 +85,16 @@ public class AnalysisModuleTests extends ModuleTestCase { } public AnalysisRegistry getNewRegistry(Settings settings) { - return new AnalysisRegistry(null, new Environment(settings), - Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.emptyMap(), Collections.emptyMap()); + try { + return new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + return singletonMap("myfilter", MyFilterTokenFilterFactory::new); + } + })).getAnalysisRegistry(); + } catch (IOException e) { + throw new RuntimeException(e); + } } private Settings loadFromClasspath(String path) throws IOException { @@ -125,7 +146,7 @@ public class AnalysisModuleTests extends ModuleTestCase { assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion()); } - private void assertTokenFilter(String name, Class clazz) throws IOException { + private void assertTokenFilter(String name, Class<?> clazz) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); @@ -148,17 +169,9 @@ public class AnalysisModuleTests extends ModuleTestCase { StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0]; assertThat(stop1.stopWords().size(), equalTo(1)); - //assertThat((Iterable) stop1.stopWords(), hasItem("test-stop".toCharArray())); analyzer = analysisService.analyzer("custom2").analyzer(); assertThat(analyzer, instanceOf(CustomAnalyzer.class)); - CustomAnalyzer custom2 = (CustomAnalyzer) analyzer; - -// HtmlStripCharFilterFactory html = (HtmlStripCharFilterFactory) custom2.charFilters()[0]; -// assertThat(html.readAheadLimit(), equalTo(HTMLStripCharFilter.DEFAULT_READ_AHEAD)); -// -// html = (HtmlStripCharFilterFactory) custom2.charFilters()[1]; -// assertThat(html.readAheadLimit(), equalTo(1024)); // verify position increment gap analyzer = analysisService.analyzer("custom6").analyzer(); @@ -248,7 +261,8 @@ public class AnalysisModuleTests extends ModuleTestCase { getAnalysisService(settings); fail("This should fail with IllegalArgumentException because the analyzers name starts with _"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")).or(equalTo("analyzer name must not start with '_'. got \"_invalidName\""))); + assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")) + .or(equalTo("analyzer name must not start with '_'. 
got \"_invalidName\""))); } } @@ -289,13 +303,18 @@ public class AnalysisModuleTests extends ModuleTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); Environment environment = new Environment(settings); - AnalysisModule module = new AnalysisModule(environment); InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff"); InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic"); + Dictionary dictionary; try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) { - Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic); - module.registerHunspellDictionary("foo", dictionary); - assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary); + dictionary = new Dictionary(tmp, "hunspell", aff, dic); } + AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() { + @Override + public Map getHunspellDictionaries() { + return singletonMap("foo", dictionary); + } + })); + assertSame(dictionary, module.getHunspellService().getDictionary("foo")); } } diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java index 86c54f2ece9..bbfeacfc590 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java @@ -19,15 +19,38 @@ package org.elasticsearch.indices.analysis; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class DummyAnalysisPlugin extends Plugin { +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerAnalyzer("dummy", (a, b, c, d) -> new DummyAnalyzerProvider()); - module.registerTokenFilter("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory()); - module.registerTokenizer("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory()); - module.registerCharFilter("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory()); +import static java.util.Collections.singletonMap; + +public class DummyAnalysisPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map> getCharFilters() { + return singletonMap("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory()); + } + + @Override + public Map> getTokenFilters() { + return singletonMap("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory()); + } + + @Override + public Map> getTokenizers() { + return singletonMap("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory()); + } + + @Override + public Map>> getAnalyzers() { + return singletonMap("dummy", (a, b, c, d) -> new DummyAnalyzerProvider()); } } diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java similarity index 58% rename from core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java rename to 
core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java index 84ce2c5da50..ba4467a5630 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java @@ -19,36 +19,30 @@ package org.elasticsearch.indices.analyze; import org.apache.lucene.analysis.hunspell.Dictionary; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.indices.analysis.HunspellService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.hamcrest.Matchers; +import org.elasticsearch.test.ESTestCase; +import static java.util.Collections.emptyMap; import static org.elasticsearch.indices.analysis.HunspellService.HUNSPELL_IGNORE_CASE; import static org.elasticsearch.indices.analysis.HunspellService.HUNSPELL_LAZY_LOAD; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.notNullValue; -/** - * - */ -@ClusterScope(scope= Scope.TEST, numDataNodes=0) -public class HunspellServiceIT extends ESIntegTestCase { +public class HunspellServiceTests extends ESTestCase { public void testLocaleDirectoryWithNodeLevelConfig() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) .put(HUNSPELL_IGNORE_CASE.getKey(), true) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - internalCluster().startNode(settings); - Dictionary dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); + Dictionary dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US"); assertThat(dictionary, notNullValue()); - assertIgnoreCase(true, dictionary); + assertTrue(dictionary.getIgnoreCase()); } public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception { @@ -58,58 +52,42 @@ public class HunspellServiceIT extends ESIntegTestCase { .put(HUNSPELL_IGNORE_CASE.getKey(), true) .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false) .put("indices.analysis.hunspell.dictionary.en_US.ignore_case", false) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - internalCluster().startNode(settings); - Dictionary dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); + Dictionary dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US"); assertThat(dictionary, notNullValue()); - assertIgnoreCase(false, dictionary); - - + assertFalse(dictionary.getIgnoreCase()); // testing that dictionary specific settings override node level settings - dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US_custom"); + dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US_custom"); assertThat(dictionary, notNullValue()); - assertIgnoreCase(true, dictionary); + assertTrue(dictionary.getIgnoreCase()); } public void testDicWithNoAff() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_CONF_SETTING.getKey(), 
getDataPath("/indices/analyze/no_aff_conf_dir")) .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - Dictionary dictionary = null; - try { - internalCluster().startNode(settings); - dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); - fail("Missing affix file didn't throw an error"); - } - catch (Throwable t) { - assertNull(dictionary); - assertThat(ExceptionsHelper.unwrap(t, ElasticsearchException.class).toString(), Matchers.containsString("Missing affix file")); - } + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US")); + assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage()); + assertThat(e.getCause(), hasToString(containsString("Missing affix file"))); } public void testDicWithTwoAffs() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/two_aff_conf_dir")) .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - Dictionary dictionary = null; - try { - internalCluster().startNode(settings); - dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); - fail("Multiple affix files didn't throw an error"); - } catch (Throwable t) { - assertNull(dictionary); - assertThat(ExceptionsHelper.unwrap(t, ElasticsearchException.class).toString(), Matchers.containsString("Too many affix files")); - } - } - - // TODO: on next upgrade of lucene, just use new getter - private void assertIgnoreCase(boolean expected, Dictionary dictionary) throws Exception { - // assertEquals(expected, dictionary.getIgnoreCase()); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US")); + assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage()); + assertThat(e.getCause(), hasToString(containsString("Too many affix files"))); } } diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index fc5b68f87d1..09441f70110 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -19,7 +19,6 @@ package org.elasticsearch.indices.cluster; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -156,7 +155,7 @@ public class ClusterStateChanges { metaDataIndexUpgradeService, nodeServicesProvider, indicesService); MetaDataDeleteIndexService deleteIndexService = new MetaDataDeleteIndexService(settings, clusterService, allocationService); MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(settings, clusterService, - allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, new IndexNameExpressionResolver(settings)); + allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, nodeServicesProvider); MetaDataCreateIndexService createIndexService = new 
MetaDataCreateIndexService(settings, clusterService, indicesService, allocationService, new AliasValidator(settings), Collections.emptySet(), environment, nodeServicesProvider, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index e7e3cb32226..2a67742fc68 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -29,7 +29,9 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.MergePolicyConfig; @@ -37,9 +39,13 @@ import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; @@ -53,6 +59,42 @@ import static org.hamcrest.Matchers.nullValue; public class UpdateSettingsIT extends ESIntegTestCase { + + public void testInvalidDynamicUpdate() { + createIndex("test"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder() + .put("index.dummy", "boom") + ) + .execute().actionGet()); + assertEquals(exception.getCause().getMessage(), "this setting goes boom"); + IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertNotEquals(indexMetaData.getSettings().get("index.dummy"), "invalid dynamic value"); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(DummySettingPlugin.class); + } + + public static class DummySettingPlugin extends Plugin { + public static final Setting<String> DUMMY_SETTING = Setting.simpleString("index.dummy", + Setting.Property.IndexScope, Setting.Property.Dynamic); + @Override + public void onIndexModule(IndexModule indexModule) { + indexModule.addSettingsUpdateConsumer(DUMMY_SETTING, (s) -> {}, (s) -> { + if (s.equals("boom")) + throw new IllegalArgumentException("this setting goes boom"); + }); + } + + @Override + public List<Setting<?>> getSettings() { + return Collections.singletonList(DUMMY_SETTING); + } + } + public void testResetDefault() { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index acdfdd12266..493f8b74e04 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ 
b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -116,7 +116,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { ensureGreen(); SearchResponse searchResponse = client().prepareSearch("test_index") .setQuery(termQuery("field1", "value1")) - .addStoredField("field1").addStoredField("field2") + .addField("field1").addField("field2") .execute().actionGet(); assertHitCount(searchResponse, 1); @@ -130,7 +130,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { // now only match on one template (template_1) searchResponse = client().prepareSearch("text_index") .setQuery(termQuery("field1", "value1")) - .addStoredField("field1").addStoredField("field2") + .addField("field1").addField("field2") .execute().actionGet(); if (searchResponse.getFailedShards() > 0) { logger.warn("failed search {}", Arrays.toString(searchResponse.getShardFailures())); diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 1145e594b25..bc9909b21c1 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -219,7 +219,7 @@ public class RelocationIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { try { logger.info("--> START search test round {}", i + 1); - SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoStoredFields().execute().actionGet().getHits(); + SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoFields().execute().actionGet().getHits(); ranOnce = true; if (hits.totalHits() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 5cc6ec58630..49ef9e1b6b5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -181,7 +181,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same value for NUMBER_FIELD_NAME. This will check that after // random indexing each document only has 1 value for NUMBER_FIELD_NAME and it is the correct value. Following this initial change it seems that this call was getting // more than 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type - SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME) + SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME) .order(SortOrder.ASC)).setSize(5000).get(); assertSearchResponse(response); long totalHits = response.getHits().totalHits(); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index a3eac59ac2c..9d66669d07c 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.builder; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -89,7 +90,9 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; public class SearchSourceBuilderTests extends ESTestCase { private static Injector injector; @@ -219,12 +222,12 @@ public class SearchSourceBuilderTests extends ESTestCase { for (int i = 0; i < fieldsSize; i++) { fields.add(randomAsciiOfLengthBetween(5, 50)); } - builder.storedFields(fields); + builder.fields(fields); } if (randomBoolean()) { int fieldDataFieldsSize = randomInt(25); for (int i = 0; i < fieldDataFieldsSize; i++) { - builder.docValueField(randomAsciiOfLengthBetween(5, 50)); + builder.fieldDataField(randomAsciiOfLengthBetween(5, 50)); } } if (randomBoolean()) { @@ -593,6 +596,27 @@ public class SearchSourceBuilderTests extends ESTestCase { } } + public void testTimeoutWithUnits() throws IOException { + final String timeout = randomTimeValue(); + final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}"; + try (XContentParser parser = XContentFactory.xContent(query).createParser(query)) { + final SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, suggesters); + assertThat(builder.timeoutInMillis(), equalTo(TimeValue.parseTimeValue(timeout, null, "timeout").millis())); + } + } + + public void testTimeoutWithoutUnits() throws IOException { + final int timeout = randomIntBetween(1, 1024); + final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}"; + try (XContentParser parser = XContentFactory.xContent(query).createParser(query)) { + final ElasticsearchParseException e = + expectThrows( + ElasticsearchParseException.class, + () -> SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, suggesters)); + assertThat(e, hasToString(containsString("unit is missing or unrecognized"))); + } + } + public void testEmptyPostFilter() throws IOException { SearchSourceBuilder builder = new SearchSourceBuilder(); String query = "{ \"post_filter\": {} }"; diff --git 
a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 68679e89ae6..f8ca1e1aaf7 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -202,7 +202,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { refresh(); // TEST FETCHING _parent from child - SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).storedFields("_parent").execute() + SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).fields("_parent").execute() .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); @@ -210,7 +210,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1")); // TEST matching on parent - searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).storedFields("_parent").get(); + searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).fields("_parent").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -218,7 +218,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1")); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).storedFields("_parent").get(); + searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).fields("_parent").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -1394,7 +1394,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchResponse scrollResponse = client().prepareSearch("test") .setScroll(TimeValue.timeValueSeconds(30)) .setSize(1) - .addStoredField("_id") + .addField("_id") .setQuery(query) .execute() .actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 985605c4e65..0debdb263af 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -415,13 +415,13 @@ public class GeoFilterIT extends ESIntegTestCase { assertThat(hit.getId(), equalTo(key)); } - SearchResponse world = client().prepareSearch().addStoredField("pin").setQuery( + SearchResponse world = client().prepareSearch().addField("pin").setQuery( geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999) ).execute().actionGet(); assertHitCount(world, 53); - SearchResponse distance = client().prepareSearch().addStoredField("pin").setQuery( + SearchResponse distance = client().prepareSearch().addField("pin").setQuery( geoDistanceQuery("pin").distance("425km").point(51.11, 9.851) ).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java 
b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 7da18342edf..067c2dcac05 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.geo; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.settings.Settings; import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; @@ -54,6 +56,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; public class GeoShapeQueryTests extends ESSingleNodeTestCase { @@ -197,6 +200,30 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase { assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1")); } + public void testIndexedShapeReferenceSourceDisabled() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject() + .endObject() + .endObject(); + client().admin().indices().prepareCreate("test").addMapping("type1", mapping).get(); + createIndex("shapes", Settings.EMPTY, "shape_type", "_source", "enabled=false"); + ensureGreen(); + + ShapeBuilder shape = ShapeBuilders.newEnvelope(new Coordinate(-45, 45), new Coordinate(45, -45)); + + client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject() + .field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get(); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch("test").setTypes("type1") + .setQuery(geoIntersectionQuery("location", "Big_Rectangle", "shape_type")).get()); + assertThat(e.getRootCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getRootCause().getMessage(), containsString("source disabled")); + } + public void testReusableBuilder() throws IOException { ShapeBuilder polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder() .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()) diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index 56b33c6007e..01f98564814 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -156,7 +156,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) .setExplain(true) - .addDocValueField("comments.message") + .addFieldDataField("comments.message") .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) .setSize(1) )).get(); @@ -287,7 +287,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setQuery( hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit( new InnerHitBuilder() - .addDocValueField("message") + .addFieldDataField("message") 
.setHighlightBuilder(new HighlightBuilder().field("message")) .setExplain(true).setSize(1) .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 8cb21dadb9b..1d54b412d6c 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1148,6 +1148,7 @@ public class SearchQueryIT extends ESIntegTestCase { jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("arr").startObject("properties").startObject("term").field("type", "text") .endObject().endObject().endObject().endObject().endObject().endObject())); + assertAcked(prepareCreate("lookup3").addMapping("type", "_source", "enabled=false", "terms","type=text")); assertAcked(prepareCreate("test").addMapping("type", "term", "type=text")); indexRandom(true, @@ -1172,6 +1173,7 @@ public class SearchQueryIT extends ESIntegTestCase { .startObject().field("term", "4").endObject() .endArray() .endObject()), + client().prepareIndex("lookup3", "type", "1").setSource("terms", new String[]{"1", "3"}), client().prepareIndex("test", "type", "1").setSource("term", "1"), client().prepareIndex("test", "type", "2").setSource("term", "2"), client().prepareIndex("test", "type", "3").setSource("term", "3"), @@ -1227,6 +1229,16 @@ public class SearchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); assertHitCount(searchResponse, 0L); + + // index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms + searchResponse = client().prepareSearch("test") + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "missing", "terms"))).get(); + assertHitCount(searchResponse, 0L); + + // index "lookup3" type "type" has the source disabled: ignore the lookup terms + searchResponse = client().prepareSearch("test") + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup3", "type", "1", "terms"))).get(); + assertHitCount(searchResponse, 0L); } public void testBasicQueryById() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java b/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java index df147ce106f..33fcb55cada 100644 --- a/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -37,10 +37,10 @@ public class SourceFetchingIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("test").get(); assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - response = client().prepareSearch("test").addStoredField("bla").get(); + response = client().prepareSearch("test").addField("bla").get(); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - response = client().prepareSearch("test").addStoredField("_source").get(); + response = client().prepareSearch("test").addField("_source").get(); assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); } diff --git a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java index e81b4decb2d..3573089fcaa 100644 --- 
a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java +++ b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java @@ -87,7 +87,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .setSize(randomIntBetween(1, numDocs + 5)) .addSort("_timestamp", order) - .addStoredField("_timestamp") + .addField("_timestamp") .execute().actionGet(); assertNoFailures(searchResponse); SearchHit[] hits = searchResponse.getHits().hits(); diff --git a/docs/plugins/mapper-attachments.asciidoc b/docs/plugins/mapper-attachments.asciidoc index f2c034a317e..b287decd4cf 100644 --- a/docs/plugins/mapper-attachments.asciidoc +++ b/docs/plugins/mapper-attachments.asciidoc @@ -246,7 +246,7 @@ PUT /test/person/1?refresh=true } GET /test/person/_search { - "stored_fields": [ "file.content_type" ], + "fields": [ "file.content_type" ], "query": { "match": { "file.content_type": "text plain" @@ -367,7 +367,7 @@ PUT /test/person/1?refresh=true } GET /test/person/_search { - "stored_fields": [], + "fields": [], "query": { "match": { "file.content": "king queen" diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 639fabb62fd..ba9899f9d68 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -101,7 +101,7 @@ In the above example, the `init_script` creates an array `transactions` in the ` map_script:: Executed once per document collected. This is the only required script. If no combine_script is specified, the resulting state needs to be stored in an object named `_agg`. + -In the above example, the `map_script` checks the value of the type field. If the value if 'sale' the value of the amount field +In the above example, the `map_script` checks the value of the type field. If the value is 'sale' the value of the amount field is added to the transactions array. If the value of the type field is not 'sale' the negated value of the amount field is added to transactions. diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index 2bad60ba0ec..1dd1524a58e 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -22,7 +22,7 @@ The top_hits aggregation returns regular search hits, because of this many per h * <> * <> * <> -* <> +* <> * <> ==== Example diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index d63ddb52f4b..1d6544cdd92 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -146,7 +146,8 @@ You can also use the same source filtering parameters to control which parts of curl -XGET 'http://localhost:9200/twitter/tweet/1/_source?_source_include=*.id&_source_exclude=entities' -------------------------------------------------- -Note, there is also a HEAD variant for the _source endpoint to efficiently test for document existence. +Note, there is also a HEAD variant for the _source endpoint to efficiently test for document _source existence. +An existing document will not have a _source if it is disabled in the <>. 
Curl example: [source,js] diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 8175c21ee8a..bb5a367a04c 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -28,7 +28,7 @@ help reduce the cost of each fsync. Make sure to watch for `TOO_MANY_REQUESTS (429)` response codes (`EsRejectedExecutionException` with the Java client), which is the way that elasticsearch tells you that it cannot keep up with the current indexing rate. -When it happens, you should pause ndexing a bit before trying again, ideally +When it happens, you should pause indexing a bit before trying again, ideally with randomized exponential backoff. Similarly to sizing bulk requests, only testing can tell what the optimal diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc index b6e9c4e927d..67848c9edca 100644 --- a/docs/reference/how-to/search-speed.asciidoc +++ b/docs/reference/how-to/search-speed.asciidoc @@ -190,5 +190,5 @@ depending on the file extension using the <> setting. WARNING: Loading data into the filesystem cache eagerly on too many indices or -too many files will make searh _slower_ if the filesystem cache is not large +too many files will make search _slower_ if the filesystem cache is not large enough to hold all the data. Use with caution. diff --git a/docs/reference/mapping/params/store.asciidoc b/docs/reference/mapping/params/store.asciidoc index 53cac7493ff..d34d1016546 100644 --- a/docs/reference/mapping/params/store.asciidoc +++ b/docs/reference/mapping/params/store.asciidoc @@ -48,7 +48,7 @@ PUT my_index/my_type/1 GET my_index/_search { - "stored_fields": [ "title", "date" ] <2> + "fields": [ "title", "date" ] <2> } -------------------------------------------------- // CONSOLE diff --git a/docs/reference/migration/migrate_5_0/plugins.asciidoc b/docs/reference/migration/migrate_5_0/plugins.asciidoc index a1c0dad9ca1..9023c84b410 100644 --- a/docs/reference/migration/migrate_5_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_5_0/plugins.asciidoc @@ -122,3 +122,8 @@ been removed. Plugins that register custom scripts should implement `ScriptPlugin` and remove their `onModule(ScriptModule)` implementation. + +==== AnalysisPlugin + +Plugins that register custom analysis components should implement +`AnalysisPlugin` and remove their `onModule(AnalysisModule)` implementation. diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index 09478ee3251..72b29e8a9e7 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -64,15 +64,11 @@ characteristics as the former `scan` search type. ==== `fields` parameter -The `fields` parameter has been replaced by `stored_fields`. -The `stored_fields` parameter will only return stored fields +The `fields` parameter used to try to retrieve field values from stored +fields, and fall back to extracting from the `_source` if a field is not +marked as stored. Now, the `fields` parameter will only return stored fields -- it will no longer extract values from the `_source`. -==== `fielddata_fields` parameter - -The `fielddata_fields` has been deprecated, use parameter `docvalue_fields` instead. 
- - ==== search-exists API removed The search exists API has been removed in favour of using the search API with diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 4863588818f..be578b00c3b 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -250,11 +250,11 @@ Due to the fact that indexed script has been replaced by stored scripts the following settings have been renamed: * `script.indexed` has been replaced by `script.stored` -* `script.engine.*.indexed.aggs` has been replaced by `script.engine.*.stored.aggs` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.mapping` has been replaced by `script.engine.*.stored.mapping` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.search` has been replaced by `script.engine.*.stored.search` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.update` has been replaced by `script.engine.*.stored.update` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.plugin` has been replaced by `script.engine.*.stored.plugin` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) +* `script.engine.*.indexed.aggs` has been replaced by `script.engine.*.stored.aggs` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.mapping` has been replaced by `script.engine.*.stored.mapping` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.search` has been replaced by `script.engine.*.stored.search` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.update` has been replaced by `script.engine.*.stored.update` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.plugin` has been replaced by `script.engine.*.stored.plugin` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.)
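To make the renames above concrete, a minimal sketch of building node settings with the new keys -- the `groovy` language and the `true` values are illustrative only, not part of the migration note:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.settings.Settings;

// Sketch: each 2.x "indexed" key is simply replaced by its "stored" counterpart.
Settings settings = Settings.builder()
    .put("script.stored", true)                      // was: script.indexed
    .put("script.engine.groovy.stored.aggs", true)   // was: script.engine.groovy.indexed.aggs
    .put("script.engine.groovy.stored.search", true) // was: script.engine.groovy.indexed.search
    .build();
--------------------------------------------------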
==== Script mode settings diff --git a/docs/reference/modules/scripting/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc index 3633d2c3e71..db68db2c7a3 100644 --- a/docs/reference/modules/scripting/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -143,7 +143,7 @@ First, let's look at the source data for a player by submitting the following re ---------------------------------------------------------------- GET hockey/_search { - "stored_fields": [ + "fields": [ "_id", "_source" ], diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index a9adc157bd3..958320ea110 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -143,11 +143,11 @@ include::request/sort.asciidoc[] include::request/source-filtering.asciidoc[] -include::request/stored-fields.asciidoc[] +include::request/fields.asciidoc[] include::request/script-fields.asciidoc[] -include::request/docvalue-fields.asciidoc[] +include::request/fielddata-fields.asciidoc[] include::request/post-filter.asciidoc[] diff --git a/docs/reference/search/request/docvalue-fields.asciidoc b/docs/reference/search/request/docvalue-fields.asciidoc deleted file mode 100644 index b4d2493d853..00000000000 --- a/docs/reference/search/request/docvalue-fields.asciidoc +++ /dev/null @@ -1,23 +0,0 @@ -[[search-request-docvalue-fields]] -=== Doc value Fields - -Allows to return the <> representation of a field for each hit, for -example: - -[source,js] --------------------------------------------------- -GET /_search -{ - "query" : { - "match_all": {} - }, - "docvalue_fields" : ["test1", "test2"] -} --------------------------------------------------- -// CONSOLE - -Doc value fields can work on fields that are not stored. - -Note that if the fields parameter specifies fields without docvalues it will try to load the value from the fielddata cache -causing the terms for that field to be loaded to memory (cached), which will result in more memory consumption. - diff --git a/docs/reference/search/request/fielddata-fields.asciidoc b/docs/reference/search/request/fielddata-fields.asciidoc new file mode 100644 index 00000000000..f3a3508b144 --- /dev/null +++ b/docs/reference/search/request/fielddata-fields.asciidoc @@ -0,0 +1,23 @@ +[[search-request-fielddata-fields]] +=== Field Data Fields + +Allows to return the <> representation of a field for each hit, for +example: + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query" : { + "match_all": {} + }, + "fielddata_fields" : ["test1", "test2"] +} +-------------------------------------------------- +// CONSOLE + +Field data fields can work on fields that are not stored. + +It's important to understand that using the `fielddata_fields` parameter will +cause the terms for that field to be loaded to memory (cached), which will +result in more memory consumption. 
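For completeness, the Java API counterpart of the `fielddata_fields` request section restored above is `addFieldDataField` on the search request builder -- a minimal sketch, assuming a connected `client` (the index and field names are placeholders that mirror the REST example):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

// Ask for the fielddata representation of test1 and test2 for each hit;
// like the REST parameter, this works on fields that are not stored.
SearchResponse response = client.prepareSearch("my_index")
        .setQuery(matchAllQuery())
        .addFieldDataField("test1")
        .addFieldDataField("test2")
        .get();
--------------------------------------------------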
diff --git a/docs/reference/search/request/stored-fields.asciidoc b/docs/reference/search/request/fields.asciidoc similarity index 91% rename from docs/reference/search/request/stored-fields.asciidoc rename to docs/reference/search/request/fields.asciidoc index 3d5b8c01b47..3483d470ee2 100644 --- a/docs/reference/search/request/stored-fields.asciidoc +++ b/docs/reference/search/request/fields.asciidoc @@ -1,7 +1,7 @@ [[search-request-fields]] === Fields -WARNING: The `stored_fields` parameter is about fields that are explicitly marked as +WARNING: The `fields` parameter is about fields that are explicitly marked as stored in the mapping, which is off by default and generally not recommended. Use <> instead to select subsets of the original source document to be returned. @@ -13,7 +13,7 @@ by a search hit. -------------------------------------------------- GET /_search { - "stored_fields" : ["user", "postDate"], + "fields" : ["user", "postDate"], "query" : { "term" : { "user" : "kimchy" } } @@ -30,7 +30,7 @@ returned, for example: -------------------------------------------------- GET /_search { - "stored_fields" : [], + "fields" : [], "query" : { "term" : { "user" : "kimchy" } } diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index efb7053c179..c235c37b338 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -372,7 +372,7 @@ query and the rescore query in `highlight_query`. -------------------------------------------------- GET /_search { - "stored_fields": [ "_id" ], + "fields": [ "_id" ], "query" : { "match": { "content": { diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index 3c8e0e9f00e..345bc9abde2 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -72,7 +72,7 @@ Inner hits also supports the following per document features: * <> * <> * <> -* <> +* <> * <> [[nested-inner-hits]] diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc index ba36992f6fb..496f04ea4af 100644 --- a/docs/reference/search/uri-request.asciidoc +++ b/docs/reference/search/uri-request.asciidoc @@ -83,7 +83,7 @@ hits was computed. part of the document by using `_source_include` & `_source_exclude` (see the <> documentation for more details) -|`stored_fields` |The selective stored fields of the document to return for each hit, +|`fields` |The selective stored fields of the document to return for each hit, comma delimited. Not specifying any value will cause no fields to return. |`sort` |Sorting to perform. 
Can either be in the form of `fieldName`, or diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java index 0d7dd4e12e2..e9c4bf6e359 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java @@ -72,28 +72,28 @@ public class GeoDistanceTests extends ESIntegTestCase { refresh(); - SearchResponse searchResponse1 = client().prepareSearch().addStoredField("_source") + SearchResponse searchResponse1 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistance(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance1, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d)); - SearchResponse searchResponse2 = client().prepareSearch().addStoredField("_source") + SearchResponse searchResponse2 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].distance(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance2, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d)); - SearchResponse searchResponse3 = client().prepareSearch().addStoredField("_source") + SearchResponse searchResponse3 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance3, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); - SearchResponse searchResponse4 = client().prepareSearch().addStoredField("_source") + SearchResponse searchResponse4 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].distanceInKm(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue(); @@ -102,7 +102,7 @@ public class GeoDistanceTests extends ESIntegTestCase { SearchResponse searchResponse5 = client() .prepareSearch() - .addStoredField("_source") + .addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat) + "," + (target_long + 360) + ")")) .execute().actionGet(); Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue(); @@ -111,21 +111,21 @@ public class GeoDistanceTests extends ESIntegTestCase { SearchResponse searchResponse6 = client() .prepareSearch() - .addStoredField("_source") + .addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat + 360) + "," + (target_long) + ")")) .execute().actionGet(); Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance6, 
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); - SearchResponse searchResponse7 = client().prepareSearch().addStoredField("_source") + SearchResponse searchResponse7 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInMiles(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultDistance7 = searchResponse7.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance7, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d)); - SearchResponse searchResponse8 = client().prepareSearch().addStoredField("_source") + SearchResponse searchResponse8 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].distanceInMiles(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultDistance8 = searchResponse8.getHits().getHits()[0].getFields().get("distance").getValue(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java index 5ada804954a..45274f44401 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.groovy.GroovyPlugin; @@ -825,6 +826,83 @@ public class HistogramTests extends ESIntegTestCase { } } + public void testEmptyWithExtendedBounds() throws Exception { + int lastDataBucketKey = (numValueBuckets - 1) * interval; + + // randomizing the number of buckets on the min bound + // (can sometimes fall within the data range, but more frequently will fall before the data range) + int addedBucketsLeft = randomIntBetween(0, numValueBuckets); + long boundsMinKey = addedBucketsLeft * interval; + if (frequently()) { + boundsMinKey = -boundsMinKey; + } else { + addedBucketsLeft = 0; + } + long boundsMin = boundsMinKey + randomIntBetween(0, interval - 1); + + // randomizing the number of buckets on the max bound + // (can sometimes fall within the data range, but more frequently will fall after the data range) + int addedBucketsRight = randomIntBetween(0, numValueBuckets); + long boundsMaxKeyDelta = addedBucketsRight * interval; + if (rarely()) { + addedBucketsRight = 0; + boundsMaxKeyDelta = -boundsMaxKeyDelta; + } + long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta; + long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1); + + + // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an + // error + boolean invalidBoundsError = boundsMin > boundsMax; + + // constructing the newly expected bucket list + int bucketsCount = numValueBuckets + addedBucketsLeft + addedBucketsRight; + long[] extendedValueCounts = new long[bucketsCount]; + System.arraycopy(valueCounts, 0, extendedValueCounts, addedBucketsLeft, valueCounts.length); + + SearchResponse response = null; + try { + response = 
client().prepareSearch("idx") + .setQuery(QueryBuilders.termQuery("foo", "bar")) + .addAggregation(histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .minDocCount(0) + .extendedBounds(new ExtendedBounds(boundsMin, boundsMax))) + .execute().actionGet(); + + if (invalidBoundsError) { + fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); + return; + } + + } catch (Exception e) { + if (invalidBoundsError) { + // expected + return; + } else { + throw e; + } + } + assertSearchResponse(response); + + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(bucketsCount)); + + long key = Math.min(boundsMinKey, 0); + for (int i = 0; i < bucketsCount; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(0L)); + key += interval; + } + } + /** * see issue #9634, negative interval in histogram should raise exception */ diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 6a8be584085..28894c5c1cc 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -102,33 +102,33 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field1").execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field1").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); // field2 is not stored, check that it is not extracted from source. 
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field2").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(0)); assertThat(searchResponse.getHits().getAt(0).fields().get("field2"), nullValue()); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field3").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*3").addField("field1").addField("field2").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); @@ -136,20 +136,20 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field*").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("f*3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = 
client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).source(), nullValue()); @@ -157,7 +157,7 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").addField("_source").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).source(), notNullValue()); @@ -437,15 +437,15 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()) - .addStoredField("byte_field") - .addStoredField("short_field") - .addStoredField("integer_field") - .addStoredField("long_field") - .addStoredField("float_field") - .addStoredField("double_field") - .addStoredField("date_field") - .addStoredField("boolean_field") - .addStoredField("binary_field") + .addField("byte_field") + .addField("short_field") + .addField("integer_field") + .addField("long_field") + .addField("float_field") + .addField("double_field") + .addField("date_field") + .addField("boolean_field") + .addField("binary_field") .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); @@ -478,7 +478,7 @@ public class SearchFieldsTests extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("my-index") .setTypes("my-type1") - .addStoredField("field1").addStoredField("_routing") + .addField("field1").addField("_routing") .get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); @@ -493,7 +493,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .setRefreshPolicy(IMMEDIATE) .get(); - assertFailures(client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"), + assertFailures(client().prepareSearch("my-index").setTypes("my-type1").addField("field1"), RestStatus.BAD_REQUEST, containsString("field [field1] isn't a leaf field")); } @@ -557,14 +557,14 @@ public class SearchFieldsTests extends ESIntegTestCase { String field = "field1.field2.field3.field4"; - SearchResponse searchResponse = client().prepareSearch("my-index").setTypes("my-type1").addStoredField(field).get(); + SearchResponse searchResponse = client().prepareSearch("my-index").setTypes("my-type1").addField(field).get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), 
equalTo("value2")); - searchResponse = client().prepareSearch("my-index").setTypes("my-type2").addStoredField(field).get(); + searchResponse = client().prepareSearch("my-index").setTypes("my-type2").addField(field).get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); @@ -621,16 +621,16 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) - .addDocValueField("text_field") - .addDocValueField("keyword_field") - .addDocValueField("byte_field") - .addDocValueField("short_field") - .addDocValueField("integer_field") - .addDocValueField("long_field") - .addDocValueField("float_field") - .addDocValueField("double_field") - .addDocValueField("date_field") - .addDocValueField("boolean_field"); + .addFieldDataField("text_field") + .addFieldDataField("keyword_field") + .addFieldDataField("byte_field") + .addFieldDataField("short_field") + .addFieldDataField("integer_field") + .addFieldDataField("long_field") + .addFieldDataField("float_field") + .addFieldDataField("double_field") + .addFieldDataField("date_field") + .addFieldDataField("boolean_field"); SearchResponse searchResponse = builder.execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); @@ -704,7 +704,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .setParent("parent_1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject())); - SearchResponse response = client().prepareSearch("test").addStoredField("field1").get(); + SearchResponse response = client().prepareSearch("test").addField("field1").get(); assertSearchResponse(response); assertHitCount(response, 1); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 2642430266b..8d258865acb 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.inject.multibindings.Multibinder; @@ -40,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -129,7 +127,7 @@ public class TemplateQueryParserTests extends ESTestCase { new IndexSettingsModule(index, settings) ).createInjector(); - AnalysisService analysisService = new AnalysisRegistry(null, environment).build(idxSettings); + AnalysisService 
analysisService = createAnalysisService(idxSettings, settings); SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()).getMapperRegistry(); MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () -> diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java index fbe55782e69..147eae6e4d1 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanNearQuery; @@ -90,6 +91,7 @@ public final class ExtractQueryTermsService { map.put(SpanNotQuery.class, spanNotQuery()); map.put(BooleanQuery.class, booleanQuery()); map.put(DisjunctionMaxQuery.class, disjunctionMaxQuery()); + map.put(SynonymQuery.class, synonymQuery()); queryProcessors = Collections.unmodifiableMap(map); } @@ -221,6 +223,13 @@ public final class ExtractQueryTermsService { }; } + static Function synonymQuery() { + return query -> { + Set terms = new HashSet<>(((SynonymQuery) query).getTerms()); + return new Result(true, terms); + }; + } + static Function commonTermsQuery() { return query -> { List terms = ((CommonTermsQuery) query).getTerms(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 1c47610ec15..8ce12c1f7e6 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -359,6 +359,11 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { + client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", "test", "type", "1", null, null, null)) + .get(); + }).getRootCause(); + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("source disabled")); + } + public void testPercolatorSpecificQueries() throws Exception { createIndex("test", client().admin().indices().prepareCreate("test") .addMapping("type", "field1", "type=text", "field2", "type=text") diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index 7725ee7f519..80a6ff891da 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -123,8 +123,8 @@ public abstract class AbstractBulkByScrollRequest> getCharFilters() { + return singletonMap("icu_normalizer", IcuNormalizerCharFilterFactory::new); + } + + @Override + public Map> getTokenFilters() 
{ + Map> extra = new HashMap<>(); + extra.put("icu_normalizer", IcuNormalizerTokenFilterFactory::new); + extra.put("icu_folding", IcuFoldingTokenFilterFactory::new); + extra.put("icu_collation", IcuCollationTokenFilterFactory::new); + extra.put("icu_transform", IcuTransformTokenFilterFactory::new); + return extra; + } + + @Override + public Map> getTokenizers() { + return singletonMap("icu_tokenizer", IcuTokenizerFactory::new); } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java index 1630d514ae3..180c4268612 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java @@ -102,6 +102,6 @@ public class IcuTokenizerFactoryTests extends ESTestCase { .build(); Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); - return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisICUPlugin()::onModule); + return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisICUPlugin()); } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java index b399dfd34f4..9255a250f16 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java @@ -31,8 +31,7 @@ import static org.hamcrest.Matchers.instanceOf; */ public class SimpleIcuAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), - Settings.EMPTY, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisICUPlugin()); TokenizerFactory tokenizerFactory = analysisService.tokenizer("icu_tokenizer"); assertThat(tokenizerFactory, instanceOf(IcuTokenizerFactory.class)); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 71752196af0..62e9c9db145 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -50,7 +50,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "I WİLL USE TURKİSH CASING", "ı will use turkish casıng"); @@ -66,7 +66,7 @@ public class 
SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.decomposition", "canonical") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"); @@ -82,7 +82,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "secondary") .put("index.analysis.filter.myCollator.decomposition", "no") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "TESTING", "testing"); @@ -99,7 +99,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.alternate", "shifted") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "foo-bar", "foo bar"); @@ -117,7 +117,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.alternate", "shifted") .put("index.analysis.filter.myCollator.variableTop", " ") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "foo bar", "foobar"); @@ -135,7 +135,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.numeric", "true") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollation(filterFactory, "foobar-9", "foobar-10", -1); @@ -152,7 +152,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.caseLevel", "true") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory 
filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "résumé", "resume"); @@ -172,7 +172,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "tertiary") .put("index.analysis.filter.myCollator.caseFirst", "upper") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollation(filterFactory, "Resume", "resume", -1); @@ -200,7 +200,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.rules", tailoredRules) .put("index.analysis.filter.myCollator.strength", "primary") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "Töne", "Toene"); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index 96defd0e076..b82accf0cf8 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -37,7 +37,7 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { Settings settings = Settings.builder() .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); CharFilterFactory charFilterFactory = analysisService.charFilter("myNormalizerChar"); String input = "ʰ㌰゙5℃№㈱㌘,バッファーの正規化のテスト.㋐㋑㋒㋓㋔カキクケコザジズゼゾg̈각/각நிเกषिchkʷक्षि"; @@ -61,7 +61,7 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { .put("index.analysis.char_filter.myNormalizerChar.name", "nfkc") .put("index.analysis.char_filter.myNormalizerChar.mode", "decompose") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); CharFilterFactory charFilterFactory = analysisService.charFilter("myNormalizerChar"); String input = "ʰ㌰゙5℃№㈱㌘,バッファーの正規化のテスト.㋐㋑㋒㋓㋔カキクケコザジズゼゾg̈각/각நிเกषिchkʷक्षि"; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java index e92e782a96c..c4b4db53c4a 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java +++ 
b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java @@ -19,6 +19,9 @@ package org.elasticsearch.plugin.analysis.kuromoji; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.JapaneseStopTokenFilterFactory; import org.elasticsearch.index.analysis.KuromojiAnalyzerProvider; import org.elasticsearch.index.analysis.KuromojiBaseFormFilterFactory; @@ -28,23 +31,42 @@ import org.elasticsearch.index.analysis.KuromojiNumberFilterFactory; import org.elasticsearch.index.analysis.KuromojiPartOfSpeechFilterFactory; import org.elasticsearch.index.analysis.KuromojiReadingFormFilterFactory; import org.elasticsearch.index.analysis.KuromojiTokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -/** - * - */ -public class AnalysisKuromojiPlugin extends Plugin { +import java.util.HashMap; +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerCharFilter("kuromoji_iteration_mark", KuromojiIterationMarkCharFilterFactory::new); - module.registerAnalyzer("kuromoji", KuromojiAnalyzerProvider::new); - module.registerTokenizer("kuromoji_tokenizer", KuromojiTokenizerFactory::new); - module.registerTokenFilter("kuromoji_baseform", KuromojiBaseFormFilterFactory::new); - module.registerTokenFilter("kuromoji_part_of_speech", KuromojiPartOfSpeechFilterFactory::new); - module.registerTokenFilter("kuromoji_readingform", KuromojiReadingFormFilterFactory::new); - module.registerTokenFilter("kuromoji_stemmer", KuromojiKatakanaStemmerFactory::new); - module.registerTokenFilter("ja_stop", JapaneseStopTokenFilterFactory::new); - module.registerTokenFilter("kuromoji_number", KuromojiNumberFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisKuromojiPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map> getCharFilters() { + return singletonMap("kuromoji_iteration_mark", KuromojiIterationMarkCharFilterFactory::new); + } + + @Override + public Map> getTokenFilters() { + Map> extra = new HashMap<>(); + extra.put("kuromoji_baseform", KuromojiBaseFormFilterFactory::new); + extra.put("kuromoji_part_of_speech", KuromojiPartOfSpeechFilterFactory::new); + extra.put("kuromoji_readingform", KuromojiReadingFormFilterFactory::new); + extra.put("kuromoji_stemmer", KuromojiKatakanaStemmerFactory::new); + extra.put("ja_stop", JapaneseStopTokenFilterFactory::new); + extra.put("kuromoji_number", KuromojiNumberFilterFactory::new); + return extra; + } + + @Override + public Map> getTokenizers() { + return singletonMap("kuromoji_tokenizer", KuromojiTokenizerFactory::new); + } + + @Override + public Map>> getAnalyzers() { + return singletonMap("kuromoji", KuromojiAnalyzerProvider::new); } } diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 540e11250d0..53196ac7462 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ 
b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -198,7 +198,7 @@ public class KuromojiAnalysisTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); - return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisKuromojiPlugin()::onModule); + return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisKuromojiPlugin()); } public static void assertSimpleTSOutput(TokenStream stream, diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java index 3be97b444db..c7355e976ce 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java @@ -20,13 +20,19 @@ package org.elasticsearch.plugin.analysis; import org.elasticsearch.index.analysis.PhoneticTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisPhoneticPlugin extends Plugin { +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerTokenFilter("phonetic", PhoneticTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisPhoneticPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map> getTokenFilters() { + return singletonMap("phonetic", PhoneticTokenFilterFactory::new); } } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index 18e49fa6e51..3dcfadce781 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -39,8 +39,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { Settings settings = Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, - new AnalysisPhoneticPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisPhoneticPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("phonetic"); MatcherAssert.assertThat(filterFactory, instanceOf(PhoneticTokenFilterFactory.class)); } diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java index 05c779bb5a9..b11a157c149 100644 --- 
a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java @@ -19,20 +19,40 @@ package org.elasticsearch.plugin.analysis.smartcn; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.SmartChineseAnalyzerProvider; import org.elasticsearch.index.analysis.SmartChineseNoOpTokenFilterFactory; import org.elasticsearch.index.analysis.SmartChineseTokenizerTokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisSmartChinesePlugin extends Plugin { +import java.util.HashMap; +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerAnalyzer("smartcn", SmartChineseAnalyzerProvider::new); - module.registerTokenizer("smartcn_tokenizer", SmartChineseTokenizerTokenizerFactory::new); - // This is an alias to "smartcn_tokenizer"; it's here for backwards compat - module.registerTokenizer("smartcn_sentence", SmartChineseTokenizerTokenizerFactory::new); - // This is a noop token filter; it's here for backwards compat before we had "smartcn_tokenizer" - module.registerTokenFilter("smartcn_word", SmartChineseNoOpTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisSmartChinesePlugin extends Plugin implements AnalysisPlugin { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + // This is a noop token filter; it's here for backwards compat before we had "smartcn_tokenizer" + return singletonMap("smartcn_word", SmartChineseNoOpTokenFilterFactory::new); + } + + @Override + public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() { + Map<String, AnalysisProvider<TokenizerFactory>> extra = new HashMap<>(); + extra.put("smartcn_tokenizer", SmartChineseTokenizerTokenizerFactory::new); + // This is an alias to "smartcn_tokenizer"; it's here for backwards compat + extra.put("smartcn_sentence", SmartChineseTokenizerTokenizerFactory::new); + return extra; + } + + @Override + public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() { + return singletonMap("smartcn", SmartChineseAnalyzerProvider::new); } } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index 0fcc42643d4..08aebdee2bb 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -29,11 +29,10 @@ import java.io.IOException; import static org.hamcrest.Matchers.instanceOf; -/** - */ public class SimpleSmartChineseAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisSmartChinesePlugin()::onModule); + final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, + new AnalysisSmartChinesePlugin()); TokenizerFactory tokenizerFactory = 
analysisService.tokenizer("smartcn_tokenizer"); MatcherAssert.assertThat(tokenizerFactory, instanceOf(SmartChineseTokenizerTokenizerFactory.class)); } diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java index 8549795f4b6..98dd9634fb9 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java @@ -19,15 +19,27 @@ package org.elasticsearch.plugin.analysis.stempel; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.pl.PolishAnalyzerProvider; import org.elasticsearch.index.analysis.pl.PolishStemTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisStempelPlugin extends Plugin { +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerAnalyzer("polish", PolishAnalyzerProvider::new); - module.registerTokenFilter("polish_stem", PolishStemTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisStempelPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + return singletonMap("polish_stem", PolishStemTokenFilterFactory::new); + } + + @Override + public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() { + return singletonMap("polish", PolishAnalyzerProvider::new); } } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index 9bfcc2c2f3f..4f7ee642ebd 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -36,7 +36,8 @@ import static org.hamcrest.Matchers.instanceOf; */ public class PolishAnalysisTests extends ESTestCase { public void testDefaultsPolishAnalysis() throws IOException { - final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisStempelPlugin()::onModule); + final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, + new AnalysisStempelPlugin()); TokenFilterFactory tokenizerFactory = analysisService.tokenFilter("polish_stem"); MatcherAssert.assertThat(tokenizerFactory, instanceOf(PolishStemTokenFilterFactory.class)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index 9458b6920c4..3fc12ccdfed 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -49,7 +49,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { 
Settings settings = Settings.builder() .put("index.analysis.filter.myStemmer.type", "polish_stem") .build(); - AnalysisService analysisService = createAnalysisService(index, settings, new AnalysisStempelPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(index, settings, new AnalysisStempelPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myStemmer"); @@ -65,8 +65,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { } private void testAnalyzer(String source, String... expected_terms) throws IOException { - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, - new AnalysisStempelPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisStempelPlugin()); Analyzer analyzer = analysisService.analyzer("polish").analyzer(); diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml index f180f51dfc5..458990cc90c 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml @@ -54,7 +54,7 @@ search: index: test body: - stored_fields: [file.content_type,file.name] + fields: [file.content_type,file.name] - match: { hits.total: 1 } - match: { hits.hits.0.fields: { file.content_type: ["text/my-dummy-content-type"], file.name: ["my-dummy-name-txt"] }} diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml index c2d42be3b9a..dc6f800c078 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml @@ -57,7 +57,7 @@ setup: query: match: file.content: "apache tika" - stored_fields: [] + fields: [] highlight: fields: file.content: {} diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml index 99f427c1ab9..69991b9d0c0 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml @@ -38,7 +38,7 @@ setup: search: index: test body: - stored_fields: [file.content, file.author, file.date, file.content_length, file.content_type] + fields: [file.content, file.author, file.date, file.content_length, file.content_type] - match: { hits.total: 1 } - match: { hits.hits.0.fields: { file.content: ["Test elasticsearch\n"], @@ -65,7 +65,7 @@ setup: search: index: test body: - stored_fields: [file.content, file.author, file.date, file.content_length, file.content_type] + fields: [file.content, file.author, file.date, file.content_length, file.content_type] - match: { hits.total: 1 } - match: { hits.hits.0.fields: { file.content: ["Test elasticsearch\n"], diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java 
b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java index 956645aebd2..761fb5fd144 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -83,8 +83,8 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { ElasticsearchAssertions.assertHitCount(countResponse, 3L); final SearchResponse sizeResponse = client().prepareSearch(indexName) - .addStoredField("_source") - .addStoredField("_size") + .addField("_source") + .addField("_size") .get(); ElasticsearchAssertions.assertHitCount(sizeResponse, 3L); for (SearchHit hit : sizeResponse.getHits().getHits()) { diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index d3ef723cb20..125b7bdd6c5 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -40,3 +40,11 @@ thirdPartyAudit.excludes = [ 'org.slf4j.Logger', 'org.slf4j.LoggerFactory', ] + +integTest { + cluster { + setting 'cloud.azure.storage.my_account_test.account', 'cloudazureresource' + setting 'cloud.azure.storage.my_account_test.key', 'abcdefgh' + setting 'script.stored', 'true' + } +} \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index b6f5f12b266..13db36aeb5a 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -25,6 +25,7 @@ import com.microsoft.azure.storage.StorageException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -42,6 +43,9 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; + + public static final Setting<Settings> STORAGE_ACCOUNTS = Setting.groupSetting(Storage.PREFIX, Setting.Property.NodeScope); + public static final Setting<TimeValue> TIMEOUT_SETTING = Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(-1), Property.NodeScope); public static final Setting<String> ACCOUNT_SETTING = diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index 281ef79cb27..f64ffed4a03 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -112,9 +112,8 @@ public final class AzureStorageSettings { } private static List<AzureStorageSettings> createStorageSettings(Settings settings) { - Setting<Settings> storageGroupSetting = Setting.groupSetting(Storage.PREFIX, Setting.Property.NodeScope); // ignore global timeout which has the same prefix but does not belong to any group - Settings groups = storageGroupSetting.get(settings.filter((k) -> 
k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); + Settings groups = Storage.STORAGE_ACCOUNTS.get(settings.filter((k) -> k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); List<AzureStorageSettings> storageSettings = new ArrayList<>(); for (String groupName : groups.getAsGroups().keySet()) { storageSettings.add( diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java index ce122353f51..b04b613df21 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; @@ -62,13 +61,13 @@ public class AzureRepositoryPlugin extends Plugin { @Override public List<Setting<?>> getSettings() { - return Arrays.asList(AzureStorageService.Storage.ACCOUNT_SETTING, - AzureStorageService.Storage.COMPRESS_SETTING, - AzureStorageService.Storage.CONTAINER_SETTING, - AzureStorageService.Storage.BASE_PATH_SETTING, - AzureStorageService.Storage.CHUNK_SIZE_SETTING, - AzureStorageService.Storage.LOCATION_MODE_SETTING); - + return Arrays.asList(AzureStorageService.Storage.STORAGE_ACCOUNTS, + AzureStorageService.Storage.ACCOUNT_SETTING, + AzureStorageService.Storage.COMPRESS_SETTING, + AzureStorageService.Storage.CONTAINER_SETTING, + AzureStorageService.Storage.BASE_PATH_SETTING, + AzureStorageService.Storage.CHUNK_SIZE_SETTING, + AzureStorageService.Storage.LOCATION_MODE_SETTING); } @Override diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/20_repository.yaml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/20_repository.yaml new file mode 100644 index 00000000000..e3979cd32c3 --- /dev/null +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/20_repository.yaml @@ -0,0 +1,26 @@ +"Repository can be registered": + - do: + snapshot.create_repository: + repository: test_repo_azure + verify: false + body: + type: azure + settings: + account : "my_test_account" + container : "backup-container" + base_path : "backups" + chunk_size: "32m" + compress : true + + - is_true: acknowledged + + - do: + snapshot.get_repository: + repository: test_repo_azure + + - is_true : test_repo_azure + - match : { test_repo_azure.settings.account : "my_test_account" } + - match : { test_repo_azure.settings.container : "backup-container" } + - match : { test_repo_azure.settings.base_path : "backups" } + - match : { test_repo_azure.settings.chunk_size: "32m" } + - match : { test_repo_azure.settings.compress : "true" } diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 58e7e50e421..905710a82be 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -40,8 +40,9 @@ List<String> availableBoxes = [ 'ubuntu-1504' ] +String vagrantBoxes = getProperties().get('vagrant.boxes', 'sample') List<String> boxes = [] -for (String box : getProperties().get('vagrant.boxes', 
'sample').split(',')) { +for (String box : vagrantBoxes.split(',')) { if (box == 'sample') { boxes.add('centos-7') boxes.add('ubuntu-1404') @@ -56,9 +57,28 @@ for (String box : getProperties().get('vagrant.boxes', 'sample').split(',')) { } } -/* The version of elasticsearch that we upgrade *from* as part of testing - * upgrades. */ -String upgradeFromVersion = '2.3.3' +long seed +String formattedSeed +String[] upgradeFromVersions +String upgradeFromVersion + +String maybeTestsSeed = System.getProperty("tests.seed", null); +if (maybeTestsSeed != null) { + seed = new BigInteger(maybeTestsSeed, 16).longValue() + formattedSeed = maybeTestsSeed +} else { + seed = new Random().nextLong() + formattedSeed = String.format("%016X", seed) +} + +String maybeUpgradeFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) +if (maybeUpgradeFromVersions != null) { + upgradeFromVersions = maybeUpgradeFromVersions.split(",") +} else { + upgradeFromVersions = new File(project.projectDir, 'versions').readLines('UTF-8') as String[] +} + +upgradeFromVersion = upgradeFromVersions[new Random(seed).nextInt(upgradeFromVersions.length)] configurations { test @@ -95,8 +115,37 @@ task stop { description 'Stop any tasks from tests that still may be running' } +Set<String> getVersions() { + Node xml + new URL('http://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> + xml = new XmlParser().parse(s) + } + return new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /2\.\d\.\d/ }) +} + +task updatePackagingTestUpgradeFromVersions { + doLast { + Set<String> versions = getVersions() + new File(project.projectDir, 'versions').text = versions.join('\n') + '\n' + } +} + +task verifyPackagingTestUpgradeFromVersions { + doLast { + String maybeUpdateFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) + if (maybeUpdateFromVersions == null) { + Set<String> versions = getVersions() + Set<String> actualVersions = new HashSet<>(Arrays.asList(upgradeFromVersions)) + if (!versions.equals(actualVersions)) { + throw new GradleException("out-of-date versions [" + actualVersions + "], expected [" + versions + "]; run gradle updatePackagingTestUpgradeFromVersions") + } + } + } +} + File testRoot = new File("$buildDir/testroot") task createTestRoot { + dependsOn verifyPackagingTestUpgradeFromVersions outputs.dir testRoot doLast { testRoot.mkdirs() @@ -147,6 +196,14 @@ task packagingTest { " 'sample' can be used to test a single yum and apt box. 'all' can be used to\n" + " test all available boxes. 
The available boxes are: \n" + " ${availableBoxes}" + gradle.addBuildListener new BuildAdapter() { + @Override + void buildFinished(BuildResult result) { + if (result.failure) { + println "Reproduce with: gradle packagingTest -Pvagrant.boxes=${vagrantBoxes} -Dtests.seed=${formattedSeed} -Dtests.packaging.upgrade.from.versions=${upgradeFromVersions.join(",")}" + } + } + } } // Each box gets it own set of tasks @@ -199,4 +256,3 @@ for (String box : availableBoxes) { } packagingTest.dependsOn(packaging) } - diff --git a/qa/vagrant/versions b/qa/vagrant/versions new file mode 100644 index 00000000000..5f6e0edf99c --- /dev/null +++ b/qa/vagrant/versions @@ -0,0 +1,13 @@ +2.0.0 +2.0.1 +2.0.2 +2.1.0 +2.1.1 +2.1.2 +2.2.0 +2.2.1 +2.2.2 +2.3.0 +2.3.1 +2.3.2 +2.3.3 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 21fda8dc805..d2b9b8cf9b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -38,17 +38,13 @@ "type" : "boolean", "description" : "Specify whether to return detailed information about score computation as part of a hit" }, - "stored_fields": { + "fields": { "type" : "list", - "description" : "A comma-separated list of stored fields to return as part of a hit" - }, - "docvalue_fields": { - "type" : "list", - "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" + "description" : "A comma-separated list of fields to return as part of a hit" }, "fielddata_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" + "description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" }, "from": { "type" : "number", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml new file mode 100644 index 00000000000..370f68d9504 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml @@ -0,0 +1,40 @@ +--- +setup: + - do: + indices.create: + index: test_1 + body: + mappings: + test: + _source: { enabled: false } + - do: + cluster.health: + wait_for_status: yellow + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + +--- +"Missing document source with catch": + + - do: + catch: missing + get_source: + index: test_1 + type: test + id: 1 + +--- +"Missing document source with ignore": + + - do: + get_source: + index: test_1 + type: test + id: 1 + ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml index caf44d0c788..e0ac2aea2df 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml @@ -77,25 +77,20 @@ - do: search: body: - stored_fields: [ include.field2 ] + fields: [ include.field2 ] query: { match_all: {} } - is_false: hits.hits.0._source - do: search: body: - stored_fields: [ include.field2, _source ] + fields: [ include.field2, _source ] query: { match_all: {} } - match: { hits.hits.0._source.include.field2: v2 } - is_true: hits.hits.0._source - 
- do: - search: - docvalue_fields: [ "count" ] - - match: { hits.hits.0.fields.count: [1] } - do: search: fielddata_fields: [ "count" ] - match: { hits.hits.0.fields.count: [1] } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml index 993cbed2647..df7322f12c8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml @@ -31,6 +31,6 @@ setup: term: data: some preference: _local - stored_fields: [user,amount] + fields: [user,amount] diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index 55659164d76..8889dc5aac5 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -36,6 +36,8 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Collections; +import static org.elasticsearch.test.ESTestCase.createAnalysisService; + public class MapperTestUtils { @@ -54,7 +56,7 @@ public class MapperTestUtils { Settings finalSettings = settingsBuilder.build(); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", finalSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(finalSettings)).build(indexSettings); + AnalysisService analysisService = createAnalysisService(indexSettings, finalSettings); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); return new MapperService(indexSettings, analysisService, diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index aaabb17aed8..6a5ea4f7afe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -71,7 +71,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -86,6 +85,7 @@ import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; @@ -118,6 +118,7 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.ExecutionException; +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -917,7 +918,8 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> injector = 
modulesBuilder.createInjector(); IndexScopedSettings indexScopedSettings = injector.getInstance(IndexScopedSettings.class); idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), emptyList()); + AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings); scriptService = scriptModule.getScriptService(); similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index 7cbb7e819c4..5704a178f48 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -38,6 +38,7 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -157,7 +158,8 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static AllocationDeciders yesAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.YES)}); + return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.YES), + new SameShardAllocationDecider(Settings.EMPTY)}); } protected static AllocationDeciders noAllocationDeciders() { @@ -165,7 +167,8 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static AllocationDeciders throttleAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.THROTTLE)}); + return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.THROTTLE), + new SameShardAllocationDecider(Settings.EMPTY)}); } public static class TestAllocateDecision extends AllocationDecider { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 287b7318250..860f133a453 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -28,6 +28,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; + import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; @@ -45,7 +46,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -55,6 +55,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -62,6 +63,7 @@ import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -773,29 +775,34 @@ public abstract class ESTestCase extends LuceneTestCase { } /** - * Creates an AnalysisService to test analysis factories and analyzers. + * Creates an AnalysisService with all the default analyzers configured. */ - @SafeVarargs - public static AnalysisService createAnalysisService(Index index, Settings settings, Consumer<AnalysisModule>... moduleConsumers) throws IOException { + public static AnalysisService createAnalysisService(Index index, Settings settings, AnalysisPlugin... analysisPlugins) + throws IOException { Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - return createAnalysisService(index, nodeSettings, settings, moduleConsumers); + return createAnalysisService(index, nodeSettings, settings, analysisPlugins); } /** - * Creates an AnalysisService to test analysis factories and analyzers. + * Creates an AnalysisService with all the default analyzers configured. */ - @SafeVarargs - public static AnalysisService createAnalysisService(Index index, Settings nodeSettings, Settings settings, Consumer<AnalysisModule>... moduleConsumers) throws IOException { + public static AnalysisService createAnalysisService(Index index, Settings nodeSettings, Settings settings, + AnalysisPlugin... analysisPlugins) throws IOException { Settings indexSettings = Settings.builder().put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build(); + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); + return createAnalysisService(IndexSettingsModule.newIndexSettings(index, indexSettings), nodeSettings, analysisPlugins); + } + + /** + * Creates an AnalysisService with all the default analyzers configured. + */ + public static AnalysisService createAnalysisService(IndexSettings indexSettings, Settings nodeSettings, + AnalysisPlugin... 
analysisPlugins) throws IOException { Environment env = new Environment(nodeSettings); - AnalysisModule analysisModule = new AnalysisModule(env); - for (Consumer<AnalysisModule> consumer : moduleConsumers) { - consumer.accept(analysisModule); - } - SettingsModule settingsModule = new SettingsModule(nodeSettings, InternalSettingsPlugin.VERSION_CREATED); - final AnalysisService analysisService = analysisModule.buildRegistry().build(IndexSettingsModule.newIndexSettings(index, indexSettings)); + AnalysisModule analysisModule = new AnalysisModule(env, Arrays.asList(analysisPlugins)); + final AnalysisService analysisService = analysisModule.getAnalysisRegistry() + .build(indexSettings); return analysisService; }
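
The four analysis-plugin conversions above code against the new AnalysisPlugin extension point, which this patch uses but does not show. A minimal sketch of the shape it presumably has follows: default methods returning empty maps, so a plugin overrides only the registrations it actually provides. Only the four method signatures are confirmed by the diff; the default bodies are an assumption.

// Presumed shape of the extension point implemented by the plugins above.
// The four signatures match the @Override methods in this patch; the
// empty-map default bodies are an assumption.
import java.util.Collections;
import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;

public interface AnalysisPlugin {
    default Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
        return Collections.emptyMap();
    }

    default Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        return Collections.emptyMap();
    }

    default Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
        return Collections.emptyMap();
    }

    default Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
        return Collections.emptyMap();
    }
}

With defaults in the interface, a plugin such as AnalysisPhoneticPlugin can register a single token filter without touching the other three methods, which is why each converted plugin overrides only a subset.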
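The seed handling added to qa/vagrant/build.gradle relies on formatting a long with %016X and re-parsing it through BigInteger being a lossless round trip; that is what lets the printed "Reproduce with" line replay the exact upgrade-from version pick. A small self-contained check of that assumption (class name and version array are hypothetical, not part of the change):

import java.math.BigInteger;
import java.util.Random;

public class SeedRoundTripCheck {
    public static void main(String[] args) {
        long seed = new Random().nextLong();
        // Same formatting the build script uses to print the seed,
        // including for negative longs (unsigned hex form).
        String formattedSeed = String.format("%016X", seed);
        // Same parsing applied to -Dtests.seed: BigInteger accepts the
        // unsigned hex form and longValue() restores the original bits.
        long reparsed = new BigInteger(formattedSeed, 16).longValue();
        if (reparsed != seed) {
            throw new AssertionError("round trip lost bits: " + formattedSeed);
        }
        // Seeding Random identically therefore re-picks the same version.
        String[] upgradeFromVersions = {"2.0.0", "2.3.3"};
        String pick = upgradeFromVersions[new Random(seed).nextInt(upgradeFromVersions.length)];
        System.out.println("picked " + pick + " under seed " + formattedSeed);
    }
}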